Reformat with prettier
parent 4d7c1b9e6c
commit 06e4b7f550
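The diff below is formatting-only: double quotes become single quotes, missing semicolons are added, long calls and JSX are wrapped, and indentation is normalized; it should not change runtime behavior. As a rough sketch, a Prettier config along the following lines would produce this output — the repository's actual settings are not shown in this commit, so the file name and values here are assumptions (only singleQuote deviates from Prettier's defaults):

// prettier.config.cjs (hypothetical -- the repo's real config is not part of this diff)
module.exports = {
  singleQuote: true, // "react" -> 'react', "video_id" -> 'video_id', etc.
  semi: true,        // Prettier default: adds the missing semicolons (} -> };)
  printWidth: 80,    // Prettier default: wraps the long fetch() calls and JSX attributes
  tabWidth: 2,       // Prettier default: 2-space indentation
};

With a config like this in place, the same reformat can be reproduced with npx prettier --write . (or scoped to the component's source directory).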
@@ -1,4 +1,4 @@
-import { useEffect, useState, useRef, MouseEvent } from "react";
+import { useEffect, useState, useRef, MouseEvent } from 'react';

 type WaveformProps = {
   audioContext: AudioContext;
@@ -11,7 +11,9 @@ type AudioFile = {
   sampleRate: number;
 };

-export const Waveform: React.FC<WaveformProps> = ({ audioContext }: WaveformProps) => {
+export const Waveform: React.FC<WaveformProps> = ({
+  audioContext,
+}: WaveformProps) => {
   const [audioFile, setAudioFile] = useState<AudioFile | null>(null);
   const [currentTime, setCurrentTime] = useState(0);
   const [audio, setAudio] = useState(new Audio());
@@ -20,7 +22,7 @@ export const Waveform: React.FC<WaveformProps> = ({ audioContext }: WaveformProp
   const hudCanvasRef = useRef<HTMLCanvasElement>(null);
   const canvasLogicalWidth = 2000;
   const canvasLogicalHeight = 500;
-  const videoID = new URLSearchParams(window.location.search).get("video_id")
+  const videoID = new URLSearchParams(window.location.search).get('video_id');

   // helpers

@@ -29,7 +31,7 @@ export const Waveform: React.FC<WaveformProps> = ({ audioContext }: WaveformProp
     const rect = evt.currentTarget.getBoundingClientRect();
     const elementX = evt.clientX - rect.left;
     const canvas = evt.target as HTMLCanvasElement;
-    return elementX * canvas.width / rect.width;
+    return (elementX * canvas.width) / rect.width;
   };

   const canvasXToFrame = (x: number): number => {
@@ -37,7 +39,7 @@ export const Waveform: React.FC<WaveformProps> = ({ audioContext }: WaveformProp
       return 0;
     }
     return Math.floor((x / canvasLogicalWidth) * audioFile.frames);
-  }
+  };

   const canvasXToSecs = (x: number): number => {
     if (audioFile == null) {
@@ -45,7 +47,7 @@ export const Waveform: React.FC<WaveformProps> = ({ audioContext }: WaveformProp
     }
     const duration = audioFile.frames / audioFile.sampleRate;
     return (canvasXToFrame(x) / audioFile.frames) * duration;
-  }
+  };

   const secsToCanvasX = (canvasWidth: number, secs: number): number => {
     if (audioFile == null) {
@@ -56,24 +58,28 @@ export const Waveform: React.FC<WaveformProps> = ({ audioContext }: WaveformProp
   };

   // effects

   // setup player on page load:
   useEffect(() => {
-    (async function() {
-      audio.addEventListener("timeupdate", () => { setCurrentTime(audio.currentTime); });
-    })()
+    (async function () {
+      audio.addEventListener('timeupdate', () => {
+        setCurrentTime(audio.currentTime);
+      });
+    })();
   }, [audio]);

   // load audio data on page load:
   useEffect(() => {
-    (async function() {
-      console.log("fetching audio data...");
+    (async function () {
+      console.log('fetching audio data...');

-      const resp = await fetch(`http://localhost:8888/api/download?video_id=${videoID}`);
+      const resp = await fetch(
+        `http://localhost:8888/api/download?video_id=${videoID}`
+      );
       const respBody = await resp.json();

       if (respBody.error) {
-        console.log("error fetching audio data:", respBody.error)
+        console.log('error fetching audio data:', respBody.error);
         return;
       }

@@ -91,64 +97,67 @@ export const Waveform: React.FC<WaveformProps> = ({ audioContext }: WaveformProp

   // render waveform to canvas when audioData is updated:
   useEffect(() => {
-    (async function() {
-    if (audioFile == null) {
-      return;
-    }
+    (async function () {
+      if (audioFile == null) {
+        return;
+      }

-    console.log("audiofile is", audioFile);
+      console.log('audiofile is', audioFile);

-    const canvas = waveformCanvasRef.current;
-    if (canvas == null) {
-      console.error("no canvas ref available");
-      return;
-    }
+      const canvas = waveformCanvasRef.current;
+      if (canvas == null) {
+        console.error('no canvas ref available');
+        return;
+      }

-    const ctx = canvas.getContext("2d");
-    if (ctx == null) {
-      console.error("no 2d context available");
-      return;
-    }
+      const ctx = canvas.getContext('2d');
+      if (ctx == null) {
+        console.error('no 2d context available');
+        return;
+      }

-    ctx.strokeStyle = '#00aa00';
-    ctx.fillStyle = 'black';
-    ctx.fillRect(0, 0, canvas.width, canvas.height);
+      ctx.strokeStyle = '#00aa00';
+      ctx.fillStyle = 'black';
+      ctx.fillRect(0, 0, canvas.width, canvas.height);

-    const resp = await fetch(`http://localhost:8888/api/peaks?video_id=${videoID}&start=0&end=${Math.round(audioFile.frames)}&bins=${canvas.width}`);
-    const peaks = await resp.json();
-    console.log("respBody from peaks =", peaks)
+      const resp = await fetch(
+        `http://localhost:8888/api/peaks?video_id=${videoID}&start=0&end=${Math.round(
+          audioFile.frames
+        )}&bins=${canvas.width}`
+      );
+      const peaks = await resp.json();
+      console.log('respBody from peaks =', peaks);

-    const numChannels = peaks.length;
-    const chanHeight = canvas.height / numChannels;
-    for (let c = 0; c < numChannels; c++) {
-      const yOffset = chanHeight * c;
-      for (let i = 0; i < peaks[c].length; i++) {
-        const val = peaks[c][i];
-        const height = Math.floor((val / 32768) * chanHeight);
-        const y1 = ((chanHeight - height)/2)+yOffset;
-        const y2 = y1 + height;
-        ctx.beginPath();
-        ctx.moveTo(i, y1)
-        ctx.lineTo(i, y2)
-        ctx.stroke()
-      }
-    }
+      const numChannels = peaks.length;
+      const chanHeight = canvas.height / numChannels;
+      for (let c = 0; c < numChannels; c++) {
+        const yOffset = chanHeight * c;
+        for (let i = 0; i < peaks[c].length; i++) {
+          const val = peaks[c][i];
+          const height = Math.floor((val / 32768) * chanHeight);
+          const y1 = (chanHeight - height) / 2 + yOffset;
+          const y2 = y1 + height;
+          ctx.beginPath();
+          ctx.moveTo(i, y1);
+          ctx.lineTo(i, y2);
+          ctx.stroke();
+        }
+      }
     })();
   }, [audioFile]);

   // redraw HUD
   useEffect(() => {
-    (async function() {
-
+    (async function () {
       const canvas = hudCanvasRef.current;
       if (canvas == null) {
-        console.error("no hud canvas ref available");
+        console.error('no hud canvas ref available');
         return;
       }

-      const ctx = canvas.getContext("2d");
+      const ctx = canvas.getContext('2d');
       if (ctx == null) {
-        console.error("no hud 2d context available");
+        console.error('no hud 2d context available');
         return;
       }

@@ -156,21 +165,20 @@ export const Waveform: React.FC<WaveformProps> = ({ audioContext }: WaveformProp

       const x = secsToCanvasX(canvas.width, currentTime);

-      ctx.strokeStyle = "red";
+      ctx.strokeStyle = 'red';
       ctx.beginPath();
       ctx.moveTo(x, 0);
       ctx.lineTo(x, canvas.height);
       ctx.stroke();
-
-    })()
+    })();
   }, [currentTime]);

   // callbacks

   const handleMouseMove = (evt: MouseEvent<HTMLCanvasElement>) => {
     const canvasX = mouseEventToCanvasX(evt);
-    console.log("mousemove, x =", canvasX, "frame =", canvasXToFrame(canvasX));
-  }
+    console.log('mousemove, x =', canvasX, 'frame =', canvasXToFrame(canvasX));
+  };

   const handleMouseDown = (evt: MouseEvent<HTMLCanvasElement>) => {
     if (audioFile == null) {
@@ -179,7 +187,7 @@ export const Waveform: React.FC<WaveformProps> = ({ audioContext }: WaveformProp

     const canvasX = mouseEventToCanvasX(evt);
     audio.currentTime = canvasXToSecs(canvasX);
-    console.log("currentTime now", canvasXToSecs(canvasX));
+    console.log('currentTime now', canvasXToSecs(canvasX));
   };

   const handleMouseUp = () => {
@@ -190,39 +198,75 @@ export const Waveform: React.FC<WaveformProps> = ({ audioContext }: WaveformProp
     const url = `http://localhost:8888/api/audio?video_id=${videoID}`;
     audio.src = url;
     await audio.play();
-    console.log("playing audio from", url);
-  }
+    console.log('playing audio from', url);
+  };

   const handlePause = () => {
-  audio.pause();
-  console.log("paused audio")
-  }
+    audio.pause();
+    console.log('paused audio');
+  };

   const handleZoomIn = () => {
-    console.log("zoom in");
+    console.log('zoom in');
   };

   const handleZoomOut = () => {
-    console.log("zoom out");
+    console.log('zoom out');
   };

   // render component:

-  const wrapperProps = {width: "90%", height: "500px", position: "relative", margin: "0 auto"} as React.CSSProperties;
-  const waveformCanvasProps = {width: "100%", position: "absolute", top: 0, left: 0, right: 0, bottom: 0, zIndex: 0} as React.CSSProperties;
-  const hudCanvasProps = {width: "100%", position: "absolute", top: 0, left: 0, right: 0, bottom: 0, zIndex: 1} as React.CSSProperties;
-  const clockTextAreaProps = {color: "#999", width: "400px"};
+  const wrapperProps = {
+    width: '90%',
+    height: '500px',
+    position: 'relative',
+    margin: '0 auto',
+  } as React.CSSProperties;
+  const waveformCanvasProps = {
+    width: '100%',
+    position: 'absolute',
+    top: 0,
+    left: 0,
+    right: 0,
+    bottom: 0,
+    zIndex: 0,
+  } as React.CSSProperties;
+  const hudCanvasProps = {
+    width: '100%',
+    position: 'absolute',
+    top: 0,
+    left: 0,
+    right: 0,
+    bottom: 0,
+    zIndex: 1,
+  } as React.CSSProperties;
+  const clockTextAreaProps = { color: '#999', width: '400px' };

-  return <>
-    <h1>clipper</h1>
-    <div style={wrapperProps}>
-      <canvas ref={waveformCanvasRef} width={canvasLogicalWidth} height={canvasLogicalHeight} style={waveformCanvasProps}></canvas>
-      <canvas ref={hudCanvasRef} width={canvasLogicalWidth} height={canvasLogicalHeight} onMouseMove={handleMouseMove} onMouseDown={handleMouseDown} onMouseUp={handleMouseUp} style={hudCanvasProps}></canvas>
-    </div>
-    <button onClick={handlePlay}>Play</button>
-    <button onClick={handlePause}>Pause</button>
-    <button onClick={handleZoomIn}>+</button>
-    <button onClick={handleZoomOut}>-</button>
-    <input type="readonly" style={clockTextAreaProps}/>
-  </>
-}
+  return (
+    <>
+      <h1>clipper</h1>
+      <div style={wrapperProps}>
+        <canvas
+          ref={waveformCanvasRef}
+          width={canvasLogicalWidth}
+          height={canvasLogicalHeight}
+          style={waveformCanvasProps}
+        ></canvas>
+        <canvas
+          ref={hudCanvasRef}
+          width={canvasLogicalWidth}
+          height={canvasLogicalHeight}
+          onMouseMove={handleMouseMove}
+          onMouseDown={handleMouseDown}
+          onMouseUp={handleMouseUp}
+          style={hudCanvasProps}
+        ></canvas>
+      </div>
+      <button onClick={handlePlay}>Play</button>
+      <button onClick={handlePause}>Pause</button>
+      <button onClick={handleZoomIn}>+</button>
+      <button onClick={handleZoomOut}>-</button>
+      <input type="readonly" style={clockTextAreaProps} />
+    </>
+  );
+};