From faf818e4aed98f87a05c8e3ec2fac09dcf1576f8 Mon Sep 17 00:00:00 2001
From: Rob Watson
Date: Sat, 11 Sep 2021 12:58:43 +0200
Subject: [PATCH] Implement basic zoom in/out

---
 frontend/src/Waveform.tsx | 77 +++++++++++++++++++++++++++------------
 1 file changed, 54 insertions(+), 23 deletions(-)

diff --git a/frontend/src/Waveform.tsx b/frontend/src/Waveform.tsx
index 998a74b..3cfc423 100644
--- a/frontend/src/Waveform.tsx
+++ b/frontend/src/Waveform.tsx
@@ -11,12 +11,20 @@ type AudioFile = {
   sampleRate: number;
 };
 
+type ZoomSettings = {
+  startFrame: number;
+  endFrame: number;
+};
+
+const defaultZoomSettings: ZoomSettings = { startFrame: 0, endFrame: 0 };
+
 export const Waveform: React.FC<WaveformProps> = ({
   audioContext,
 }: WaveformProps) => {
   const [audioFile, setAudioFile] = useState<AudioFile | null>(null);
   const [currentTime, setCurrentTime] = useState(0);
   const [audio, setAudio] = useState(new Audio());
+  const [zoomSettings, setZoomSettings] = useState(defaultZoomSettings);
 
   const waveformCanvasRef = useRef<HTMLCanvasElement>(null);
   const hudCanvasRef = useRef<HTMLCanvasElement>(null);
@@ -41,13 +49,13 @@ export const Waveform: React.FC<WaveformProps> = ({
     return Math.floor((x / canvasLogicalWidth) * audioFile.frames);
   };
 
-  const canvasXToSecs = (x: number): number => {
-    if (audioFile == null) {
-      return 0;
-    }
-    const duration = audioFile.frames / audioFile.sampleRate;
-    return (canvasXToFrame(x) / audioFile.frames) * duration;
-  };
+  // const canvasXToSecs = (x: number): number => {
+  //   if (audioFile == null) {
+  //     return 0;
+  //   }
+  //   const duration = audioFile.frames / audioFile.sampleRate;
+  //   return (canvasXToFrame(x) / audioFile.frames) * duration;
+  // };
 
   const secsToCanvasX = (canvasWidth: number, secs: number): number => {
     if (audioFile == null) {
@@ -92,10 +100,11 @@ export const Waveform: React.FC<WaveformProps> = ({
       };
 
       setAudioFile(audioFile);
+      setZoomSettings({ startFrame: 0, endFrame: audioFile.frames });
     })();
   }, [audioContext]);
 
-  // render waveform to canvas when audioData is updated:
+  // render waveform to canvas when zoom settings are updated:
   useEffect(() => {
     (async function () {
       if (audioFile == null) {
@@ -116,18 +125,21 @@ export const Waveform: React.FC<WaveformProps> = ({
         return;
       }
 
-      ctx.strokeStyle = '#00aa00';
-      ctx.fillStyle = 'black';
-      ctx.fillRect(0, 0, canvas.width, canvas.height);
+      let endFrame = zoomSettings.endFrame;
+      if (endFrame <= zoomSettings.startFrame) {
+        endFrame = audioFile.frames;
+      }
 
       const resp = await fetch(
-        `http://localhost:8888/api/peaks?video_id=${videoID}&start=0&end=${Math.round(
-          audioFile.frames
-        )}&bins=${canvas.width}`
+        `http://localhost:8888/api/peaks?video_id=${videoID}&start=${zoomSettings.startFrame}&end=${endFrame}&bins=${canvas.width}`
      );
       const peaks = await resp.json();
       console.log('respBody from peaks =', peaks);
 
+      ctx.strokeStyle = '#00aa00';
+      ctx.fillStyle = 'black';
+      ctx.fillRect(0, 0, canvas.width, canvas.height);
+
       const numChannels = peaks.length;
       const chanHeight = canvas.height / numChannels;
       for (let c = 0; c < numChannels; c++) {
@@ -144,7 +156,7 @@ export const Waveform: React.FC<WaveformProps> = ({
         }
       }
     })();
-  }, [audioFile]);
+  }, [zoomSettings]);
 
   // redraw HUD
   useEffect(() => {
@@ -163,6 +175,10 @@ export const Waveform: React.FC<WaveformProps> = ({
 
     ctx.clearRect(0, 0, canvas.width, canvas.height);
 
+    if (audioFile == null) {
+      return;
+    }
+
     const x = secsToCanvasX(canvas.width, currentTime);
 
     ctx.strokeStyle = 'red';
@@ -180,14 +196,8 @@ export const Waveform: React.FC<WaveformProps> = ({
     console.log('mousemove, x =', canvasX, 'frame =', canvasXToFrame(canvasX));
   };
 
-  const handleMouseDown = (evt: MouseEvent) => {
-    if (audioFile == null) {
-      return;
-    }
-
-    const canvasX = mouseEventToCanvasX(evt);
-    audio.currentTime = canvasXToSecs(canvasX);
-    console.log('currentTime now', canvasXToSecs(canvasX));
+  const handleMouseDown = () => {
+    return null;
   };
 
   const handleMouseUp = () => {
@@ -207,11 +217,29 @@ export const Waveform: React.FC<WaveformProps> = ({
   };
 
   const handleZoomIn = () => {
+    if (audioFile == null) {
+      return;
+    }
     console.log('zoom in');
+    const diff = zoomSettings.endFrame - zoomSettings.startFrame;
+    const endFrame = zoomSettings.startFrame + Math.floor(diff / 2);
+    const settings = { ...zoomSettings, endFrame: endFrame };
+    setZoomSettings(settings);
   };
 
   const handleZoomOut = () => {
+    if (audioFile == null) {
+      return;
+    }
     console.log('zoom out');
+    const diff = zoomSettings.endFrame - zoomSettings.startFrame;
+    const newDiff = diff * 2;
+    const endFrame = Math.min(
+      zoomSettings.endFrame + newDiff,
+      audioFile.frames
+    );
+    const settings = { ...zoomSettings, endFrame: endFrame };
+    setZoomSettings(settings);
   };
 
   // render component:
@@ -222,6 +250,7 @@ export const Waveform: React.FC<WaveformProps> = ({
     position: 'relative',
     margin: '0 auto',
   } as React.CSSProperties;
+
   const waveformCanvasProps = {
     width: '100%',
     position: 'absolute',
@@ -231,6 +260,7 @@ export const Waveform: React.FC<WaveformProps> = ({
     bottom: 0,
     zIndex: 0,
   } as React.CSSProperties;
+
   const hudCanvasProps = {
     width: '100%',
     position: 'absolute',
@@ -240,6 +270,7 @@ export const Waveform: React.FC<WaveformProps> = ({
     bottom: 0,
     zIndex: 1,
   } as React.CSSProperties;
+
   const clockTextAreaProps = { color: '#999', width: '400px' };
 
   return (