import React, { useEffect, useState, useRef, MouseEvent } from 'react';
import { WaveformCanvas } from './WaveformCanvas';

type Props = {
  audioContext: AudioContext;
};

type AudioFile = {
  bytes: number;
  channels: number;
  frames: number;
  sampleRate: number;
};

type ZoomSettings = {
  startFrame: number;
  endFrame: number;
};

// Assumed shape of the /api/peaks response: one array of bin values per channel.
type Peaks = number[][];

const defaultZoomSettings: ZoomSettings = { startFrame: 0, endFrame: 0 };

export const Waveform: React.FC<Props> = ({ audioContext }) => {
  const [audioFile, setAudioFile] = useState<AudioFile | null>(null);
  const [currentTime, setCurrentTime] = useState(0);
  // Lazy initializer, so a new Audio element isn't constructed on every render.
  const [audio] = useState(() => new Audio());
  const [zoomSettings, setZoomSettings] =
    useState<ZoomSettings>(defaultZoomSettings);
  const [waveformPeaks, setWaveformPeaks] = useState<Peaks | null>(null);
  const [overviewPeaks, setOverviewPeaks] = useState<Peaks | null>(null);

  const hudCanvasRef = useRef<HTMLCanvasElement | null>(null);

  const canvasLogicalWidth = 2000;
  const canvasLogicalHeight = 500;

  const videoID = new URLSearchParams(window.location.search).get('video_id');

  // helpers

  const mouseEventToCanvasX = (evt: MouseEvent<HTMLCanvasElement>): number => {
    // TODO: use offsetX/offsetY?
    const rect = evt.currentTarget.getBoundingClientRect();
    const elementX = evt.clientX - rect.left;
    // scale from the element's CSS size to the canvas's logical width:
    return (elementX * evt.currentTarget.width) / rect.width;
  };

  const canvasXToFrame = (x: number): number => {
    if (audioFile == null) {
      return 0;
    }
    return Math.floor((x / canvasLogicalWidth) * audioFile.frames);
  };

  const secsToCanvasX = (canvasWidth: number, secs: number): number => {
    if (audioFile == null) {
      return 0;
    }
    const duration = audioFile.frames / audioFile.sampleRate;
    return Math.floor(canvasWidth * (secs / duration));
  };

  // effects

  // set up the player on mount, removing the listener on unmount:
  useEffect(() => {
    const onTimeUpdate = () => setCurrentTime(audio.currentTime);
    audio.addEventListener('timeupdate', onTimeUpdate);
    return () => audio.removeEventListener('timeupdate', onTimeUpdate);
  }, [audio]);

  // fetch audio metadata on page load:
  useEffect(() => {
    (async function () {
      console.log('fetching audio data...');
      const resp = await fetch(
        `http://localhost:8888/api/download?video_id=${videoID}`
      );
      const respBody = await resp.json();
      if (respBody.error) {
        console.log('error fetching audio data:', respBody.error);
        return;
      }
      // TODO: safer deserialization?
      const audioFile: AudioFile = {
        bytes: respBody.bytes,
        channels: respBody.channels,
        frames: respBody.frames,
        sampleRate: respBody.sample_rate,
      };
      setAudioFile(audioFile);
      setZoomSettings({ startFrame: 0, endFrame: audioFile.frames });
    })();
  }, [audioContext, videoID]);

  // fetch new waveform peaks when the zoom settings are updated:
  useEffect(() => {
    (async function () {
      if (audioFile == null) {
        return;
      }
      let endFrame = zoomSettings.endFrame;
      if (endFrame <= zoomSettings.startFrame) {
        endFrame = audioFile.frames;
      }
      const resp = await fetch(
        `http://localhost:8888/api/peaks?video_id=${videoID}&start=${zoomSettings.startFrame}&end=${endFrame}&bins=${canvasLogicalWidth}`
      );
      const peaks: Peaks = await resp.json();
      console.log('fetched peaks:', peaks);
      setWaveformPeaks(peaks);
      // the first fetch always covers the whole file, so keep it as the overview:
      if (overviewPeaks == null) {
        setOverviewPeaks(peaks);
      }
    })();
  }, [zoomSettings]);

  // redraw the HUD (playback position cursor) when the playback time changes:
  useEffect(() => {
    const canvas = hudCanvasRef.current;
    if (canvas == null) {
      console.error('no hud canvas ref available');
      return;
    }
    const ctx = canvas.getContext('2d');
    if (ctx == null) {
      console.error('no hud 2d context available');
      return;
    }
    ctx.clearRect(0, 0, canvas.width, canvas.height);
    if (audioFile == null) {
      return;
    }
    const x = secsToCanvasX(canvas.width, currentTime);
    ctx.strokeStyle = 'red';
    ctx.beginPath();
    ctx.moveTo(x, 0);
    ctx.lineTo(x, canvas.height);
    ctx.stroke();
  }, [currentTime, audioFile]);

  // callbacks

  const handleMouseMove = (evt: MouseEvent<HTMLCanvasElement>) => {
    const canvasX = mouseEventToCanvasX(evt);
    console.log('mousemove, x =', canvasX, 'frame =', canvasXToFrame(canvasX));
  };
  const handleMouseDown = () => null;
  const handleMouseUp = () => null;

  const handlePlay = async () => {
    const url = `http://localhost:8888/api/audio?video_id=${videoID}`;
    audio.src = url;
    await audio.play();
    console.log('playing audio from', url);
  };

  const handlePause = () => {
    audio.pause();
    console.log('paused audio');
  };

  // halve the visible range, keeping the start frame fixed:
  const handleZoomIn = () => {
    if (audioFile == null) {
      return;
    }
    console.log('zoom in');
    const diff = zoomSettings.endFrame - zoomSettings.startFrame;
    const endFrame = zoomSettings.startFrame + Math.floor(diff / 2);
    setZoomSettings({ ...zoomSettings, endFrame: endFrame });
  };

  // double the visible range (the inverse of zoom in), clamped to the end of
  // the file:
  const handleZoomOut = () => {
    if (audioFile == null) {
      return;
    }
    console.log('zoom out');
    const diff = zoomSettings.endFrame - zoomSettings.startFrame;
    const endFrame = Math.min(
      zoomSettings.startFrame + diff * 2,
      audioFile.frames
    );
    setZoomSettings({ ...zoomSettings, endFrame: endFrame });
  };

  // render component:

  const wrapperProps: React.CSSProperties = {
    width: '90%',
    height: '350px',
    position: 'relative',
    margin: '0 auto',
  };

  const waveformCanvasProps: React.CSSProperties = {
    width: '100%',
    height: '100%',
    position: 'absolute',
    top: 0,
    left: 0,
    right: 0,
    bottom: 0,
    zIndex: 0,
  };

  const hudCanvasProps: React.CSSProperties = {
    width: '100%',
    height: '100%',
    position: 'absolute',
    top: 0,
    left: 0,
    right: 0,
    bottom: 0,
    zIndex: 1,
  };

  const overviewCanvasProps: React.CSSProperties = {
    width: '90%',
    height: '90px',
    margin: '0 auto',
    display: 'block',
  };

  const clockTextAreaProps: React.CSSProperties = {
    color: '#999',
    width: '400px',
  };

  return (
    <>

      {/* The markup below is a reconstruction: the original JSX was lost, so
          this wires together the refs, handlers, and style objects defined
          above in the most plausible way. WaveformCanvas's props are
          assumptions based on its usage here. */}
      <h1>clipper</h1>
      <div style={wrapperProps}>
        <WaveformCanvas
          peaks={waveformPeaks}
          width={canvasLogicalWidth}
          height={canvasLogicalHeight}
          style={waveformCanvasProps}
        />
        <canvas
          ref={hudCanvasRef}
          width={canvasLogicalWidth}
          height={canvasLogicalHeight}
          style={hudCanvasProps}
          onMouseMove={handleMouseMove}
          onMouseDown={handleMouseDown}
          onMouseUp={handleMouseUp}
        />
      </div>
      <WaveformCanvas
        peaks={overviewPeaks}
        width={canvasLogicalWidth}
        height={canvasLogicalHeight}
        style={overviewCanvasProps}
      />
      <div>
        <button onClick={handlePlay}>Play</button>
        <button onClick={handlePause}>Pause</button>
        <button onClick={handleZoomIn}>Zoom in</button>
        <button onClick={handleZoomOut}>Zoom out</button>
        <textarea
          readOnly
          value={currentTime.toFixed(2)}
          style={clockTextAreaProps}
        />
      </div>
    </>
  );
};
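
// Usage sketch (not part of the original file): the component takes an
// AudioContext prop, so a minimal entry point might look like the following.
// The './Waveform' import path and the 'root' element id are assumptions.
//
// import { createRoot } from 'react-dom/client';
// import { Waveform } from './Waveform';
//
// const audioContext = new AudioContext();
// createRoot(document.getElementById('root')!).render(
//   <Waveform audioContext={audioContext} />
// );
//
// For reference, the response shapes assumed from the fetches above (inferred
// from the code, not a documented API): /api/download returns JSON like
// { "bytes": ..., "channels": ..., "frames": ..., "sample_rate": ... }, and
// /api/peaks returns a JSON array of per-channel bin values.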