import React, { useEffect, useState, useRef, MouseEvent } from "react";

type WaveformProps = {
  audioContext: AudioContext;
};

type AudioFile = {
  bytes: number;
  channels: number;
  frames: number;
  sampleRate: number;
};

export const Waveform: React.FC<WaveformProps> = ({ audioContext }) => {
  const [audioFile, setAudioFile] = useState<AudioFile | null>(null);
  const [currentTime, setCurrentTime] = useState(0);
  // lazy initializer so a single Audio element survives re-renders:
  const [audio] = useState(() => new Audio());

  const waveformCanvasRef = useRef<HTMLCanvasElement>(null);
  const hudCanvasRef = useRef<HTMLCanvasElement>(null);

  const canvasLogicalWidth = 2000;
  const canvasLogicalHeight = 500;

  const videoID = new URLSearchParams(window.location.search).get("video_id");

  // helpers

  // Map a mouse event to an x coordinate in the canvas's logical coordinate
  // space, compensating for the canvas being scaled to its CSS width.
  const mouseEventToCanvasX = (evt: MouseEvent<HTMLCanvasElement>): number => {
    // TODO: use offsetX/offsetY?
    const rect = evt.currentTarget.getBoundingClientRect();
    const elementX = evt.clientX - rect.left;
    return (elementX * evt.currentTarget.width) / rect.width;
  };

  const canvasXToFrame = (x: number): number => {
    if (audioFile == null) {
      return 0;
    }
    return Math.floor((x / canvasLogicalWidth) * audioFile.frames);
  };

  const canvasXToSecs = (x: number): number => {
    if (audioFile == null) {
      return 0;
    }
    const duration = audioFile.frames / audioFile.sampleRate;
    return (canvasXToFrame(x) / audioFile.frames) * duration;
  };

  const secsToCanvasX = (canvasWidth: number, secs: number): number => {
    if (audioFile == null) {
      return 0;
    }
    const duration = audioFile.frames / audioFile.sampleRate;
    return Math.floor(canvasWidth * (secs / duration));
  };

  // effects

  // setup player on page load:
  useEffect(() => {
    const onTimeUpdate = () => setCurrentTime(audio.currentTime);
    audio.addEventListener("timeupdate", onTimeUpdate);
    // remove the listener if the audio element ever changes:
    return () => audio.removeEventListener("timeupdate", onTimeUpdate);
  }, [audio]);

  // load audio data on page load:
  useEffect(() => {
    (async function () {
      console.log("fetching audio data...");
      const resp = await fetch(
        `http://localhost:8888/api/download?video_id=${videoID}`
      );
      const respBody = await resp.json();
      if (respBody.error) {
        console.log("error fetching audio data:", respBody.error);
        return;
      }

      // TODO: safer deserialization?
      const audioFile: AudioFile = {
        bytes: respBody.bytes,
        channels: respBody.channels,
        frames: respBody.frames,
        sampleRate: respBody.sample_rate,
      };
      setAudioFile(audioFile);
    })();
  }, [audioContext]);

  // render waveform to canvas when audioFile is updated:
  useEffect(() => {
    (async function () {
      if (audioFile == null) {
        return;
      }
      console.log("audiofile is", audioFile);

      const canvas = waveformCanvasRef.current;
      if (canvas == null) {
        console.error("no canvas ref available");
        return;
      }
      const ctx = canvas.getContext("2d");
      if (ctx == null) {
        console.error("no 2d context available");
        return;
      }

      ctx.strokeStyle = "#00aa00";
      ctx.fillStyle = "black";
      ctx.fillRect(0, 0, canvas.width, canvas.height);

      // fetch one peak per logical pixel, per channel:
      const resp = await fetch(
        `http://localhost:8888/api/peaks?video_id=${videoID}&start=0&end=${Math.round(
          audioFile.frames
        )}&bins=${canvas.width}`
      );
      const peaks: number[][] = await resp.json();
      console.log("respBody from peaks =", peaks);

      // draw each channel in its own horizontal band, each peak rendered as
      // a vertical line centred on the band's midline:
      const numChannels = peaks.length;
      const chanHeight = canvas.height / numChannels;
      for (let c = 0; c < numChannels; c++) {
        const yOffset = chanHeight * c;
        for (let i = 0; i < peaks[c].length; i++) {
          const val = peaks[c][i];
          // normalize against the maximum 16-bit sample value:
          const height = Math.floor((val / 32768) * chanHeight);
          const y1 = (chanHeight - height) / 2 + yOffset;
          const y2 = y1 + height;
          ctx.beginPath();
          ctx.moveTo(i, y1);
          ctx.lineTo(i, y2);
          ctx.stroke();
        }
      }
    })();
  }, [audioFile]);

  // redraw HUD when the playback position changes:
  useEffect(() => {
    const canvas = hudCanvasRef.current;
    if (canvas == null) {
      console.error("no hud canvas ref available");
      return;
    }
    const ctx = canvas.getContext("2d");
    if (ctx == null) {
      console.error("no hud 2d context available");
      return;
    }

    ctx.clearRect(0, 0, canvas.width, canvas.height);

    // draw the playback position cursor:
    const x = secsToCanvasX(canvas.width, currentTime);
    ctx.strokeStyle = "red";
    ctx.beginPath();
    ctx.moveTo(x, 0);
    ctx.lineTo(x, canvas.height);
    ctx.stroke();
  }, [currentTime]);

  // callbacks

  const handleMouseMove = (evt: MouseEvent<HTMLCanvasElement>) => {
    const canvasX = mouseEventToCanvasX(evt);
    console.log("mousemove, x =", canvasX, "frame =", canvasXToFrame(canvasX));
  };

  const handleMouseDown = (evt: MouseEvent<HTMLCanvasElement>) => {
    if (audioFile == null) {
      return;
    }
    const canvasX = mouseEventToCanvasX(evt);
    audio.currentTime = canvasXToSecs(canvasX);
    console.log("currentTime now", canvasXToSecs(canvasX));
  };

  const handleMouseUp = () => {
    return null;
  };

  const handlePlay = async () => {
    const url = `http://localhost:8888/api/audio?video_id=${videoID}`;
    audio.src = url;
    await audio.play();
    console.log("playing audio from", url);
  };

  const handlePause = () => {
    audio.pause();
    console.log("paused audio");
  };

  const handleZoomIn = () => {
    console.log("zoom in");
  };

  const handleZoomOut = () => {
    console.log("zoom out");
  };

  // render component:

  const wrapperProps = {
    width: "90%",
    height: "500px",
    position: "relative",
    margin: "0 auto",
  } as React.CSSProperties;
  const waveformCanvasProps = {
    width: "100%",
    position: "absolute",
    top: 0,
    left: 0,
    right: 0,
    bottom: 0,
    zIndex: 0,
  } as React.CSSProperties;
  const hudCanvasProps = {
    width: "100%",
    position: "absolute",
    top: 0,
    left: 0,
    right: 0,
    bottom: 0,
    zIndex: 1,
  } as React.CSSProperties;
  const clockTextAreaProps = { color: "#999", width: "400px" };

  return <>

    <h1>clipper</h1>
    <div style={wrapperProps}>
      <canvas
        width={canvasLogicalWidth}
        height={canvasLogicalHeight}
        ref={waveformCanvasRef}
        style={waveformCanvasProps}
      ></canvas>
      {/* the HUD canvas sits on top (zIndex 1) and receives the mouse events: */}
      <canvas
        width={canvasLogicalWidth}
        height={canvasLogicalHeight}
        ref={hudCanvasRef}
        style={hudCanvasProps}
        onMouseMove={handleMouseMove}
        onMouseDown={handleMouseDown}
        onMouseUp={handleMouseUp}
      ></canvas>
    </div>
    <button onClick={handlePlay}>Play</button>
    <button onClick={handlePause}>Pause</button>
    <button onClick={handleZoomIn}>Zoom in</button>
    <button onClick={handleZoomOut}>Zoom out</button>
    <textarea style={clockTextAreaProps} readOnly value={currentTime} />
  </>;
};
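
// A minimal mounting sketch, assuming a separate index.tsx entrypoint and an
// HTML page containing a <div id="root"> — the element ID and file layout are
// assumptions, not part of the component above. Note that browsers typically
// keep a freshly constructed AudioContext suspended until a user gesture, so
// anything routed through it may need an audioContext.resume() first.
//
// index.tsx:
import { createRoot } from "react-dom/client";
import { Waveform } from "./Waveform";

const audioContext = new AudioContext();
const container = document.getElementById("root");
if (container != null) {
  // the component reads video_id from the query string itself,
  // e.g. /?video_id=abc123
  createRoot(container).render(<Waveform audioContext={audioContext} />);
}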