Reformat with prettier

Author: Rob Watson
Date:   2021-09-11 12:05:58 +02:00
Parent: 4d7c1b9e6c
Commit: 06e4b7f550
1 changed file with 130 additions and 86 deletions
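The changes below are mechanical formatting only: double quotes become single quotes, missing semicolons are added, and long statements are wrapped at 80 columns. That is consistent with Prettier's defaults plus the singleQuote option. A minimal sketch of the presumed configuration follows; it is not part of this commit, and the file name and option value are assumptions inferred from the diff:

    // .prettierrc.js (assumed): the only non-default option implied by this diff
    module.exports = {
      singleQuote: true,
    };

The reformat itself would typically be applied with a command along the lines of npx prettier --write <path to the component>.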


@@ -1,4 +1,4 @@
-import { useEffect, useState, useRef, MouseEvent } from "react";
+import { useEffect, useState, useRef, MouseEvent } from 'react';
 
 type WaveformProps = {
   audioContext: AudioContext;
@@ -11,7 +11,9 @@ type AudioFile = {
   sampleRate: number;
 };
 
-export const Waveform: React.FC<WaveformProps> = ({ audioContext }: WaveformProps) => {
+export const Waveform: React.FC<WaveformProps> = ({
+  audioContext,
+}: WaveformProps) => {
   const [audioFile, setAudioFile] = useState<AudioFile | null>(null);
   const [currentTime, setCurrentTime] = useState(0);
   const [audio, setAudio] = useState(new Audio());
@@ -20,7 +22,7 @@ export const Waveform: React.FC<WaveformProps> = ({ audioContext }: WaveformProps) => {
   const hudCanvasRef = useRef<HTMLCanvasElement>(null);
   const canvasLogicalWidth = 2000;
   const canvasLogicalHeight = 500;
-  const videoID = new URLSearchParams(window.location.search).get("video_id")
+  const videoID = new URLSearchParams(window.location.search).get('video_id');
 
   // helpers
@@ -29,7 +31,7 @@ export const Waveform: React.FC<WaveformProps> = ({ audioContext }: WaveformProps) => {
     const rect = evt.currentTarget.getBoundingClientRect();
     const elementX = evt.clientX - rect.left;
     const canvas = evt.target as HTMLCanvasElement;
-    return elementX * canvas.width / rect.width;
+    return (elementX * canvas.width) / rect.width;
   };
 
   const canvasXToFrame = (x: number): number => {
@@ -37,7 +39,7 @@ export const Waveform: React.FC<WaveformProps> = ({ audioContext }: WaveformProps) => {
       return 0;
     }
     return Math.floor((x / canvasLogicalWidth) * audioFile.frames);
-  }
+  };
 
   const canvasXToSecs = (x: number): number => {
     if (audioFile == null) {
@@ -45,7 +47,7 @@ export const Waveform: React.FC<WaveformProps> = ({ audioContext }: WaveformProps) => {
     }
     const duration = audioFile.frames / audioFile.sampleRate;
     return (canvasXToFrame(x) / audioFile.frames) * duration;
-  }
+  };
 
   const secsToCanvasX = (canvasWidth: number, secs: number): number => {
     if (audioFile == null) {
@@ -60,20 +62,24 @@ export const Waveform: React.FC<WaveformProps> = ({ audioContext }: WaveformProps) => {
   // setup player on page load:
   useEffect(() => {
     (async function () {
-      audio.addEventListener("timeupdate", () => { setCurrentTime(audio.currentTime); });
-    })()
+      audio.addEventListener('timeupdate', () => {
+        setCurrentTime(audio.currentTime);
+      });
+    })();
   }, [audio]);
 
   // load audio data on page load:
   useEffect(() => {
     (async function () {
-      console.log("fetching audio data...");
-      const resp = await fetch(`http://localhost:8888/api/download?video_id=${videoID}`);
+      console.log('fetching audio data...');
+      const resp = await fetch(
+        `http://localhost:8888/api/download?video_id=${videoID}`
+      );
       const respBody = await resp.json();
       if (respBody.error) {
-        console.log("error fetching audio data:", respBody.error)
+        console.log('error fetching audio data:', respBody.error);
         return;
       }
@@ -96,17 +102,17 @@ export const Waveform: React.FC<WaveformProps> = ({ audioContext }: WaveformProps) => {
         return;
       }
 
-      console.log("audiofile is", audioFile);
+      console.log('audiofile is', audioFile);
 
       const canvas = waveformCanvasRef.current;
       if (canvas == null) {
-        console.error("no canvas ref available");
+        console.error('no canvas ref available');
         return;
       }
 
-      const ctx = canvas.getContext("2d");
+      const ctx = canvas.getContext('2d');
       if (ctx == null) {
-        console.error("no 2d context available");
+        console.error('no 2d context available');
         return;
       }
@@ -114,9 +120,13 @@ export const Waveform: React.FC<WaveformProps> = ({ audioContext }: WaveformProps) => {
       ctx.fillStyle = 'black';
       ctx.fillRect(0, 0, canvas.width, canvas.height);
 
-      const resp = await fetch(`http://localhost:8888/api/peaks?video_id=${videoID}&start=0&end=${Math.round(audioFile.frames)}&bins=${canvas.width}`);
+      const resp = await fetch(
+        `http://localhost:8888/api/peaks?video_id=${videoID}&start=0&end=${Math.round(
+          audioFile.frames
+        )}&bins=${canvas.width}`
+      );
       const peaks = await resp.json();
-      console.log("respBody from peaks =", peaks)
+      console.log('respBody from peaks =', peaks);
 
       const numChannels = peaks.length;
       const chanHeight = canvas.height / numChannels;
@@ -125,12 +135,12 @@ export const Waveform: React.FC<WaveformProps> = ({ audioContext }: WaveformProps) => {
         for (let i = 0; i < peaks[c].length; i++) {
           const val = peaks[c][i];
           const height = Math.floor((val / 32768) * chanHeight);
-          const y1 = ((chanHeight - height)/2)+yOffset;
+          const y1 = (chanHeight - height) / 2 + yOffset;
           const y2 = y1 + height;
           ctx.beginPath();
-          ctx.moveTo(i, y1)
-          ctx.lineTo(i, y2)
-          ctx.stroke()
+          ctx.moveTo(i, y1);
+          ctx.lineTo(i, y2);
+          ctx.stroke();
         }
       }
     })();
@@ -139,16 +149,15 @@ export const Waveform: React.FC<WaveformProps> = ({ audioContext }: WaveformProps) => {
   // redraw HUD
   useEffect(() => {
     (async function () {
       const canvas = hudCanvasRef.current;
       if (canvas == null) {
-        console.error("no hud canvas ref available");
+        console.error('no hud canvas ref available');
         return;
       }
 
-      const ctx = canvas.getContext("2d");
+      const ctx = canvas.getContext('2d');
       if (ctx == null) {
-        console.error("no hud 2d context available");
+        console.error('no hud 2d context available');
         return;
       }
@@ -156,21 +165,20 @@ export const Waveform: React.FC<WaveformProps> = ({ audioContext }: WaveformProps) => {
       const x = secsToCanvasX(canvas.width, currentTime);
 
-      ctx.strokeStyle = "red";
+      ctx.strokeStyle = 'red';
       ctx.beginPath();
       ctx.moveTo(x, 0);
       ctx.lineTo(x, canvas.height);
       ctx.stroke();
-    })()
+    })();
   }, [currentTime]);
 
   // callbacks
 
   const handleMouseMove = (evt: MouseEvent<HTMLCanvasElement>) => {
     const canvasX = mouseEventToCanvasX(evt);
-    console.log("mousemove, x =", canvasX, "frame =", canvasXToFrame(canvasX));
-  }
+    console.log('mousemove, x =', canvasX, 'frame =', canvasXToFrame(canvasX));
+  };
 
   const handleMouseDown = (evt: MouseEvent<HTMLCanvasElement>) => {
     if (audioFile == null) {
@@ -179,7 +187,7 @@ export const Waveform: React.FC<WaveformProps> = ({ audioContext }: WaveformProps) => {
     const canvasX = mouseEventToCanvasX(evt);
     audio.currentTime = canvasXToSecs(canvasX);
-    console.log("currentTime now", canvasXToSecs(canvasX));
+    console.log('currentTime now', canvasXToSecs(canvasX));
   };
 
   const handleMouseUp = () => {
@@ -190,34 +198,69 @@ export const Waveform: React.FC<WaveformProps> = ({ audioContext }: WaveformProps) => {
     const url = `http://localhost:8888/api/audio?video_id=${videoID}`;
     audio.src = url;
     await audio.play();
-    console.log("playing audio from", url);
-  }
+    console.log('playing audio from', url);
+  };
 
   const handlePause = () => {
     audio.pause();
-    console.log("paused audio")
-  }
+    console.log('paused audio');
+  };
 
   const handleZoomIn = () => {
-    console.log("zoom in");
+    console.log('zoom in');
   };
 
   const handleZoomOut = () => {
-    console.log("zoom out");
+    console.log('zoom out');
   };
 
   // render component:
-  const wrapperProps = {width: "90%", height: "500px", position: "relative", margin: "0 auto"} as React.CSSProperties;
-  const waveformCanvasProps = {width: "100%", position: "absolute", top: 0, left: 0, right: 0, bottom: 0, zIndex: 0} as React.CSSProperties;
-  const hudCanvasProps = {width: "100%", position: "absolute", top: 0, left: 0, right: 0, bottom: 0, zIndex: 1} as React.CSSProperties;
-  const clockTextAreaProps = {color: "#999", width: "400px"};
+  const wrapperProps = {
+    width: '90%',
+    height: '500px',
+    position: 'relative',
+    margin: '0 auto',
+  } as React.CSSProperties;
+  const waveformCanvasProps = {
+    width: '100%',
+    position: 'absolute',
+    top: 0,
+    left: 0,
+    right: 0,
+    bottom: 0,
+    zIndex: 0,
+  } as React.CSSProperties;
+  const hudCanvasProps = {
+    width: '100%',
+    position: 'absolute',
+    top: 0,
+    left: 0,
+    right: 0,
+    bottom: 0,
+    zIndex: 1,
+  } as React.CSSProperties;
+  const clockTextAreaProps = { color: '#999', width: '400px' };
 
-  return <>
+  return (
+    <>
      <h1>clipper</h1>
      <div style={wrapperProps}>
-        <canvas ref={waveformCanvasRef} width={canvasLogicalWidth} height={canvasLogicalHeight} style={waveformCanvasProps}></canvas>
-        <canvas ref={hudCanvasRef} width={canvasLogicalWidth} height={canvasLogicalHeight} onMouseMove={handleMouseMove} onMouseDown={handleMouseDown} onMouseUp={handleMouseUp} style={hudCanvasProps}></canvas>
+        <canvas
+          ref={waveformCanvasRef}
+          width={canvasLogicalWidth}
+          height={canvasLogicalHeight}
+          style={waveformCanvasProps}
+        ></canvas>
+        <canvas
+          ref={hudCanvasRef}
+          width={canvasLogicalWidth}
+          height={canvasLogicalHeight}
+          onMouseMove={handleMouseMove}
+          onMouseDown={handleMouseDown}
+          onMouseUp={handleMouseUp}
+          style={hudCanvasProps}
+        ></canvas>
      </div>
      <button onClick={handlePlay}>Play</button>
      <button onClick={handlePause}>Pause</button>
@@ -225,4 +268,5 @@ export const Waveform: React.FC<WaveformProps> = ({ audioContext }: WaveformProps) => {
      <button onClick={handleZoomOut}>-</button>
      <input type="readonly" style={clockTextAreaProps} />
    </>
-}
+  );
+};