// clipper/frontend/src/Waveform/Waveform.tsx

import { useEffect, useState, useRef, MouseEvent } from 'react';
import { Waveform as WaveformOverview } from './Overview';
import { Thumbnails } from './Thumbnails';
import { Canvas as WaveformCanvas } from './Canvas';
import { canvasXToFrame, mouseEventToCanvasX } from './Helpers';
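
// Usage sketch (assumed, not taken from this repo): the component is
// rendered with an AudioContext prop on a page addressed with a `video_id`
// query parameter, e.g. /?video_id=abc123:
//
//   <Waveform audioContext={new AudioContext()} />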

interface Props {
  audioContext: AudioContext;
}

// Audio corresponds to media.Audio.
export interface Audio {
  bytes: number;
  channels: number;
  frames: number;
  sampleRate: number;
}

// Video corresponds to media.Video.
export interface Video {
  bytes: number;
  thumbnailWidth: number;
  thumbnailHeight: number;
  durationMillis: number;
}

// MediaSet corresponds to media.MediaSet.
export interface MediaSet {
  id: string;
  source: string;
  audio: Audio;
  video: Video;
}

export interface Selection {
  x1: number;
  x2: number;
}

interface ZoomSettings {
  startFrame: number;
  endFrame: number;
}
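
// A zero-valued endFrame is a sentinel meaning "not yet known"; the peaks
// effect below falls back to the full frame range until the MediaSet loads.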
const defaultZoomSettings: ZoomSettings = { startFrame: 0, endFrame: 0 };
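
// The canvases are drawn at a fixed logical resolution and stretched to
// their CSS size by the browser, so all drawing code works in logical
// pixels regardless of the rendered element size.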
export const CanvasLogicalWidth = 2000;
export const CanvasLogicalHeight = 500;

export const Waveform: React.FC<Props> = ({ audioContext }: Props) => {
  const [mediaSet, setMediaSet] = useState<MediaSet | null>(null);
  const [currentTime, setCurrentTime] = useState(0);

  // TODO: extract to player component.
  // Note: `new Audio()` calls the DOM HTMLAudioElement constructor; the
  // local `Audio` interface above shadows only the type, not the value.
  const [audio, _setAudio] = useState(new Audio());

  const [zoomSettings, setZoomSettings] = useState(defaultZoomSettings);
  const [waveformPeaks, setWaveformPeaks] = useState(null);
  const [overviewPeaks, setOverviewPeaks] = useState(null);
  const hudCanvasRef = useRef<HTMLCanvasElement>(null);

  // TODO: error handling
  const videoID = new URLSearchParams(window.location.search).get('video_id');

  // helpers

  // secsToCanvasX returns the logical x coordinate for a given position
  // marker, or null if the marker is outside the current viewport.
  const secsToCanvasX = (secs: number): number | null => {
    if (mediaSet == null) {
      return null;
    }
    const frame = secs * mediaSet.audio.sampleRate;
    if (frame < zoomSettings.startFrame || frame > zoomSettings.endFrame) {
      return null;
    }
    const logicalPixelsPerFrame =
      CanvasLogicalWidth / (zoomSettings.endFrame - zoomSettings.startFrame);
    return (frame - zoomSettings.startFrame) * logicalPixelsPerFrame;
  };
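
  // Worked example: at a 48000 Hz sample rate with a viewport spanning
  // frames 0..480000 (10 s), secs = 2.5 maps to frame 120000, giving
  // x = 120000 * (2000 / 480000) = 500 logical pixels.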

  // effects

  // set up player on page load:
  useEffect(() => {
    const handleTimeUpdate = () => setCurrentTime(audio.currentTime);
    audio.addEventListener('timeupdate', handleTimeUpdate);
    // remove the listener if the component unmounts:
    return () => audio.removeEventListener('timeupdate', handleTimeUpdate);
  }, []);

  // fetch mediaset on page load:
  useEffect(() => {
    (async function () {
      console.log('fetching media...');
      const resp = await fetch(
        `http://localhost:8888/api/media_sets/${videoID}`
      );
      const respBody = await resp.json();
      if (respBody.error) {
        console.log('error fetching media set:', respBody.error);
        return;
      }
      // map the snake_case API fields onto the camelCase MediaSet type:
      const mediaSet: MediaSet = {
        id: respBody.id,
        source: respBody.source,
        audio: {
          sampleRate: respBody.audio.sample_rate,
          bytes: respBody.audio.bytes,
          frames: respBody.audio.frames,
          channels: respBody.audio.channels,
        },
        video: {
          bytes: respBody.video.bytes,
          thumbnailWidth: respBody.video.thumbnail_width,
          thumbnailHeight: respBody.video.thumbnail_height,
          // duration appears to be reported in nanoseconds (a Go
          // time.Duration); convert it to milliseconds:
          durationMillis: Math.floor(respBody.video.duration / 1000 / 1000),
        },
      };
      setMediaSet(mediaSet);
      setZoomSettings({ startFrame: 0, endFrame: mediaSet.audio.frames });
    })();
  }, [audioContext]);

  // load the audio track when the MediaSet is loaded:
  useEffect(() => {
    if (mediaSet == null) {
      return;
    }
    const url = `http://localhost:8888/api/media_sets/${videoID}/audio`;
    audio.src = url;
    audio.muted = false;
    audio.volume = 1;
  }, [mediaSet]);

  // fetch new waveform peaks when zoom settings are updated:
  useEffect(() => {
    (async function () {
      if (mediaSet == null) {
        return;
      }
      // fall back to the full range while zoomSettings still holds its
      // zero-valued default:
      let endFrame = zoomSettings.endFrame;
      if (endFrame <= zoomSettings.startFrame) {
        endFrame = mediaSet.audio.frames;
      }
      const resp = await fetch(
        `http://localhost:8888/api/media_sets/${videoID}/peaks?start=${zoomSettings.startFrame}&end=${endFrame}&bins=${CanvasLogicalWidth}`
      );
      const peaks = await resp.json();
      setWaveformPeaks(peaks);
      if (overviewPeaks == null) {
        setOverviewPeaks(peaks);
      }
    })();
  }, [zoomSettings]);
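
  // The first successful peaks fetch covers the whole file (zoom settings
  // are initialized to the full range once the MediaSet loads), so it
  // doubles as the static overview waveform; later zoomed fetches update
  // only the main waveform.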

  // redraw the HUD playhead when the current time changes:
  useEffect(() => {
    const canvas = hudCanvasRef.current;
    if (canvas == null) {
      return;
    }
    const ctx = canvas.getContext('2d');
    if (ctx == null) {
      console.error('no hud 2d context available');
      return;
    }
    ctx.clearRect(0, 0, canvas.width, canvas.height);
    if (mediaSet == null) {
      return;
    }
    const x = secsToCanvasX(currentTime);
    if (x == null) {
      return;
    }
    ctx.strokeStyle = 'red';
    ctx.beginPath();
    ctx.moveTo(x, 0);
    ctx.lineTo(x, canvas.height);
    ctx.stroke();
  }, [currentTime]);
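
  // Note: the playhead only advances on `timeupdate` events, which browsers
  // typically fire a few times per second, so it moves in small steps
  // rather than continuously.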

  // end of hook configuration.

  // TODO: render loading page here.
  if (mediaSet == null) {
    return null;
  }

  // callbacks

  const handleMouseMove = (evt: MouseEvent<HTMLCanvasElement>) => {
    if (mediaSet == null) {
      return;
    }
    const canvasX = mouseEventToCanvasX(evt);
    console.log(
      'mousemove, x =',
      canvasX,
      'frame =',
      canvasXToFrame(canvasX, mediaSet.audio.frames)
    );
  };

  // not yet implemented:
  const handleMouseDown = () => {
    return null;
  };

  const handleMouseUp = () => {
    return null;
  };

  const handlePlay = async () => {
    await audio.play();
  };

  const handlePause = () => {
    audio.pause();
  };

  // handleZoomIn halves the visible frame range, keeping the start frame
  // anchored:
  const handleZoomIn = () => {
    if (mediaSet == null) {
      return;
    }
    console.log('zoom in');

    const diff = zoomSettings.endFrame - zoomSettings.startFrame;
    const endFrame = zoomSettings.startFrame + Math.floor(diff / 2);
    const settings = { ...zoomSettings, endFrame: endFrame };
    setZoomSettings(settings);
  };

  // handleZoomOut doubles the visible frame range, clamped to the end of
  // the audio:
  const handleZoomOut = () => {
    if (mediaSet == null) {
      return;
    }
    console.log('zoom out');

    const diff = zoomSettings.endFrame - zoomSettings.startFrame;
    const endFrame = Math.min(
      zoomSettings.startFrame + diff * 2,
      mediaSet.audio.frames
    );
    const settings = { ...zoomSettings, endFrame: endFrame };
    setZoomSettings(settings);
  };

  // handleSelectionStart seeks the player to the clicked position, but only
  // while paused:
  const handleSelectionStart = (x: number) => {
    const frame = canvasXToFrame(x, mediaSet.audio.frames);
    if (audio.paused) {
      audio.currentTime = frame / mediaSet.audio.sampleRate;
    }
  };

  // handleSelectionChange zooms the main waveform to the selected range and
  // seeks to its start:
  const handleSelectionChange = (selection: Selection) => {
    if (mediaSet == null) {
      return;
    }
    const startFrame = canvasXToFrame(selection.x1, mediaSet.audio.frames);
    const endFrame = canvasXToFrame(selection.x2, mediaSet.audio.frames);
    const settings: ZoomSettings = {
      startFrame: startFrame,
      endFrame: endFrame,
    };
    setZoomSettings(settings);
    audio.currentTime = startFrame / mediaSet.audio.sampleRate;
  };
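
  // Selection coordinates arrive as logical canvas x values spanning the
  // entire file, which is why canvasXToFrame maps them against the total
  // frame count rather than the current zoomed range.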

  // render component:

  const wrapperProps = {
    width: '90%',
    height: '550px',
    position: 'relative',
    margin: '0 auto',
  } as React.CSSProperties;

  const waveformCanvasProps = {
    width: '100%',
    height: '100%',
    position: 'absolute',
    top: 0,
    left: 0,
    right: 0,
    bottom: 0,
    zIndex: 0,
  } as React.CSSProperties;

  const hudCanvasProps = {
    width: '100%',
    height: '100%',
    position: 'absolute',
    top: 0,
    left: 0,
    right: 0,
    bottom: 0,
    zIndex: 1,
  } as React.CSSProperties;

  const overviewStyles = { ...wrapperProps, height: '120px' };

  // TODO: why is the margin needed?
  const controlPanelStyles = { margin: '1em' } as React.CSSProperties;

  const clockTextAreaProps = { color: '#999', width: '400px' };

  const thumbnailStyles = {
    width: '90%',
    height: '35px',
    margin: '10px auto 0 auto',
    display: 'block',
  };
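
  // The `as React.CSSProperties` casts are needed because, without a
  // contextual type, literal values such as position: 'absolute' widen to
  // plain strings, which do not satisfy the style prop's typings.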

  return (
    <>
      <Thumbnails mediaSet={mediaSet} style={thumbnailStyles} />
      <WaveformOverview
        peaks={overviewPeaks}
        numFrames={mediaSet.audio.frames}
        style={overviewStyles}
        onSelectionStart={handleSelectionStart}
        onSelectionChange={handleSelectionChange}
      />
      <div style={wrapperProps}>
        <WaveformCanvas
          peaks={waveformPeaks}
          fillStyle="black"
          strokeStyle="green"
          style={waveformCanvasProps}
        />
        <canvas
          ref={hudCanvasRef}
          onMouseMove={handleMouseMove}
          onMouseDown={handleMouseDown}
          onMouseUp={handleMouseUp}
          style={hudCanvasProps}
          width={CanvasLogicalWidth}
          height={CanvasLogicalHeight}
        />
      </div>
      <div style={controlPanelStyles}>
        <button onClick={handlePlay}>Play</button>
        <button onClick={handlePause}>Pause</button>
        <button onClick={handleZoomIn}>+</button>
        <button onClick={handleZoomOut}>-</button>
        <input readOnly style={clockTextAreaProps} />
      </div>
    </>
  );
};