import { useEffect, useState, useRef, MouseEvent } from 'react';
import { Waveform as WaveformOverview } from './Waveform/Overview';
import { Thumbnails } from './Waveform/Thumbnails';
import { Canvas as WaveformCanvas } from './Waveform/Canvas';
import {
  secsToCanvasX,
  canvasXToFrame,
  mouseEventToCanvasX,
} from './Waveform/Helpers';

type Props = {
  audioContext: AudioContext;
};

// Audio corresponds to media.Audio.
export type Audio = {
  bytes: number;
  channels: number;
  frames: number;
  sampleRate: number;
};

// Video corresponds to media.Video.
export type Video = {
  bytes: number;
  thumbnailWidth: number;
  thumbnailHeight: number;
  durationMillis: number;
};

// MediaSet corresponds to media.MediaSet.
export type MediaSet = {
  id: string;
  source: string;
  audio: Audio;
  video: Video;
};

export type Selection = {
  x1: number;
  x2: number;
};

type ZoomSettings = {
  startFrame: number;
  endFrame: number;
};

const defaultZoomSettings: ZoomSettings = { startFrame: 0, endFrame: 0 };

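// Logical canvas dimensions. The waveform and HUD canvases are drawn at this
// fixed resolution and scaled to their CSS size by the browser; the helpers
// (secsToCanvasX, canvasXToFrame) convert between playback time, audio frames
// and logical X coordinates.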
export const CanvasLogicalWidth = 2000;
export const CanvasLogicalHeight = 500;

export const Waveform: React.FC<Props> = ({ audioContext }: Props) => {
  const [mediaSet, setMediaSet] = useState<MediaSet | null>(null);
  const [currentTime, setCurrentTime] = useState(0);
  // TODO: extract to player component.
  const [audio, setAudio] = useState(new Audio());
  const [zoomSettings, setZoomSettings] = useState(defaultZoomSettings);
  const [waveformPeaks, setWaveformPeaks] = useState(null);
  const [overviewPeaks, setOverviewPeaks] = useState(null);
  const hudCanvasRef = useRef<HTMLCanvasElement>(null);
  const videoRef = useRef<HTMLVideoElement>(null);

  // TODO: error handling
  const videoID = new URLSearchParams(window.location.search).get('video_id');

  // effects

  // setup player on page load. The empty dependency list ensures the
  // timeupdate listener is attached only once, on mount.
  useEffect(() => {
    (async function () {
      const video = videoRef.current;
      if (video == null) {
        return;
      }

      video.addEventListener('timeupdate', () => {
        setCurrentTime(video.currentTime);
      });
    })();
  }, []);

  // fetch mediaset on page load:
  useEffect(() => {
    (async function () {
      console.log('fetching media...');

      const resp = await fetch(
        `http://localhost:8888/api/media_sets/${videoID}`
      );
      const respBody = await resp.json();

      if (respBody.error) {
        console.error('error fetching media set:', respBody.error);
        return;
      }

      const mediaSet: MediaSet = {
        id: respBody.id,
        source: respBody.source,
        audio: {
          sampleRate: respBody.audio.sample_rate,
          bytes: respBody.audio.bytes,
          frames: respBody.audio.frames,
          channels: respBody.audio.channels,
        },
        video: {
          bytes: respBody.video.bytes,
          thumbnailWidth: respBody.video.thumbnail_width,
          thumbnailHeight: respBody.video.thumbnail_height,
          // duration appears to be reported in nanoseconds; convert to milliseconds.
          durationMillis: Math.floor(respBody.video.duration / 1000 / 1000),
        },
      };

      setMediaSet(mediaSet);
      setZoomSettings({ startFrame: 0, endFrame: mediaSet.audio.frames });
    })();
  }, [audioContext]);

  // load video when MediaSet is loaded:
  useEffect(() => {
    if (mediaSet == null) {
      return;
    }

    const video = videoRef.current;
    if (video == null) {
      return;
    }

    const url = `http://localhost:8888/api/media_sets/${videoID}/video`;
    video.src = url;
    video.muted = false;
    video.volume = 1;
    video.controls = true;
  }, [mediaSet]);

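  // The peaks endpoint is asked for bins=CanvasLogicalWidth values, i.e.
  // (presumably) one peak bin per logical canvas pixel, so the waveform can be
  // drawn without further client-side downsampling.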
  // fetch new waveform peaks when zoom settings are updated:
  useEffect(() => {
    (async function () {
      if (mediaSet == null) {
        return;
      }

      let endFrame = zoomSettings.endFrame;
      if (endFrame <= zoomSettings.startFrame) {
        endFrame = mediaSet.audio.frames;
      }

      const resp = await fetch(
        `http://localhost:8888/api/media_sets/${videoID}/peaks?start=${zoomSettings.startFrame}&end=${endFrame}&bins=${CanvasLogicalWidth}`
      );
      const peaks = await resp.json();
      setWaveformPeaks(peaks);

      if (overviewPeaks == null) {
        setOverviewPeaks(peaks);
      }
    })();
  }, [zoomSettings]);

  // redraw the HUD (playback position cursor) when the current time changes:
  useEffect(() => {
    (async function () {
      const canvas = hudCanvasRef.current;
      if (canvas == null) {
        return;
      }

      const ctx = canvas.getContext('2d');
      if (ctx == null) {
        console.error('no hud 2d context available');
        return;
      }

      ctx.clearRect(0, 0, canvas.width, canvas.height);

      if (mediaSet == null) {
        return;
      }

      const x = secsToCanvasX(
        currentTime,
        mediaSet.audio.sampleRate,
        mediaSet.audio.frames
      );

      ctx.strokeStyle = 'red';
      ctx.beginPath();
      ctx.moveTo(x, 0);
      ctx.lineTo(x, canvas.height);
      ctx.stroke();
    })();
  }, [currentTime]);

  // end of hook configuration.
  // TODO: render loading page here.
  if (mediaSet == null) {
    return null;
  }

  // callbacks

  const handleMouseMove = (evt: MouseEvent<HTMLCanvasElement>) => {
    if (mediaSet == null) {
      return;
    }
    const canvasX = mouseEventToCanvasX(evt);
    console.log(
      'mousemove, x =',
      canvasX,
      'frame =',
      canvasXToFrame(canvasX, mediaSet.audio.frames)
    );
  };

  const handleMouseDown = () => {
    return null;
  };

  const handleMouseUp = () => {
    return null;
  };

  const handlePlay = async () => {
    const video = videoRef.current;
    if (video == null) {
      return;
    }
    await video.play();
  };

  const handlePause = () => {
    const video = videoRef.current;
    if (video == null) {
      return;
    }

    video.pause();
    console.log('paused video');
  };

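  // Zooming in halves the visible frame range (keeping the start frame fixed);
  // zooming out extends the end frame, clamped to the total number of frames.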
  const handleZoomIn = () => {
    if (mediaSet == null) {
      return;
    }
    console.log('zoom in');
    const diff = zoomSettings.endFrame - zoomSettings.startFrame;
    const endFrame = zoomSettings.startFrame + Math.floor(diff / 2);
    const settings = { ...zoomSettings, endFrame: endFrame };
    setZoomSettings(settings);
  };

  const handleZoomOut = () => {
    if (mediaSet == null) {
      return;
    }
    console.log('zoom out');
    const diff = zoomSettings.endFrame - zoomSettings.startFrame;
    const newDiff = diff * 2;
    const endFrame = Math.min(
      zoomSettings.endFrame + newDiff,
      mediaSet.audio.frames
    );
    const settings = { ...zoomSettings, endFrame: endFrame };
    setZoomSettings(settings);
  };

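  // Selection coordinates from the overview waveform are in logical canvas
  // pixels; convert them to frame indices before updating the zoom range.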
  const handleSelectionChange = (selection: Selection) => {
    if (mediaSet == null) {
      return;
    }
    const settings: ZoomSettings = {
      startFrame: canvasXToFrame(selection.x1, mediaSet.audio.frames),
      endFrame: canvasXToFrame(selection.x2, mediaSet.audio.frames),
    };
    setZoomSettings(settings);
  };

  // render component:

  const wrapperProps = {
    width: '90%',
    height: '250px',
    position: 'relative',
    margin: '0 auto',
  } as React.CSSProperties;

  const waveformCanvasProps = {
    width: '100%',
    height: '100%',
    position: 'absolute',
    top: 0,
    left: 0,
    right: 0,
    bottom: 0,
    zIndex: 0,
  } as React.CSSProperties;

  const hudCanvasProps = {
    width: '100%',
    height: '100%',
    position: 'absolute',
    top: 0,
    left: 0,
    right: 0,
    bottom: 0,
    zIndex: 1,
  } as React.CSSProperties;

  const overviewStyles = { ...wrapperProps, height: '60px' };

  // TODO: why is the margin needed?
  const controlPanelStyles = { margin: '1em' } as React.CSSProperties;
  const clockTextAreaProps = { color: '#999', width: '400px' };
  const videoStyles = {
    width: '30%',
    height: 'auto',
    margin: '10px auto 0 auto',
    zIndex: 2,
  } as React.CSSProperties;
  const thumbnailStyles = {
    width: '90%',
    height: '35px',
    margin: '10px auto 0 auto',
    display: 'block',
  };

  return (
    <>
      <video ref={videoRef} style={videoStyles}></video>
      <Thumbnails mediaSet={mediaSet} style={thumbnailStyles} />
      <WaveformOverview
        peaks={overviewPeaks}
        numFrames={mediaSet.audio.frames}
        style={overviewStyles}
        onSelectionChange={handleSelectionChange}
      ></WaveformOverview>
      <div style={wrapperProps}>
        <WaveformCanvas
          peaks={waveformPeaks}
          fillStyle="black"
          strokeStyle="green"
          style={waveformCanvasProps}
        ></WaveformCanvas>
        <canvas
          ref={hudCanvasRef}
          onMouseMove={handleMouseMove}
          onMouseDown={handleMouseDown}
          onMouseUp={handleMouseUp}
          style={hudCanvasProps}
          width={CanvasLogicalWidth}
          height={CanvasLogicalHeight}
        ></canvas>
      </div>
      <div style={controlPanelStyles}>
        <button onClick={handlePlay}>Play</button>
        <button onClick={handlePause}>Pause</button>
        <button onClick={handleZoomIn}>+</button>
        <button onClick={handleZoomOut}>-</button>
        <input type="text" readOnly style={clockTextAreaProps} />
      </div>
    </>
  );
};