// clipper/frontend/src/App.tsx
import { useCallback, useEffect, useRef, useState } from 'react';
import { firstValueFrom, from, Observable } from 'rxjs';
import { first, map } from 'rxjs/operators';
import {
  AudioFormat,
  GetPeaksProgress,
  GetVideoProgress,
  GrpcWebImpl,
  MediaSet,
  MediaSetServiceClientImpl,
} from './generated/media_set';
import { ControlBar } from './ControlBar';
import { Overview, CanvasLogicalWidth } from './Overview';
import { SeekBar } from './SeekBar';
import { VideoPreview } from './VideoPreview';
import { Waveform } from './Waveform';
import millisFromDuration from './helpers/millisFromDuration';
import './App.css';
2021-11-02 16:20:47 +00:00
// ported from backend, where should they live?
const thumbnailWidth = 177;
const thumbnailHeight = 100;
2021-10-08 14:38:35 +00:00
2021-12-06 22:52:24 +00:00
const initialViewportCanvasPixels = 100;
const apiURL = process.env.REACT_APP_API_URL || 'http://localhost:8888';
// Frames represents a range of audio frames.
2021-10-08 14:38:35 +00:00
export interface Frames {
start: number;
end: number;
}
2021-09-06 10:17:50 +00:00
2021-11-25 18:02:37 +00:00
export interface VideoPosition {
currentTime: number;
percent: number;
}
2021-11-30 19:41:34 +00:00
const video = document.createElement('video');
const audio = document.createElement('audio');
2021-09-30 19:08:48 +00:00
function App(): JSX.Element {
2021-10-08 14:38:35 +00:00
const [mediaSet, setMediaSet] = useState<MediaSet | null>(null);
const [viewport, setViewport] = useState<Frames>({ start: 0, end: 0 });
const [selection, setSelection] = useState<Frames>({ start: 0, end: 0 });
2021-11-29 17:44:31 +00:00
const [overviewPeaks, setOverviewPeaks] = useState<Observable<number[]>>(
from([])
);
2021-10-08 14:38:35 +00:00
2021-11-30 19:41:34 +00:00
// position stores the current playback position. positionRef makes it
// available inside a setInterval callback.
const [position, setPosition] = useState({ currentTime: 0, percent: 0 });
const positionRef = useRef(position);
positionRef.current = position;
2021-10-08 14:38:35 +00:00
// effects
// TODO: error handling
const videoID = new URLSearchParams(window.location.search).get('video_id');
2021-10-22 19:30:09 +00:00
if (videoID == null) {
return <></>;
}
2021-10-08 14:38:35 +00:00
// fetch mediaset on page load:
useEffect(() => {
(async function () {
const rpc = newRPC();
2021-11-02 16:20:47 +00:00
const service = new MediaSetServiceClientImpl(rpc);
const mediaSet = await service.Get({ youtubeId: videoID });
2021-10-22 19:30:09 +00:00
2021-10-29 12:52:31 +00:00
console.log('got media set:', mediaSet);
2021-11-02 16:20:47 +00:00
setMediaSet(mediaSet);
2021-10-08 14:38:35 +00:00
})();
}, []);
2021-11-30 19:41:34 +00:00
const updatePlayerPositionIntevalMillis = 30;
2021-10-08 14:38:35 +00:00
// setup player on first page load only:
useEffect(() => {
2021-11-25 18:02:37 +00:00
if (mediaSet == null) {
return;
}
2021-11-30 19:41:34 +00:00
const intervalID = setInterval(() => {
const currTime = audio.currentTime;
if (currTime == positionRef.current.currentTime) {
2021-11-25 18:02:37 +00:00
return;
}
const duration = mediaSet.audioFrames / mediaSet.audioSampleRate;
const percent = (currTime / duration) * 100;
// check if the end of selection has been passed, and pause if so:
if (
currentTimeToFrame(position.currentTime) < selection.end &&
currentTimeToFrame(currTime) >= selection.end
) {
handlePause();
}
2021-11-25 18:02:37 +00:00
// update the current position
setPosition({ currentTime: audio.currentTime, percent: percent });
2021-11-30 19:41:34 +00:00
}, updatePlayerPositionIntevalMillis);
return () => clearInterval(intervalID);
}, [mediaSet, selection]);
// bind to keypress handler.
// selection is a dependency of the handleKeyPress handler, and must be
// included here.
useEffect(() => {
document.addEventListener('keypress', handleKeyPress);
return () => document.removeEventListener('keypress', handleKeyPress);
}, [selection]);
2021-10-08 14:38:35 +00:00
2021-11-29 17:44:31 +00:00
// load audio when MediaSet is loaded:
useEffect(() => {
(async function () {
if (mediaSet == null) {
return;
}
console.log('fetching audio...');
const service = new MediaSetServiceClientImpl(newRPC());
const audioProgressStream = service.GetPeaks({
2021-11-29 17:44:31 +00:00
id: mediaSet.id,
numBins: CanvasLogicalWidth,
});
const peaks = audioProgressStream.pipe(map((progress) => progress.peaks));
setOverviewPeaks(peaks);
2021-11-30 19:41:34 +00:00
const pipe = audioProgressStream.pipe(
first((progress: GetPeaksProgress) => progress.url != '')
2021-11-30 19:41:34 +00:00
);
const progressWithURL = await firstValueFrom(pipe);
2021-11-29 17:44:31 +00:00
2021-11-30 19:41:34 +00:00
audio.src = progressWithURL.url;
2021-11-29 17:44:31 +00:00
audio.muted = false;
audio.volume = 1;
2021-11-30 19:41:34 +00:00
console.log('set audio src', progressWithURL.url);
2021-11-29 17:44:31 +00:00
})();
}, [mediaSet]);
2021-10-08 14:38:35 +00:00
// load video when MediaSet is loaded:
useEffect(() => {
(async function () {
if (mediaSet == null) {
return;
}
2021-11-02 16:20:47 +00:00
2021-11-30 19:41:34 +00:00
console.log('fetching video...');
const service = new MediaSetServiceClientImpl(newRPC());
const videoProgressStream = service.GetVideo({ id: mediaSet.id });
2021-11-30 19:41:34 +00:00
const pipe = videoProgressStream.pipe(
first((progress: GetVideoProgress) => progress.url != '')
);
const progressWithURL = await firstValueFrom(pipe);
2021-11-30 19:41:34 +00:00
video.src = progressWithURL.url;
console.log('set video src', progressWithURL.url);
})();
2021-10-08 14:38:35 +00:00
}, [mediaSet]);
// set viewport when MediaSet is loaded:
useEffect(() => {
if (mediaSet == null) {
return;
}
const numFrames = Math.min(
2021-12-06 22:52:24 +00:00
Math.round(mediaSet.audioFrames / CanvasLogicalWidth) *
initialViewportCanvasPixels,
mediaSet.audioFrames
);
setViewport({ start: 0, end: numFrames });
2021-10-08 14:38:35 +00:00
}, [mediaSet]);
useEffect(() => {
console.debug('viewport updated', viewport);
}, [viewport]);
// handlers
const handleKeyPress = useCallback(
(evt: KeyboardEvent) => {
if (evt.code != 'Space') {
return;
}
2021-11-30 19:41:34 +00:00
if (audio.paused) {
handlePlay();
} else {
handlePause();
}
},
[selection]
);
// handler called when the selection in the overview (zoom setting) is changed.
const handleOverviewSelectionChange = useCallback(
(newViewport: Frames) => {
if (mediaSet == null) {
return;
}
console.log('set new viewport', newViewport);
setViewport({ ...newViewport });
2021-10-08 14:38:35 +00:00
if (!audio.paused) {
return;
}
setPositionFromFrame(newViewport.start);
},
2021-12-13 04:10:07 +00:00
[mediaSet, audio, video, selection]
);
// handler called when the selection in the main waveform view is changed.
const handleWaveformSelectionChange = useCallback(
2021-12-13 04:10:07 +00:00
(newSelection: Frames) => {
setSelection(newSelection);
if (mediaSet == null) {
return;
}
// move playback position to start of selection
2021-12-13 04:10:07 +00:00
const ratio = newSelection.start / mediaSet.audioFrames;
const currentTime =
(mediaSet.audioFrames / mediaSet.audioSampleRate) * ratio;
audio.currentTime = currentTime;
video.currentTime = currentTime;
},
2021-12-13 04:10:07 +00:00
[mediaSet, audio, video, selection]
);
2021-10-08 14:38:35 +00:00
const handlePlay = useCallback(() => {
audio.play();
video.play();
}, [audio, video]);
const handlePause = useCallback(() => {
video.pause();
audio.pause();
if (selection.start != selection.end) {
setPositionFromFrame(selection.start);
}
}, [audio, video, selection]);
2021-12-29 15:38:25 +00:00
const handleClip = useCallback(() => {
(async function () {
console.debug('clip', selection);
if (mediaSet == null) {
return;
}
// TODO: support File System Access API fallback
const h = await window.showSaveFilePicker({ suggestedName: 'clip.mp3' });
const fileStream = await h.createWritable();
const rpc = newRPC();
const service = new MediaSetServiceClientImpl(rpc);
const stream = service.GetAudioSegment({
id: mediaSet.id,
format: AudioFormat.MP3,
startFrame: selection.start,
endFrame: selection.end,
});
await stream.forEach((p) => fileStream.write(p.audioData));
console.debug('finished writing stream');
await fileStream.close();
console.debug('closed stream');
})();
}, [mediaSet, selection]);
2022-01-07 18:51:29 +00:00
const handleZoomIn = useCallback(() => {
if (mediaSet == null) {
return;
}
if (viewport.start == viewport.end) {
return;
}
setViewport({
...viewport,
end: viewport.end - Math.round((viewport.end - viewport.start) / 2),
});
}, [mediaSet, viewport]);
const handleZoomOut = useCallback(() => {
if (mediaSet == null) {
return;
}
if (viewport.start == viewport.end) {
return;
}
let end = viewport.end + Math.round(viewport.end - viewport.start);
if (end > mediaSet.audioFrames) {
end = mediaSet.audioFrames;
}
setViewport({
...viewport,
end: end,
});
}, [mediaSet, viewport]);
const setPositionFromFrame = useCallback(
(frame: number) => {
if (mediaSet == null) {
return;
}
const ratio = frame / mediaSet.audioFrames;
const currentTime =
(mediaSet.audioFrames / mediaSet.audioSampleRate) * ratio;
audio.currentTime = currentTime;
video.currentTime = currentTime;
},
[mediaSet, audio, video]
);
// helpers
const currentTimeToFrame = useCallback(
(currentTime: number): number => {
if (mediaSet == null) {
return 0;
}
const dur = mediaSet.audioFrames / mediaSet.audioSampleRate;
const ratio = currentTime / dur;
return Math.round(mediaSet.audioFrames * ratio);
},
[mediaSet]
);
2021-10-08 14:38:35 +00:00
// render component
const containerStyles = {
border: '1px solid black',
width: '90%',
margin: '1em auto',
minHeight: '500px',
height: '700px',
display: 'flex',
flexDirection: 'column',
} as React.CSSProperties;
2021-11-02 16:20:47 +00:00
const offsetPixels = Math.floor(thumbnailWidth / 2);
2021-10-08 14:38:35 +00:00
if (mediaSet == null) {
// TODO: improve
return <></>;
}
2021-09-06 10:17:50 +00:00
return (
2021-10-08 14:38:35 +00:00
<>
<div className="App">
<div style={containerStyles}>
2021-12-29 15:38:25 +00:00
<ControlBar
onPlay={handlePlay}
onPause={handlePause}
onClip={handleClip}
2022-01-07 18:51:29 +00:00
onZoomIn={handleZoomIn}
onZoomOut={handleZoomOut}
2021-12-29 15:38:25 +00:00
/>
2021-10-08 14:38:35 +00:00
<Overview
2021-11-29 17:44:31 +00:00
peaks={overviewPeaks}
2021-10-08 14:38:35 +00:00
mediaSet={mediaSet}
offsetPixels={offsetPixels}
height={80}
2021-11-30 19:41:34 +00:00
viewport={viewport}
2021-10-08 14:38:35 +00:00
position={position}
onSelectionChange={handleOverviewSelectionChange}
/>
<Waveform
mediaSet={mediaSet}
position={position}
viewport={viewport}
offsetPixels={offsetPixels}
onSelectionChange={handleWaveformSelectionChange}
2021-10-08 14:38:35 +00:00
/>
<SeekBar
position={video.currentTime}
2021-11-02 16:20:47 +00:00
duration={mediaSet.audioFrames / mediaSet.audioSampleRate}
2021-10-08 14:38:35 +00:00
offsetPixels={offsetPixels}
onPositionChanged={(position: number) => {
video.currentTime = position;
2021-11-29 17:44:31 +00:00
audio.currentTime = position;
2021-10-08 14:38:35 +00:00
}}
/>
<VideoPreview
2021-11-21 19:43:40 +00:00
mediaSet={mediaSet}
2021-10-08 14:38:35 +00:00
video={video}
position={position}
2021-11-02 16:20:47 +00:00
duration={millisFromDuration(mediaSet.videoDuration)}
height={thumbnailHeight}
2021-10-08 14:38:35 +00:00
/>
</div>
2021-12-11 16:25:30 +00:00
<ul style={{ listStyleType: 'none' } as React.CSSProperties}>
<li>Frames: {mediaSet.audioFrames}</li>
<li>
Viewport (frames): {viewport.start} to {viewport.end}
</li>
<li>
Selection (frames): {selection.start} to {selection.end}
</li>
<li>
Position (frames):{' '}
{Math.round(mediaSet.audioFrames * (position.percent / 100))}
</li>
<li>Position (seconds): {position.currentTime}</li>
<li></li>
</ul>
2021-10-08 14:38:35 +00:00
</div>
</>
2021-09-06 10:17:50 +00:00
);
}
export default App;
2021-11-02 16:20:47 +00:00
export function newRPC(): GrpcWebImpl {
return new GrpcWebImpl(apiURL, {});
}