// clipper/frontend/src/App.tsx
import {
MediaSet,
GrpcWebImpl,
MediaSetServiceClientImpl,
GetVideoProgress,
GetPeaksProgress,
} from './generated/media_set';
import { useState, useEffect, useRef, useCallback } from 'react';
import { AudioFormat } from './generated/media_set';
import { VideoPreview } from './VideoPreview';
import { Overview, CanvasLogicalWidth } from './Overview';
import { Waveform } from './Waveform';
import { SelectionChangeEvent } from './HudCanvas';
import { Selection, SelectionMode } from './HudCanvasState';
import { ControlBar } from './ControlBar';
import { SeekBar } from './SeekBar';
import { firstValueFrom, from, Observable } from 'rxjs';
import { first, map } from 'rxjs/operators';
import millisFromDuration from './helpers/millisFromDuration';
import {
canZoomViewportIn,
canZoomViewportOut,
zoomViewportIn,
zoomViewportOut,
} from './helpers/zoom';
import toHHMMSS from './helpers/toHHMMSS';
import framesToDuration from './helpers/framesToDuration';
import { ClockIcon, ExternalLinkIcon } from '@heroicons/react/solid';
// ported from backend, where should they live?
// Width in pixels of a single video thumbnail (rendered height is 100).
const thumbnailWidth = 177; // height 100
// Initial width of the zoom viewport, in overview-canvas logical pixels.
const initialViewportCanvasPixels = 100;
// Multiplier applied on each zoom-in / zoom-out step.
const zoomFactor = 2;
// Backend API base URL; overridable at build time via REACT_APP_API_URL.
const apiURL = process.env.REACT_APP_API_URL || 'http://localhost:8888';

// Frames represents a range of audio frames.
export interface Frames {
  start: number;
  end: number;
}

// VideoPosition describes the playback position both in seconds elapsed
// (currentTime) and as a percentage of the total duration (percent).
export interface VideoPosition {
  currentTime: number;
  percent: number;
}

// PlayState enumerates the possible playback states.
export enum PlayState {
  Paused,
  Playing,
}

// The media elements live at module level (outside the component) so their
// playback state survives re-renders. The audio element drives the position
// polling in App; the video element is rendered via VideoPreview.
const video = document.createElement('video');
const audio = document.createElement('audio');
/**
 * Top-level Clipper UI. Loads a MediaSet identified by the `video_id` query
 * parameter, streams peak/audio/video data from the backend over gRPC-Web,
 * and wires together the overview, waveform, control bar, seek bar and
 * video preview.
 */
function App(): JSX.Element {
  const [mediaSet, setMediaSet] = useState<MediaSet | null>(null);
  const [viewport, setViewport] = useState<Frames>({ start: 0, end: 0 });
  const [selection, setSelection] = useState<Frames>({ start: 0, end: 0 });
  const [overviewPeaks, setOverviewPeaks] = useState<Observable<number[]>>(
    from([])
  );
  const [playState, setPlayState] = useState(PlayState.Paused);

  // position stores the current playback position. positionRef makes it
  // available inside a setInterval callback.
  const [position, setPosition] = useState({ currentTime: 0, percent: 0 });
  const positionRef = useRef(position);
  positionRef.current = position;

  // BUGFIX: the missing-video_id early return previously happened here,
  // *before* the useEffect/useCallback calls below. A conditional return
  // ahead of hooks violates the Rules of Hooks (hook call order must be
  // identical on every render). The guard now lives after all hooks, just
  // before the JSX return, and the fetch effect below no-ops when the
  // parameter is absent.
  const videoID = new URLSearchParams(window.location.search).get('video_id');

  // effects

  // TODO: error handling

  // fetch mediaset on page load:
  useEffect(() => {
    if (videoID == null) {
      // No video requested: render nothing (see guard before the JSX below).
      return;
    }
    (async function () {
      const rpc = newRPC();
      const service = new MediaSetServiceClientImpl(rpc);
      const mediaSet = await service.Get({ youtubeId: videoID });
      console.log('got media set:', mediaSet);
      setMediaSet(mediaSet);

      // fetch audio asynchronously
      console.log('fetching audio...');
      const audioProgressStream = service.GetPeaks({
        id: mediaSet.id,
        numBins: CanvasLogicalWidth,
      });
      const peaks = audioProgressStream.pipe(map((progress) => progress.peaks));
      setOverviewPeaks(peaks);
      // resolves once the backend reports a non-empty audio URL:
      const audioPipe = audioProgressStream.pipe(
        first((progress: GetPeaksProgress) => progress.url != '')
      );
      const fetchAudioTask = firstValueFrom(audioPipe);

      // fetch video asynchronously
      console.log('fetching video...');
      const videoProgressStream = service.GetVideo({ id: mediaSet.id });
      const videoPipe = videoProgressStream.pipe(
        first((progress: GetVideoProgress) => progress.url != '')
      );
      const fetchVideoTask = firstValueFrom(videoPipe);

      // wait for both audio, then video.
      const audioProgress = await fetchAudioTask;
      audio.src = audioProgress.url;
      audio.muted = false;
      audio.volume = 1;
      console.log('set audio src', audioProgress.url);
      // audioFrames is only known once peak generation completes:
      setMediaSet({ ...mediaSet, audioFrames: audioProgress.audioFrames });

      const videoProgress = await fetchVideoTask;
      video.src = videoProgress.url;
      console.log('set video src', videoProgress.url);
    })();
  }, []);

  const updatePlayerPositionIntervalMillis = 20;

  // Poll the audio element for the playback position (the audio element is
  // the playback clock), updating `position` state and auto-pausing when
  // playback crosses the end of the current selection.
  useEffect(() => {
    if (mediaSet == null) {
      return;
    }
    const intervalID = setInterval(() => {
      const currTime = audio.currentTime;
      if (currTime == positionRef.current.currentTime) {
        // position unchanged since last tick; avoid a redundant re-render.
        return;
      }
      const duration = mediaSet.audioFrames / mediaSet.audioSampleRate;
      const percent = (currTime / duration) * 100;

      // check if the end of selection has been passed, and pause if so:
      if (
        selection.start != selection.end &&
        currentTimeToFrame(positionRef.current.currentTime) < selection.end &&
        currentTimeToFrame(currTime) >= selection.end
      ) {
        pause();
      }

      // update the current position
      setPosition({ currentTime: audio.currentTime, percent: percent });
    }, updatePlayerPositionIntervalMillis);
    return () => clearInterval(intervalID);
  }, [mediaSet, selection]);

  // bind to keypress handler.
  // selection is a dependency of the handleKeyPress handler, and must be
  // included here.
  useEffect(() => {
    document.addEventListener('keypress', handleKeyPress);
    return () => document.removeEventListener('keypress', handleKeyPress);
  }, [selection, playState]);

  // set viewport when MediaSet is loaded:
  useEffect(() => {
    if (mediaSet == null) {
      return;
    }
    // Initial viewport: initialViewportCanvasPixels worth of overview canvas,
    // clamped to the total number of audio frames.
    const numFrames = Math.min(
      Math.round(mediaSet.audioFrames / CanvasLogicalWidth) *
        initialViewportCanvasPixels,
      mediaSet.audioFrames
    );
    setViewport({ start: 0, end: numFrames });
  }, [mediaSet]);

  useEffect(() => {
    console.debug('viewport updated', viewport);
  }, [viewport]);

  // handlers

  // Space toggles play/pause.
  const handleKeyPress = (evt: KeyboardEvent) => {
    if (evt.code != 'Space') {
      return;
    }
    togglePlay();
  };

  // handler called when the selection in the overview (zoom setting) is changed.
  const handleOverviewSelectionChange = ({
    selection: newViewport,
  }: SelectionChangeEvent) => {
    if (mediaSet == null) {
      return;
    }
    console.log('set new viewport', newViewport);
    setViewport({ ...newViewport });

    // only snap the playhead to the viewport start while paused:
    if (!audio.paused) {
      return;
    }
    setPositionFromFrame(newViewport.start);
  };

  // Decide whether the playhead should be moved to the start of a new
  // waveform selection, based on the selection-mode transition.
  const setPositionAfterSelectionChange = (
    newSelection: Selection,
    mode: SelectionMode,
    prevMode: SelectionMode
  ): boolean => {
    // if creating a new selection from scratch, reset position on mouseup.
    if (prevMode == SelectionMode.Selecting && mode == SelectionMode.Normal) {
      return true;
    }
    // if re-entering normal mode, reset position if the current position is
    // outside the new selection on mouseup.
    if (prevMode != SelectionMode.Normal && mode == SelectionMode.Normal) {
      const currFrame = currentTimeToFrame(positionRef.current.currentTime);
      if (currFrame < newSelection.start || currFrame > newSelection.end) {
        return true;
      }
    }
    return false;
  };

  // handler called when the selection in the main waveform view is changed.
  const handleWaveformSelectionChange = ({
    selection: newSelection,
    mode,
    prevMode,
  }: SelectionChangeEvent) => {
    setSelection(newSelection);
    if (setPositionAfterSelectionChange(newSelection, mode, prevMode)) {
      setPositionFromFrame(newSelection.start);
    }
  };

  const togglePlay = () => {
    if (playState == PlayState.Paused) {
      play();
    } else {
      pause();
    }
  };

  const play = () => {
    audio.play();
    video.play();
    setPlayState(PlayState.Playing);
  };

  // Pausing also rewinds to the selection start (selection.start is 0 when
  // there is no selection).
  const pause = () => {
    video.pause();
    audio.pause();
    setPositionFromFrame(selection.start);
    setPlayState(PlayState.Paused);
  };

  // Download the selected clip, preferring the File System Access API where
  // the browser supports it and falling back to a plain form POST.
  const handleClip = () => {
    if (!window.showSaveFilePicker) {
      downloadClipHTTP();
      return;
    }
    downloadClipFileSystemAccessAPI();
  };

  // Fallback download path: submit a hidden form so the browser handles the
  // response as a file download.
  const downloadClipHTTP = () => {
    (async function () {
      if (mediaSet == null) {
        return;
      }
      console.debug('clip http', selection);

      const form = document.createElement('form');
      form.method = 'POST';
      form.action = `${apiURL}/api/media_sets/${mediaSet.id}/clip`;

      const startFrameInput = document.createElement('input');
      startFrameInput.type = 'hidden';
      startFrameInput.name = 'start_frame';
      startFrameInput.value = String(selection.start);
      form.appendChild(startFrameInput);

      const endFrameInput = document.createElement('input');
      endFrameInput.type = 'hidden';
      endFrameInput.name = 'end_frame';
      endFrameInput.value = String(selection.end);
      form.appendChild(endFrameInput);

      const formatInput = document.createElement('input');
      formatInput.type = 'hidden';
      formatInput.name = 'format';
      formatInput.value = 'mp3';
      form.appendChild(formatInput);

      document.body.appendChild(form);
      form.submit();
    })();
  };

  // Preferred download path: stream the clip over gRPC-Web straight into a
  // file handle obtained from the save-file picker.
  const downloadClipFileSystemAccessAPI = () => {
    (async function () {
      if (mediaSet == null) {
        return;
      }
      console.debug('clip grpc', selection);

      const h = await window.showSaveFilePicker({ suggestedName: 'clip.mp3' });
      const fileStream = await h.createWritable();
      const rpc = newRPC();
      const service = new MediaSetServiceClientImpl(rpc);

      const stream = service.GetAudioSegment({
        id: mediaSet.id,
        format: AudioFormat.MP3,
        startFrame: selection.start,
        endFrame: selection.end,
      });

      await stream.forEach((p) => fileStream.write(p.audioData));
      console.debug('finished writing stream');

      await fileStream.close();
      console.debug('closed stream');
    })();
  };

  const handleZoomIn = () => {
    if (mediaSet == null) {
      return;
    }
    const newViewport = zoomViewportIn(
      viewport,
      mediaSet.audioFrames,
      selection,
      currentTimeToFrame(positionRef.current.currentTime),
      zoomFactor
    );
    setViewport(newViewport);
  };

  const handleZoomOut = () => {
    if (mediaSet == null) {
      return;
    }
    const newViewport = zoomViewportOut(
      viewport,
      mediaSet.audioFrames,
      selection,
      currentTimeToFrame(positionRef.current.currentTime),
      zoomFactor
    );
    setViewport(newViewport);
  };

  // Seek both media elements to the time corresponding to an audio frame.
  const setPositionFromFrame = useCallback(
    (frame: number) => {
      if (mediaSet == null) {
        return;
      }
      const ratio = frame / mediaSet.audioFrames;
      const currentTime =
        (mediaSet.audioFrames / mediaSet.audioSampleRate) * ratio;
      audio.currentTime = currentTime;
      video.currentTime = currentTime;
    },
    [mediaSet]
  );

  // helpers

  // Convert a playback time in seconds to the nearest audio frame index.
  const currentTimeToFrame = useCallback(
    (currentTime: number): number => {
      if (mediaSet == null) {
        return 0;
      }
      const dur = mediaSet.audioFrames / mediaSet.audioSampleRate;
      const ratio = currentTime / dur;
      return Math.round(mediaSet.audioFrames * ratio);
    },
    [mediaSet]
  );

  // Human-readable duration label: either the total video duration, or
  // "Selected <clip> of <total>" when a selection exists.
  const durationString = useCallback((): string => {
    if (!mediaSet || !mediaSet.videoDuration) {
      return '';
    }
    const totalDur = toHHMMSS(mediaSet.videoDuration);
    if (selection.start == selection.end) {
      return totalDur;
    }
    const clipDur = toHHMMSS(
      framesToDuration(
        selection.end - selection.start,
        mediaSet.audioSampleRate
      )
    );
    return `Selected ${clipDur} of ${totalDur}`;
  }, [mediaSet, selection]);

  // render component

  const offsetPixels = Math.floor(thumbnailWidth / 2);
  const marginClass = 'mx-[88px]'; // offsetPixels

  // All hooks have run by this point, so the conditional return is safe.
  if (videoID == null || mediaSet == null) {
    // TODO: improve
    return <></>;
  }

  return (
    <>
      <div className="App bg-gray-800 h-screen flex flex-col">
        <header className="bg-green-900 h-16 grow-0 flex items-center mb-12 px-[88px]">
          <h1 className="text-3xl font-bold">Clipper</h1>
        </header>
        <div className="flex flex-col grow bg-gray-800 w-full h-full mx-auto">
          <div className={`flex flex-col grow ${marginClass}`}>
            <div className="flex grow-0 h-8 pt-4 pb-2 items-center space-x-2 text-white">
              <span className="text-gray-300">{mediaSet.author}</span>
              <span>/</span>
              <span>{mediaSet.title}</span>
              <a
                href={`https://www.youtube.com/watch?v=${mediaSet.youtubeId}`}
                target="_blank"
                rel="noreferrer"
                title="Open in YouTube"
              >
                <ExternalLinkIcon className="h-6 w-6 text-gray-500 hover:text-gray-200" />
              </a>
              <span className="flex grow justify-end text-gray-500">
                <ClockIcon className="h-5 w-5 mr-1 mt-0.5" />
                {durationString()}
              </span>
            </div>
            <ControlBar
              playState={playState}
              zoomInEnabled={canZoomViewportIn(viewport, selection, zoomFactor)}
              zoomOutEnabled={canZoomViewportOut(
                viewport,
                mediaSet.audioFrames
              )}
              onTogglePlay={togglePlay}
              onClip={handleClip}
              onZoomIn={handleZoomIn}
              onZoomOut={handleZoomOut}
              downloadClipEnabled={selection.start != selection.end}
            />
            <div className="w-full bg-gray-600 h-6"></div>
            <Overview
              peaks={overviewPeaks}
              mediaSet={mediaSet}
              viewport={viewport}
              position={position}
              onSelectionChange={handleOverviewSelectionChange}
            />
            <Waveform
              mediaSet={mediaSet}
              position={position}
              viewport={viewport}
              onSelectionChange={handleWaveformSelectionChange}
            />
          </div>
          <SeekBar
            position={video.currentTime}
            duration={mediaSet.audioFrames / mediaSet.audioSampleRate}
            offsetPixels={offsetPixels}
            onPositionChanged={(position: number) => {
              video.currentTime = position;
              audio.currentTime = position;
            }}
          />
          <VideoPreview
            mediaSet={mediaSet}
            video={video}
            position={position}
            duration={millisFromDuration(mediaSet.videoDuration)}
          />
        </div>
        <ul className="hidden">
          <li>Frames: {mediaSet.audioFrames}</li>
          <li>
            Viewport (frames): {viewport.start} to {viewport.end}
          </li>
          <li>
            Selection (frames): {selection.start} to {selection.end}
          </li>
          <li>
            Position (frames):{' '}
            {Math.round(mediaSet.audioFrames * (position.percent / 100))}
          </li>
          <li>Position (seconds): {position.currentTime}</li>
          <li></li>
        </ul>
      </div>
    </>
  );
}
export default App;

/**
 * Construct a gRPC-Web transport pointed at the configured API base URL.
 * Shared by every service client created in this module.
 */
export function newRPC(): GrpcWebImpl {
  const options = {};
  const rpc = new GrpcWebImpl(apiURL, options);
  return rpc;
}