// clipper/frontend/src/App.tsx

import {
GrpcWebImpl,
MediaSetServiceClientImpl,
GetVideoProgress,
GetPeaksProgress,
} from './generated/media_set';
import { useEffect, useCallback, useReducer } from 'react';
import { State, stateReducer, zoomFactor } from './AppState';
import { AudioFormat } from './generated/media_set';
import { VideoPreview } from './VideoPreview';
import { WaveformCanvas } from './WaveformCanvas';
import { HudCanvas } from './HudCanvas';
import { Player, PlayState } from './Player';
import {
CanvasLogicalWidth,
CanvasLogicalHeight,
EmptySelectionAction,
} from './HudCanvasState';
import { ControlBar } from './ControlBar';
import { SeekBar } from './SeekBar';
import { firstValueFrom, from, Observable } from 'rxjs';
import { first, map, bufferCount } from 'rxjs/operators';
import millisFromDuration from './helpers/millisFromDuration';
import { canZoomViewportIn, canZoomViewportOut } from './helpers/zoom';
import toHHMMSS from './helpers/toHHMMSS';
import framesToDuration from './helpers/framesToDuration';
import frameToWaveformCanvasX from './helpers/frameToWaveformCanvasX';
import { ClockIcon, ExternalLinkIcon } from '@heroicons/react/solid';
// Constants ported from the backend; TODO: find a shared home for them.
// Thumbnail width in pixels (rendered height is 100).
const thumbnailWidth = 177; // height 100
// Base URL of the backend API; falls back to the local dev server.
const apiURL = process.env.REACT_APP_API_URL || 'http://localhost:8888';
/** Frames represents a range of audio frames. */
export interface Frames {
  /** First frame of the range. */
  start: number;
  /** End frame of the range (NOTE(review): inclusivity not specified here — confirm against backend). */
  end: number;
}
/**
 * VideoPosition describes a playback position, both as an absolute time and
 * as a percentage of the total duration.
 */
export interface VideoPosition {
  /** Current playback time (presumably seconds, matching HTMLMediaElement.currentTime — confirm). */
  currentTime: number;
  /** Position as a percentage (0-100) of the total duration. */
  percent: number;
}
// Initial reducer state: empty selection and viewport, no media loaded,
// playback paused.
const initialState: State = {
  selection: { start: 0, end: 0 }, // selected frame range (empty)
  viewport: { start: 0, end: 0 }, // visible frame range (empty)
  overviewPeaks: from([]), // peaks for the whole-media overview canvas
  waveformPeaks: from([]), // peaks for the zoomed waveform canvas
  selectionCanvas: { x1: 0, x2: 0 }, // selection in canvas x-coordinates
  viewportCanvas: { x1: 0, x2: 0 }, // viewport in canvas x-coordinates
  position: { currentTime: 0, frame: 0, percent: 0 }, // playback position
  audioSrc: '',
  videoSrc: '',
  currentTime: 0,
  playState: PlayState.Paused,
};
// App is the top-level component. It loads the MediaSet named by the
// `video_id` query parameter, streams peak data and media URLs from the
// gRPC-web backend, and renders the player, waveform/HUD canvases and
// transport controls.
function App(): JSX.Element {
  const [state, dispatch] = useReducer(stateReducer, { ...initialState });
  const {
    mediaSet,
    waveformPeaks,
    overviewPeaks,
    selection,
    selectionCanvas,
    viewport,
    viewportCanvas,
    position,
    playState,
  } = state;

  // The requested video ID, taken from the page URL. Stable for the lifetime
  // of the page.
  const videoID = new URLSearchParams(window.location.search).get('video_id');

  // effects
  // TODO: error handling

  // Fetch the MediaSet on page load, then start the audio-peaks and video
  // streams concurrently, dispatching as each yields a usable URL.
  //
  // BUGFIX: the component previously returned early (above the hooks) when
  // videoID was null, which violates the Rules of Hooks (hooks must run
  // unconditionally on every render). The guard now lives inside this effect
  // and the render path bails out after all hooks have run.
  useEffect(() => {
    (async function () {
      if (videoID == null) {
        return;
      }
      const rpc = newRPC();
      const service = new MediaSetServiceClientImpl(rpc);
      const mediaSet = await service.Get({ youtubeId: videoID });
      console.log('got media set:', mediaSet);
      dispatch({ type: 'mediasetloaded', mediaSet: mediaSet });

      // fetch audio asynchronously
      console.log('fetching audio...');
      const audioProgressStream = service.GetPeaks({
        id: mediaSet.id,
        numBins: CanvasLogicalWidth,
      });
      const peaks = audioProgressStream.pipe(map((progress) => progress.peaks));
      dispatch({ type: 'overviewpeaksloaded', peaks: peaks });
      // Resolve once the backend reports a non-empty audio URL.
      const audioPipe = audioProgressStream.pipe(
        first((progress: GetPeaksProgress) => progress.url !== '')
      );
      const fetchAudioTask = firstValueFrom(audioPipe);

      // fetch video asynchronously
      console.log('fetching video...');
      const videoProgressStream = service.GetVideo({ id: mediaSet.id });
      const videoPipe = videoProgressStream.pipe(
        first((progress: GetVideoProgress) => progress.url !== '')
      );
      const fetchVideoTask = firstValueFrom(videoPipe);

      // wait for both audio, then video.
      const audioProgress = await fetchAudioTask;
      dispatch({
        type: 'audiosourceloaded',
        src: audioProgress.url,
        numFrames: audioProgress.audioFrames,
      });

      const videoProgress = await fetchVideoTask;
      dispatch({ type: 'videosourceloaded', src: videoProgress.url });
    })();
  }, [videoID]);

  // load waveform peaks on MediaSet change
  useEffect(() => {
    (async function () {
      const { mediaSet, viewport } = state;
      if (mediaSet == null) {
        return;
      }
      // An empty or inverted viewport has nothing to render.
      if (viewport.start >= viewport.end) {
        return;
      }
      const service = new MediaSetServiceClientImpl(newRPC());
      const segment = await service.GetPeaksForSegment({
        id: mediaSet.id,
        numBins: CanvasLogicalWidth,
        startFrame: viewport.start,
        endFrame: viewport.end,
      });
      console.log('got segment', segment);
      // The backend returns channel values interleaved; regroup them into
      // one number[] per bin.
      const peaks: Observable<number[]> = from(segment.peaks).pipe(
        bufferCount(mediaSet.audioChannels)
      );
      dispatch({ type: 'waveformpeaksloaded', peaks: peaks });
    })();
  }, [viewport, mediaSet]);

  // Bind the keypress handler. Deliberately re-subscribed on every render
  // (no deps array) so the handler always closes over the current playState.
  useEffect(() => {
    document.addEventListener('keypress', handleKeyPress);
    return () => document.removeEventListener('keypress', handleKeyPress);
  });

  useEffect(() => {
    console.debug('viewport updated', viewport);
  }, [viewport]);

  // handlers

  const togglePlay = () => (playState === PlayState.Paused ? play() : pause());
  const play = () => dispatch({ type: 'play' });
  const pause = () => dispatch({ type: 'pause' });

  // Space toggles play/pause; all other keys are ignored.
  const handleKeyPress = (evt: KeyboardEvent) => {
    if (evt.code !== 'Space') {
      return;
    }
    togglePlay();
  };

  // Download the current selection as a clip, preferring the File System
  // Access API where the browser supports it.
  const handleClip = () => {
    if (!window.showSaveFilePicker) {
      downloadClipHTTP();
      return;
    }
    downloadClipFileSystemAccessAPI();
  };

  // Fallback download path: POST a hidden form so the browser treats the
  // response as a file download.
  const downloadClipHTTP = () => {
    if (mediaSet == null) {
      return;
    }
    console.debug('clip http', selection);

    // Small helper to build one hidden form field.
    const hiddenInput = (name: string, value: string): HTMLInputElement => {
      const input = document.createElement('input');
      input.type = 'hidden';
      input.name = name;
      input.value = value;
      return input;
    };

    const form = document.createElement('form');
    form.method = 'POST';
    form.action = `${apiURL}/api/media_sets/${mediaSet.id}/clip`;
    form.appendChild(hiddenInput('start_frame', String(selection.start)));
    form.appendChild(hiddenInput('end_frame', String(selection.end)));
    form.appendChild(hiddenInput('format', 'mp3'));

    document.body.appendChild(form);
    form.submit();
    // BUGFIX: remove the form again so repeated downloads don't accumulate
    // hidden forms in the DOM.
    document.body.removeChild(form);
  };

  // Preferred download path: stream the clip over gRPC-web straight into a
  // file chosen via the File System Access API.
  const downloadClipFileSystemAccessAPI = () => {
    (async function () {
      if (mediaSet == null) {
        return;
      }
      console.debug('clip grpc', selection);

      const h = await window.showSaveFilePicker({ suggestedName: 'clip.mp3' });
      const fileStream = await h.createWritable();
      const rpc = newRPC();
      const service = new MediaSetServiceClientImpl(rpc);

      const stream = service.GetAudioSegment({
        id: mediaSet.id,
        format: AudioFormat.MP3,
        startFrame: selection.start,
        endFrame: selection.end,
      });

      await stream.forEach((p) => fileStream.write(p.audioData));
      console.debug('finished writing stream');

      await fileStream.close();
      console.debug('closed stream');
    })();
  };

  // Human-readable duration for the header: the total media duration, or
  // "Selected <clip> of <total>" when a non-empty selection exists.
  const durationString = useCallback((): string => {
    if (!mediaSet || !mediaSet.videoDuration) {
      return '';
    }

    const totalDur = toHHMMSS(mediaSet.videoDuration);
    if (selection.start === selection.end) {
      return totalDur;
    }

    const clipDur = toHHMMSS(
      framesToDuration(
        selection.end - selection.start,
        mediaSet.audioSampleRate
      )
    );
    return `Selected ${clipDur} of ${totalDur}`;
  }, [mediaSet, selection]);

  // render component

  const offsetPixels = Math.floor(thumbnailWidth / 2);
  const marginClass = 'mx-[88px]'; // offsetPixels

  // Bail out AFTER all hooks have run (see Rules-of-Hooks note above).
  if (videoID == null) {
    return <></>;
  }
  if (mediaSet == null) {
    // TODO: improve
    return <></>;
  }

  return (
    <>
      <Player
        playState={playState}
        audioSrc={state.audioSrc}
        videoSrc={state.videoSrc}
        currentTime={state.currentTime}
        onPositionChanged={(currentTime) =>
          dispatch({ type: 'positionchanged', currentTime: currentTime })
        }
      />
      <div className="App bg-gray-800 h-screen flex flex-col">
        <header className="bg-green-900 h-16 grow-0 flex items-center mb-12 px-[88px]">
          <h1 className="text-3xl font-bold">Clipper</h1>
        </header>
        <div className="flex flex-col grow bg-gray-800 w-full h-full mx-auto">
          <div className={`flex flex-col grow ${marginClass}`}>
            <div className="flex grow-0 h-8 pt-4 pb-2 items-center space-x-2 text-white">
              <span className="text-gray-300">{mediaSet.author}</span>
              <span>/</span>
              <span>{mediaSet.title}</span>
              <a
                href={`https://www.youtube.com/watch?v=${mediaSet.youtubeId}`}
                target="_blank"
                rel="noreferrer"
                title="Open in YouTube"
              >
                <ExternalLinkIcon className="h-6 w-6 text-gray-500 hover:text-gray-200" />
              </a>
              <span className="flex grow justify-end text-gray-500">
                <ClockIcon className="h-5 w-5 mr-1 mt-0.5" />
                {durationString()}
              </span>
            </div>
            <ControlBar
              playState={playState}
              zoomInEnabled={canZoomViewportIn(viewport, selection, zoomFactor)}
              zoomOutEnabled={canZoomViewportOut(
                viewport,
                mediaSet.audioFrames
              )}
              onTogglePlay={togglePlay}
              onClip={handleClip}
              onZoomIn={() => dispatch({ type: 'zoomin' })}
              onZoomOut={() => dispatch({ type: 'zoomout' })}
              downloadClipEnabled={selection.start !== selection.end}
            />

            <div className="w-full bg-gray-600 h-6"></div>

            <div className={`relative grow-0 h-16`}>
              <WaveformCanvas
                peaks={overviewPeaks}
                channels={mediaSet.audioChannels}
                width={CanvasLogicalWidth}
                height={CanvasLogicalHeight}
                strokeStyle="black"
                fillStyle="#003300"
                alpha={1}
              ></WaveformCanvas>
              <HudCanvas
                width={CanvasLogicalWidth}
                height={CanvasLogicalHeight}
                emptySelectionAction={EmptySelectionAction.SelectPrevious}
                styles={{
                  borderLineWidth: 4,
                  borderStrokeStyle: 'red',
                  positionLineWidth: 4,
                  positionStrokeStyle: 'red',
                  hoverPositionStrokeStyle: 'transparent',
                }}
                position={(position.percent / 100) * CanvasLogicalWidth}
                selection={viewportCanvas}
                onSelectionChange={(event) =>
                  dispatch({ type: 'viewportchanged', event })
                }
              />
            </div>

            <div className={`relative grow`}>
              <WaveformCanvas
                peaks={waveformPeaks}
                channels={mediaSet.audioChannels}
                width={CanvasLogicalWidth}
                height={CanvasLogicalHeight}
                strokeStyle="green"
                fillStyle="black"
                alpha={1}
              ></WaveformCanvas>
              <HudCanvas
                width={CanvasLogicalWidth}
                height={CanvasLogicalHeight}
                emptySelectionAction={EmptySelectionAction.SelectNothing}
                styles={{
                  borderLineWidth: 0,
                  borderStrokeStyle: 'transparent',
                  positionLineWidth: 6,
                  positionStrokeStyle: 'red',
                  hoverPositionStrokeStyle: '#666666',
                }}
                position={frameToWaveformCanvasX(
                  position.frame,
                  viewport,
                  CanvasLogicalWidth
                )}
                selection={selectionCanvas}
                onSelectionChange={(event) =>
                  dispatch({
                    type: 'waveformselectionchanged',
                    event,
                  })
                }
              />
            </div>
          </div>

          <SeekBar
            position={position.currentTime}
            duration={mediaSet.audioFrames / mediaSet.audioSampleRate}
            offsetPixels={offsetPixels}
            onPositionChanged={(currentTime: number) => {
              dispatch({ type: 'skip', currentTime });
            }}
          />

          <VideoPreview
            mediaSet={mediaSet}
            video={document.createElement('video')}
            position={position}
            duration={millisFromDuration(mediaSet.videoDuration)}
          />
        </div>
      </div>
    </>
  );
}

export default App;
/**
 * newRPC constructs a gRPC-web transport pointed at the configured API URL.
 * Shared by every service client in this module.
 */
export function newRPC(): GrpcWebImpl {
  const options = {};
  return new GrpcWebImpl(apiURL, options);
}