import {
  MediaSet,
  GrpcWebImpl,
  MediaSetServiceClientImpl,
  GetVideoProgress,
  GetAudioProgress,
} from './generated/media_set';
import React, { useState, useEffect, useRef, useCallback } from 'react';
import { VideoPreview } from './VideoPreview';
import { Overview, CanvasLogicalWidth } from './Overview';
import { Waveform } from './Waveform';
import { ControlBar } from './ControlBar';
import { SeekBar } from './SeekBar';
import './App.css';
import { Duration } from './generated/google/protobuf/duration';
import { firstValueFrom, from, Observable } from 'rxjs';
import { first, map } from 'rxjs/operators';

// ported from backend, where should they live?
const thumbnailWidth = 177;
const thumbnailHeight = 100;
const initialViewportCanvasPixels = 100;

const apiURL = process.env.REACT_APP_API_URL || 'http://localhost:8888';

// Frames represents a selection of audio frames.
export interface Frames {
  start: number;
  end: number;
}

export interface VideoPosition {
  currentTime: number;
  percent: number;
}

const video = document.createElement('video');
const audio = document.createElement('audio');

function App(): JSX.Element {
  const [mediaSet, setMediaSet] = useState<MediaSet | null>(null);
  const [viewport, setViewport] = useState<Frames>({ start: 0, end: 0 });
  const [overviewPeaks, setOverviewPeaks] = useState<Observable<number[]>>(
    from([])
  );

  // selection is the selected range of the waveform, in frames. Only the
  // debug readout below reads it; the code wiring it to the Waveform
  // component is not shown here, so it stays at its initial value.
  const [selection] = useState<Frames>({ start: 0, end: 0 });

  // position stores the current playback position. positionRef makes it
  // available inside a setInterval callback.
  const [position, setPosition] = useState<VideoPosition>({
    currentTime: 0,
    percent: 0,
  });
  const positionRef = useRef(position);
  positionRef.current = position;

  // TODO: error handling
  const videoID = new URLSearchParams(window.location.search).get('video_id');

  // effects

  // fetch mediaset on page load:
  useEffect(() => {
    (async function () {
      // Guard here, and again before rendering, rather than returning early
      // above the hooks: an early return before the hooks would violate the
      // Rules of Hooks.
      if (videoID == null) {
        return;
      }
      const rpc = newRPC();
      const service = new MediaSetServiceClientImpl(rpc);
      const mediaSet = await service.Get({ youtubeId: videoID });
      console.log('got media set:', mediaSet);
      setMediaSet(mediaSet);
    })();
  }, []);

  const updatePlayerPositionIntervalMillis = 30;

  // poll the video element to keep the position state in sync, once the
  // MediaSet is loaded:
  useEffect(() => {
    if (mediaSet == null) {
      return;
    }

    const intervalID = setInterval(() => {
      if (video.currentTime == positionRef.current.currentTime) {
        return;
      }
      const duration = mediaSet.audioFrames / mediaSet.audioSampleRate;
      const percent = (video.currentTime / duration) * 100;
      setPosition({ currentTime: video.currentTime, percent: percent });
    }, updatePlayerPositionIntervalMillis);

    return () => clearInterval(intervalID);
  }, [mediaSet]);

  // load audio when MediaSet is loaded:
  useEffect(() => {
    (async function () {
      if (mediaSet == null) {
        return;
      }

      console.log('fetching audio...');
      const service = new MediaSetServiceClientImpl(newRPC());
      const audioProgressStream = service.GetAudio({
        id: mediaSet.id,
        numBins: CanvasLogicalWidth,
      });
      const peaks = audioProgressStream.pipe(map((progress) => progress.peaks));
      setOverviewPeaks(peaks);

      const pipe = audioProgressStream.pipe(
        first((progress: GetAudioProgress) => progress.url != '')
      );
      const progressWithURL = await firstValueFrom(pipe);

      audio.src = progressWithURL.url;
      audio.muted = false;
      audio.volume = 1;
      console.log('set audio src', progressWithURL.url);
    })();
  }, [mediaSet]);
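  // Both GetAudio and GetVideo are server-streaming RPCs that emit progress
  // messages until the asset URL becomes available. The idiom used above and
  // below converts the stream into a promise for the first usable message
  // (variable names illustrative):
  //
  //   const withURL = await firstValueFrom(
  //     stream.pipe(first((progress) => progress.url != ''))
  //   );
  //
  // Note: if the stream completes without ever emitting a URL, first() errors
  // with EmptyError and the awaited promise rejects; that case is not handled
  // here.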
  // load video when MediaSet is loaded:
  useEffect(() => {
    (async function () {
      if (mediaSet == null) {
        return;
      }

      console.log('fetching video...');
      const service = new MediaSetServiceClientImpl(newRPC());
      const videoProgressStream = service.GetVideo({ id: mediaSet.id });
      const pipe = videoProgressStream.pipe(
        first((progress: GetVideoProgress) => progress.url != '')
      );
      const progressWithURL = await firstValueFrom(pipe);

      video.src = progressWithURL.url;
      console.log('set video src', progressWithURL.url);
    })();
  }, [mediaSet]);

  // set viewport when MediaSet is loaded:
  useEffect(() => {
    if (mediaSet == null) {
      return;
    }

    const numFrames = Math.min(
      Math.round(mediaSet.audioFrames / CanvasLogicalWidth) *
        initialViewportCanvasPixels,
      mediaSet.audioFrames
    );

    setViewport({ start: 0, end: numFrames });
  }, [mediaSet]);

  useEffect(() => {
    console.debug('viewport updated', viewport);
  }, [viewport]);

  // handlers

  const handleOverviewSelectionChange = (newViewport: Frames) => {
    if (mediaSet == null) {
      return;
    }
    console.log('set new viewport', newViewport);
    setViewport({ ...newViewport });

    if (!audio.paused) {
      return;
    }

    const ratio = newViewport.start / mediaSet.audioFrames;
    const currentTime =
      (mediaSet.audioFrames / mediaSet.audioSampleRate) * ratio;
    audio.currentTime = currentTime;
    video.currentTime = currentTime;
  };

  const handlePlay = useCallback(() => {
    audio.play();
    video.play();
  }, [audio, video]);

  const handlePause = useCallback(() => {
    video.pause();
    audio.pause();
  }, [audio, video]);

  // render component

  const containerStyles = {
    border: '1px solid black',
    width: '90%',
    margin: '1em auto',
    minHeight: '500px',
    height: '700px',
    display: 'flex',
    flexDirection: 'column',
  } as React.CSSProperties;

  const offsetPixels = Math.floor(thumbnailWidth / 2);

  if (videoID == null || mediaSet == null) {
    // TODO: improve
    return <></>;
  }

  return (
    <>
      <div className="App" style={containerStyles}>
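        {/* The container div above and the props passed to the components
            below are assumptions inferred from the imports, handlers and
            constants defined in this file; only the SeekBar seek callback
            and the debug list are taken verbatim from the original. */}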
        <ControlBar onPlay={handlePlay} onPause={handlePause} />
        <Overview
          mediaSet={mediaSet}
          peaks={overviewPeaks}
          viewport={viewport}
          position={position}
          onSelectionChange={handleOverviewSelectionChange}
        />
        <Waveform
          mediaSet={mediaSet}
          viewport={viewport}
          position={position}
          offsetPixels={offsetPixels}
        />
        <SeekBar
          position={position.currentTime}
          duration={mediaSet.audioFrames / mediaSet.audioSampleRate}
          offsetPixels={offsetPixels}
          onPositionChanged={(position: number) => {
            video.currentTime = position;
            audio.currentTime = position;
          }}
        />
        <VideoPreview
          mediaSet={mediaSet}
          video={video}
          position={position}
          thumbnailWidth={thumbnailWidth}
          thumbnailHeight={thumbnailHeight}
        />
        <ul>
          <li>Frames: {mediaSet.audioFrames}</li>
          <li>
            Viewport (frames): {viewport.start} to {viewport.end}
          </li>
          <li>
            Selection (frames): {selection.start} to {selection.end}
          </li>
          <li>
            Position (frames):{' '}
            {Math.round(mediaSet.audioFrames * (position.percent / 100))}
          </li>
          <li>Position (seconds): {position.currentTime}</li>
        </ul>
      </div>
    </>
  );
}

export default App;

function millisFromDuration(dur?: Duration): number {
  if (dur == undefined) {
    return 0;
  }
  return Math.floor(dur.seconds * 1000.0 + dur.nanos / 1000.0 / 1000.0);
}

export function newRPC(): GrpcWebImpl {
  return new GrpcWebImpl(apiURL, {});
}
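// Illustrative usage of the helpers above (not part of the original flow):
//
//   millisFromDuration({ seconds: 1, nanos: 500_000_000 }); // => 1500
//   const service = new MediaSetServiceClientImpl(newRPC());
//
// millisFromDuration is currently unreferenced in this file. Duration is the
// generated google.protobuf.Duration message, whose seconds and nanos fields
// are plain numbers in this generated code, as the arithmetic above assumes.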