// NOTE(review): this file appears to have been mangled in transit (likely an
// HTML-stripping step): generic type arguments (e.g. `useState<MediaSet | null>`)
// and most JSX element tags in the render section are missing, so the file will
// not compile as-is. All code tokens are preserved untouched below; review
// notes flag each suspect span. Reconstruct from the original repository
// before shipping.

import {
  MediaSet,
  GrpcWebImpl,
  MediaSetServiceClientImpl,
  GetVideoProgress,
  GetPeaksProgress,
} from './generated/media_set';
import { useState, useEffect, useRef, useCallback } from 'react';
import { AudioFormat } from './generated/media_set';
import { VideoPreview } from './VideoPreview';
import { Overview, CanvasLogicalWidth } from './Overview';
import { Waveform } from './Waveform';
import { ControlBar } from './ControlBar';
import { SeekBar } from './SeekBar';
import './App.css';
import { firstValueFrom, from, Observable } from 'rxjs';
import { first, map } from 'rxjs/operators';
// NOTE(review): millisFromDuration is never referenced in the visible code —
// presumably used in the stripped JSX below; confirm before removing.
import millisFromDuration from './helpers/millisFromDuration';

// ported from backend, where should they live?
const thumbnailWidth = 177;
const thumbnailHeight = 100;

// Width of the initial zoomed-in viewport, measured in overview-canvas pixels.
const initialViewportCanvasPixels = 100;

const apiURL = process.env.REACT_APP_API_URL || 'http://localhost:8888';

// Frames represents a range of audio frames.
export interface Frames {
  start: number;
  end: number;
}

// VideoPosition is the playback position, both in seconds and as a
// percentage of the total duration.
export interface VideoPosition {
  currentTime: number;
  percent: number;
}

// Module-level media elements: created once, outside the component, so
// playback state survives re-renders.
const video = document.createElement('video');
const audio = document.createElement('audio');

/**
 * App is the root component of the clip editor. It fetches a MediaSet by the
 * `video_id` query parameter, streams its audio/video URLs and waveform peaks
 * over gRPC-web, drives the shared module-level <audio>/<video> elements, and
 * lets the user select and download a clip of the audio.
 */
function App(): JSX.Element {
  // NOTE(review): generic argument stripped — presumably
  // useState<MediaSet | null>(null); confirm against the original.
  const [mediaSet, setMediaSet] = useState(null);
  // viewport: the zoomed range of frames shown in the main waveform.
  // NOTE(review): presumably useState<Frames>; generic stripped.
  const [viewport, setViewport] = useState({ start: 0, end: 0 });
  // selection: the frame range the user has marked for clipping.
  // NOTE(review): presumably useState<Frames>; generic stripped.
  const [selection, setSelection] = useState({ start: 0, end: 0 });
  // NOTE(review): `useState>` is a garbling artifact — presumably
  // useState<Observable<number[][]>> (peak values per overview bin); confirm.
  const [overviewPeaks, setOverviewPeaks] = useState>(
    from([])
  );

  // position stores the current playback position. positionRef makes it
  // available inside a setInterval callback.
  const [position, setPosition] = useState({ currentTime: 0, percent: 0 });
  const positionRef = useRef(position);
  positionRef.current = position;

  // effects

  // TODO: error handling
  const videoID = new URLSearchParams(window.location.search).get('video_id');
  if (videoID == null) {
    // NOTE(review): `<>` without a closing `</>` is a garbling artifact —
    // presumably `return <></>;`.
    return <>;
  }

  // fetch mediaset on page load:
  useEffect(() => {
    (async function () {
      const rpc = newRPC();
      const service = new MediaSetServiceClientImpl(rpc);
      const mediaSet = await service.Get({ youtubeId: videoID });
      console.log('got media set:', mediaSet);
      setMediaSet(mediaSet);
    })();
  }, []);

  // NOTE(review): "Inteval" typo (interval) — left untouched to keep tokens
  // intact; rename in a follow-up.
  const updatePlayerPositionIntevalMillis = 30;

  // setup player on first page load only:
  // Polls the audio element every updatePlayerPositionIntevalMillis ms and
  // mirrors its position into React state; pauses when playback crosses the
  // end of the selection.
  useEffect(() => {
    if (mediaSet == null) {
      return;
    }
    const intervalID = setInterval(() => {
      const currTime = audio.currentTime;
      // skip the state update if the position has not moved:
      if (currTime == positionRef.current.currentTime) {
        return;
      }
      const duration = mediaSet.audioFrames / mediaSet.audioSampleRate;
      const percent = (currTime / duration) * 100;

      // check if the end of selection has been passed, and pause if so:
      // NOTE(review): reads `position` from the closure (stale between effect
      // runs) rather than positionRef.current — works only because this effect
      // re-runs on [mediaSet, selection]; verify intent.
      if (
        currentTimeToFrame(position.currentTime) < selection.end &&
        currentTimeToFrame(currTime) >= selection.end
      ) {
        handlePause();
      }

      // update the current position
      setPosition({ currentTime: audio.currentTime, percent: percent });
    }, updatePlayerPositionIntevalMillis);
    return () => clearInterval(intervalID);
  }, [mediaSet, selection]);

  // bind to keypress handler.
  // selection is a dependency of the handleKeyPress handler, and must be
  // included here.
  useEffect(() => {
    document.addEventListener('keypress', handleKeyPress);
    return () => document.removeEventListener('keypress', handleKeyPress);
  }, [selection]);

  // load audio when MediaSet is loaded:
  // Streams GetPeaks progress; publishes peak data to the overview as it
  // arrives, and sets the audio element's src once a URL is available.
  useEffect(() => {
    (async function () {
      if (mediaSet == null) {
        return;
      }
      console.log('fetching audio...');
      const service = new MediaSetServiceClientImpl(newRPC());
      const audioProgressStream = service.GetPeaks({
        id: mediaSet.id,
        numBins: CanvasLogicalWidth,
      });
      const peaks = audioProgressStream.pipe(map((progress) => progress.peaks));
      setOverviewPeaks(peaks);
      // wait for the first progress message that carries a playable URL:
      const pipe = audioProgressStream.pipe(
        first((progress: GetPeaksProgress) => progress.url != '')
      );
      const progressWithURL = await firstValueFrom(pipe);
      audio.src = progressWithURL.url;
      audio.muted = false;
      audio.volume = 1;
      console.log('set audio src', progressWithURL.url);
    })();
  }, [mediaSet]);

  // load video when MediaSet is loaded:
  useEffect(() => {
    (async function () {
      if (mediaSet == null) {
        return;
      }
      console.log('fetching video...');
      const service = new MediaSetServiceClientImpl(newRPC());
      const videoProgressStream = service.GetVideo({ id: mediaSet.id });
      const pipe = videoProgressStream.pipe(
        first((progress: GetVideoProgress) => progress.url != '')
      );
      const progressWithURL = await firstValueFrom(pipe);
      video.src = progressWithURL.url;
      console.log('set video src', progressWithURL.url);
    })();
  }, [mediaSet]);

  // set viewport when MediaSet is loaded:
  // Initial viewport spans initialViewportCanvasPixels worth of overview
  // pixels, clamped to the total frame count.
  useEffect(() => {
    if (mediaSet == null) {
      return;
    }
    const numFrames = Math.min(
      Math.round(mediaSet.audioFrames / CanvasLogicalWidth) *
        initialViewportCanvasPixels,
      mediaSet.audioFrames
    );
    setViewport({ start: 0, end: numFrames });
  }, [mediaSet]);

  useEffect(() => {
    console.debug('viewport updated', viewport);
  }, [viewport]);

  // handlers

  // Toggle play/pause on the spacebar.
  const handleKeyPress = (evt: KeyboardEvent) => {
    if (evt.code != 'Space') {
      return;
    }
    if (audio.paused) {
      handlePlay();
    } else {
      handlePause();
    }
  };

  // handler called when the selection in the overview (zoom setting) is changed.
  const handleOverviewSelectionChange = (newViewport: Frames) => {
    if (mediaSet == null) {
      return;
    }
    console.log('set new viewport', newViewport);
    setViewport({ ...newViewport });
    // only snap the playhead to the viewport start while paused:
    if (!audio.paused) {
      return;
    }
    setPositionFromFrame(newViewport.start);
  };

  // handler called when the selection in the main waveform view is changed.
  const handleWaveformSelectionChange = (newSelection: Frames) => {
    setSelection(newSelection);
    if (mediaSet == null) {
      return;
    }

    // move playback position to start of selection
    const ratio = newSelection.start / mediaSet.audioFrames;
    const currentTime =
      (mediaSet.audioFrames / mediaSet.audioSampleRate) * ratio;
    audio.currentTime = currentTime;
    video.currentTime = currentTime;
  };

  const handlePlay = () => {
    audio.play();
    video.play();
  };

  // Pause playback; if a non-empty selection exists, snap back to its start.
  const handlePause = () => {
    video.pause();
    audio.pause();
    if (selection.start != selection.end) {
      setPositionFromFrame(selection.start);
    }
  };

  // Download the selected clip, preferring the File System Access API when
  // the browser supports it, falling back to a plain HTTP form POST.
  const handleClip = () => {
    if (!window.showSaveFilePicker) {
      downloadClipHTTP();
      return;
    }
    downloadClipFileSystemAccessAPI();
  };

  // Fallback clip download: submit a hidden form so the browser handles the
  // file download natively.
  const downloadClipHTTP = () => {
    (async function () {
      if (mediaSet == null) {
        return;
      }
      console.debug('clip http', selection);
      const form = document.createElement('form');
      form.method = 'POST';
      form.action = `${apiURL}/api/media_sets/${mediaSet.id}/clip`;
      const startFrameInput = document.createElement('input');
      startFrameInput.type = 'hidden';
      startFrameInput.name = 'start_frame';
      startFrameInput.value = String(selection.start);
      form.appendChild(startFrameInput);
      const endFrameInput = document.createElement('input');
      endFrameInput.type = 'hidden';
      endFrameInput.name = 'end_frame';
      endFrameInput.value = String(selection.end);
      form.appendChild(endFrameInput);
      const formatInput = document.createElement('input');
      formatInput.type = 'hidden';
      formatInput.name = 'format';
      formatInput.value = 'mp3';
      form.appendChild(formatInput);
      document.body.appendChild(form);
      form.submit();
    })();
  };

  // Stream the clip's audio over gRPC-web and write it to a local file via
  // the File System Access API (window.showSaveFilePicker).
  const downloadClipFileSystemAccessAPI = () => {
    (async function () {
      if (mediaSet == null) {
        return;
      }
      console.debug('clip grpc', selection);
      const h = await window.showSaveFilePicker({ suggestedName: 'clip.mp3' });
      const fileStream = await h.createWritable();
      const rpc = newRPC();
      const service = new MediaSetServiceClientImpl(rpc);
      const stream = service.GetAudioSegment({
        id: mediaSet.id,
        format: AudioFormat.MP3,
        startFrame: selection.start,
        endFrame: selection.end,
      });
      // NOTE(review): fileStream.write promises are not awaited individually;
      // the final close() is assumed to flush pending writes — confirm.
      await stream.forEach((p) => fileStream.write(p.audioData));
      console.debug('finished writing stream');
      await fileStream.close();
      console.debug('closed stream');
    })();
  };

  // Halve the viewport width (zoom in), keeping its start anchored.
  const handleZoomIn = () => {
    if (mediaSet == null) {
      return;
    }
    if (viewport.start == viewport.end) {
      return;
    }
    setViewport({
      ...viewport,
      end: viewport.end - Math.round((viewport.end - viewport.start) / 2),
    });
  };

  // Double the viewport width (zoom out), clamped to the total frame count.
  const handleZoomOut = () => {
    if (mediaSet == null) {
      return;
    }
    if (viewport.start == viewport.end) {
      return;
    }
    let end = viewport.end + Math.round(viewport.end - viewport.start);
    if (end > mediaSet.audioFrames) {
      end = mediaSet.audioFrames;
    }
    setViewport({
      ...viewport,
      end: end,
    });
  };

  // Seek both media elements to the time corresponding to an audio frame.
  const setPositionFromFrame = useCallback(
    (frame: number) => {
      if (mediaSet == null) {
        return;
      }
      const ratio = frame / mediaSet.audioFrames;
      const currentTime =
        (mediaSet.audioFrames / mediaSet.audioSampleRate) * ratio;
      audio.currentTime = currentTime;
      video.currentTime = currentTime;
    },
    [mediaSet]
  );

  // helpers

  // Convert a playback time in seconds to the nearest audio frame index.
  const currentTimeToFrame = useCallback(
    (currentTime: number): number => {
      if (mediaSet == null) {
        return 0;
      }
      const dur = mediaSet.audioFrames / mediaSet.audioSampleRate;
      const ratio = currentTime / dur;
      return Math.round(mediaSet.audioFrames * ratio);
    },
    [mediaSet]
  );

  // render component

  // NOTE(review): containerStyles, offsetPixels, thumbnailHeight and the
  // VideoPreview/Overview/Waveform/ControlBar/SeekBar imports are unreferenced
  // in the visible code — presumably consumed by the stripped JSX below.
  const containerStyles = {
    border: '1px solid black',
    width: '90%',
    margin: '1em auto',
    minHeight: '500px',
    height: '700px',
    display: 'flex',
    flexDirection: 'column',
  } as React.CSSProperties;

  const offsetPixels = Math.floor(thumbnailWidth / 2);

  if (mediaSet == null) {
    // TODO: improve
    // NOTE(review): `<>` without `</>` — garbling artifact, presumably
    // `return <></>;`.
    return <>;
  }

  return (
    <>
    {/* NOTE(review): the JSX between here and the closing paren has been
        stripped of its element tags: only loose text nodes and one orphaned
        callback prop (`}} />` below) survive. It most likely rendered the
        imported VideoPreview/Overview/Waveform/ControlBar/SeekBar components
        plus a debug list; restore from the original source. */}
    { video.currentTime = position; audio.currentTime = position; }} />
  • Frames: {mediaSet.audioFrames}
  • Viewport (frames): {viewport.start} to {viewport.end}
  • Selection (frames): {selection.start} to {selection.end}
  • Position (frames):{' '}
  {Math.round(mediaSet.audioFrames * (position.percent / 100))}
  • Position (seconds): {position.currentTime}
  );
}

export default App;

/**
 * newRPC constructs a gRPC-web transport pointed at the configured API URL
 * (REACT_APP_API_URL, defaulting to http://localhost:8888).
 */
export function newRPC(): GrpcWebImpl {
  return new GrpcWebImpl(apiURL, {});
}