Proof of concept for input field with routing to video page
This commit is contained in:
parent
9ef4cc5853
commit
374137256e
|
@ -14,6 +14,7 @@
|
||||||
"google-protobuf": "^3.19.0",
|
"google-protobuf": "^3.19.0",
|
||||||
"react": "^17.0.2",
|
"react": "^17.0.2",
|
||||||
"react-dom": "^17.0.2",
|
"react-dom": "^17.0.2",
|
||||||
|
"react-router-dom": "^6.2.1",
|
||||||
"react-scripts": "4.0.3",
|
"react-scripts": "4.0.3",
|
||||||
"typescript": "^4.1.2",
|
"typescript": "^4.1.2",
|
||||||
"web-vitals": "^1.0.1"
|
"web-vitals": "^1.0.1"
|
||||||
|
|
|
@ -1,403 +1,24 @@
|
||||||
import {
|
import { BrowserRouter, Route, Routes } from "react-router-dom";
|
||||||
MediaSet,
|
import HomePage from "./components/HomePage";
|
||||||
GrpcWebImpl,
|
import VideoPage from "./components/VideoPage";
|
||||||
MediaSetServiceClientImpl,
|
import { GrpcWebImpl } from "./generated/media_set";
|
||||||
GetVideoProgress,
|
import "./App.css";
|
||||||
GetPeaksProgress,
|
|
||||||
} from './generated/media_set';
|
|
||||||
|
|
||||||
import { useState, useEffect, useRef, useCallback } from 'react';
|
const apiURL = process.env.REACT_APP_API_URL || "http://localhost:8888";
|
||||||
import { AudioFormat } from './generated/media_set';
|
|
||||||
import { VideoPreview } from './VideoPreview';
|
|
||||||
import { Overview, CanvasLogicalWidth } from './Overview';
|
|
||||||
import { Waveform } from './Waveform';
|
|
||||||
import { ControlBar } from './ControlBar';
|
|
||||||
import { SeekBar } from './SeekBar';
|
|
||||||
import './App.css';
|
|
||||||
import { Duration } from './generated/google/protobuf/duration';
|
|
||||||
import { firstValueFrom, from, Observable } from 'rxjs';
|
|
||||||
import { first, map } from 'rxjs/operators';
|
|
||||||
|
|
||||||
// ported from backend, where should they live?
|
|
||||||
const thumbnailWidth = 177;
|
|
||||||
const thumbnailHeight = 100;
|
|
||||||
|
|
||||||
const initialViewportCanvasPixels = 100;
|
|
||||||
|
|
||||||
const apiURL = process.env.REACT_APP_API_URL || 'http://localhost:8888';
|
|
||||||
|
|
||||||
// Frames represents a range of audio frames.
|
|
||||||
export interface Frames {
|
|
||||||
start: number;
|
|
||||||
end: number;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface VideoPosition {
|
|
||||||
currentTime: number;
|
|
||||||
percent: number;
|
|
||||||
}
|
|
||||||
|
|
||||||
const video = document.createElement('video');
|
|
||||||
const audio = document.createElement('audio');
|
|
||||||
|
|
||||||
function App(): JSX.Element {
|
function App(): JSX.Element {
|
||||||
const [mediaSet, setMediaSet] = useState<MediaSet | null>(null);
|
|
||||||
const [viewport, setViewport] = useState<Frames>({ start: 0, end: 0 });
|
|
||||||
const [selection, setSelection] = useState<Frames>({ start: 0, end: 0 });
|
|
||||||
const [overviewPeaks, setOverviewPeaks] = useState<Observable<number[]>>(
|
|
||||||
from([])
|
|
||||||
);
|
|
||||||
|
|
||||||
// position stores the current playback position. positionRef makes it
|
|
||||||
// available inside a setInterval callback.
|
|
||||||
const [position, setPosition] = useState({ currentTime: 0, percent: 0 });
|
|
||||||
const positionRef = useRef(position);
|
|
||||||
positionRef.current = position;
|
|
||||||
|
|
||||||
// effects
|
|
||||||
|
|
||||||
// TODO: error handling
|
|
||||||
const videoID = new URLSearchParams(window.location.search).get('video_id');
|
|
||||||
if (videoID == null) {
|
|
||||||
return <></>;
|
|
||||||
}
|
|
||||||
|
|
||||||
// fetch mediaset on page load:
|
|
||||||
useEffect(() => {
|
|
||||||
(async function () {
|
|
||||||
const rpc = newRPC();
|
|
||||||
const service = new MediaSetServiceClientImpl(rpc);
|
|
||||||
const mediaSet = await service.Get({ youtubeId: videoID });
|
|
||||||
|
|
||||||
console.log('got media set:', mediaSet);
|
|
||||||
setMediaSet(mediaSet);
|
|
||||||
})();
|
|
||||||
}, []);
|
|
||||||
|
|
||||||
const updatePlayerPositionIntevalMillis = 30;
|
|
||||||
|
|
||||||
// setup player on first page load only:
|
|
||||||
useEffect(() => {
|
|
||||||
if (mediaSet == null) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const intervalID = setInterval(() => {
|
|
||||||
const currTime = audio.currentTime;
|
|
||||||
if (currTime == positionRef.current.currentTime) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
const duration = mediaSet.audioFrames / mediaSet.audioSampleRate;
|
|
||||||
const percent = (currTime / duration) * 100;
|
|
||||||
|
|
||||||
// check if the end of selection has been passed, and pause if so:
|
|
||||||
if (
|
|
||||||
currentTimeToFrame(position.currentTime) < selection.end &&
|
|
||||||
currentTimeToFrame(currTime) >= selection.end
|
|
||||||
) {
|
|
||||||
handlePause();
|
|
||||||
}
|
|
||||||
|
|
||||||
// update the current position
|
|
||||||
setPosition({ currentTime: audio.currentTime, percent: percent });
|
|
||||||
}, updatePlayerPositionIntevalMillis);
|
|
||||||
|
|
||||||
return () => clearInterval(intervalID);
|
|
||||||
}, [mediaSet, selection]);
|
|
||||||
|
|
||||||
// bind to keypress handler.
|
|
||||||
// selection is a dependency of the handleKeyPress handler, and must be
|
|
||||||
// included here.
|
|
||||||
useEffect(() => {
|
|
||||||
document.addEventListener('keypress', handleKeyPress);
|
|
||||||
return () => document.removeEventListener('keypress', handleKeyPress);
|
|
||||||
}, [selection]);
|
|
||||||
|
|
||||||
// load audio when MediaSet is loaded:
|
|
||||||
useEffect(() => {
|
|
||||||
(async function () {
|
|
||||||
if (mediaSet == null) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
console.log('fetching audio...');
|
|
||||||
const service = new MediaSetServiceClientImpl(newRPC());
|
|
||||||
const audioProgressStream = service.GetPeaks({
|
|
||||||
id: mediaSet.id,
|
|
||||||
numBins: CanvasLogicalWidth,
|
|
||||||
});
|
|
||||||
const peaks = audioProgressStream.pipe(map((progress) => progress.peaks));
|
|
||||||
setOverviewPeaks(peaks);
|
|
||||||
|
|
||||||
const pipe = audioProgressStream.pipe(
|
|
||||||
first((progress: GetPeaksProgress) => progress.url != '')
|
|
||||||
);
|
|
||||||
const progressWithURL = await firstValueFrom(pipe);
|
|
||||||
|
|
||||||
audio.src = progressWithURL.url;
|
|
||||||
audio.muted = false;
|
|
||||||
audio.volume = 1;
|
|
||||||
console.log('set audio src', progressWithURL.url);
|
|
||||||
})();
|
|
||||||
}, [mediaSet]);
|
|
||||||
|
|
||||||
// load video when MediaSet is loaded:
|
|
||||||
useEffect(() => {
|
|
||||||
(async function () {
|
|
||||||
if (mediaSet == null) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
console.log('fetching video...');
|
|
||||||
const service = new MediaSetServiceClientImpl(newRPC());
|
|
||||||
const videoProgressStream = service.GetVideo({ id: mediaSet.id });
|
|
||||||
const pipe = videoProgressStream.pipe(
|
|
||||||
first((progress: GetVideoProgress) => progress.url != '')
|
|
||||||
);
|
|
||||||
const progressWithURL = await firstValueFrom(pipe);
|
|
||||||
|
|
||||||
video.src = progressWithURL.url;
|
|
||||||
console.log('set video src', progressWithURL.url);
|
|
||||||
})();
|
|
||||||
}, [mediaSet]);
|
|
||||||
|
|
||||||
// set viewport when MediaSet is loaded:
|
|
||||||
useEffect(() => {
|
|
||||||
if (mediaSet == null) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const numFrames = Math.min(
|
|
||||||
Math.round(mediaSet.audioFrames / CanvasLogicalWidth) *
|
|
||||||
initialViewportCanvasPixels,
|
|
||||||
mediaSet.audioFrames
|
|
||||||
);
|
|
||||||
|
|
||||||
setViewport({ start: 0, end: numFrames });
|
|
||||||
}, [mediaSet]);
|
|
||||||
|
|
||||||
useEffect(() => {
|
|
||||||
console.debug('viewport updated', viewport);
|
|
||||||
}, [viewport]);
|
|
||||||
|
|
||||||
// handlers
|
|
||||||
|
|
||||||
const handleKeyPress = useCallback(
|
|
||||||
(evt: KeyboardEvent) => {
|
|
||||||
if (evt.code != 'Space') {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (audio.paused) {
|
|
||||||
handlePlay();
|
|
||||||
} else {
|
|
||||||
handlePause();
|
|
||||||
}
|
|
||||||
},
|
|
||||||
[selection]
|
|
||||||
);
|
|
||||||
|
|
||||||
// handler called when the selection in the overview (zoom setting) is changed.
|
|
||||||
const handleOverviewSelectionChange = useCallback(
|
|
||||||
(newViewport: Frames) => {
|
|
||||||
if (mediaSet == null) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
console.log('set new viewport', newViewport);
|
|
||||||
setViewport({ ...newViewport });
|
|
||||||
|
|
||||||
if (!audio.paused) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
setPositionFromFrame(newViewport.start);
|
|
||||||
},
|
|
||||||
[mediaSet, audio, video, selection]
|
|
||||||
);
|
|
||||||
|
|
||||||
// handler called when the selection in the main waveform view is changed.
|
|
||||||
const handleWaveformSelectionChange = useCallback(
|
|
||||||
(newSelection: Frames) => {
|
|
||||||
setSelection(newSelection);
|
|
||||||
|
|
||||||
if (mediaSet == null) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// move playback position to start of selection
|
|
||||||
const ratio = newSelection.start / mediaSet.audioFrames;
|
|
||||||
const currentTime =
|
|
||||||
(mediaSet.audioFrames / mediaSet.audioSampleRate) * ratio;
|
|
||||||
audio.currentTime = currentTime;
|
|
||||||
video.currentTime = currentTime;
|
|
||||||
},
|
|
||||||
[mediaSet, audio, video, selection]
|
|
||||||
);
|
|
||||||
|
|
||||||
const handlePlay = useCallback(() => {
|
|
||||||
audio.play();
|
|
||||||
video.play();
|
|
||||||
}, [audio, video]);
|
|
||||||
|
|
||||||
const handlePause = useCallback(() => {
|
|
||||||
video.pause();
|
|
||||||
audio.pause();
|
|
||||||
|
|
||||||
if (selection.start != selection.end) {
|
|
||||||
setPositionFromFrame(selection.start);
|
|
||||||
}
|
|
||||||
}, [audio, video, selection]);
|
|
||||||
|
|
||||||
const handleClip = useCallback(() => {
|
|
||||||
(async function () {
|
|
||||||
console.debug('clip', selection);
|
|
||||||
|
|
||||||
if (mediaSet == null) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO: support File System Access API fallback
|
|
||||||
const h = await window.showSaveFilePicker({ suggestedName: 'clip.mp3' });
|
|
||||||
const fileStream = await h.createWritable();
|
|
||||||
|
|
||||||
const rpc = newRPC();
|
|
||||||
const service = new MediaSetServiceClientImpl(rpc);
|
|
||||||
const stream = service.GetAudioSegment({
|
|
||||||
id: mediaSet.id,
|
|
||||||
format: AudioFormat.MP3,
|
|
||||||
startFrame: selection.start,
|
|
||||||
endFrame: selection.end,
|
|
||||||
});
|
|
||||||
|
|
||||||
await stream.forEach((p) => fileStream.write(p.audioData));
|
|
||||||
console.debug('finished writing stream');
|
|
||||||
|
|
||||||
await fileStream.close();
|
|
||||||
console.debug('closed stream');
|
|
||||||
})();
|
|
||||||
}, [mediaSet, selection]);
|
|
||||||
|
|
||||||
const setPositionFromFrame = useCallback(
|
|
||||||
(frame: number) => {
|
|
||||||
if (mediaSet == null) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
const ratio = frame / mediaSet.audioFrames;
|
|
||||||
const currentTime =
|
|
||||||
(mediaSet.audioFrames / mediaSet.audioSampleRate) * ratio;
|
|
||||||
audio.currentTime = currentTime;
|
|
||||||
video.currentTime = currentTime;
|
|
||||||
},
|
|
||||||
[mediaSet, audio, video]
|
|
||||||
);
|
|
||||||
|
|
||||||
// helpers
|
|
||||||
|
|
||||||
const currentTimeToFrame = useCallback(
|
|
||||||
(currentTime: number): number => {
|
|
||||||
if (mediaSet == null) {
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
const dur = mediaSet.audioFrames / mediaSet.audioSampleRate;
|
|
||||||
const ratio = currentTime / dur;
|
|
||||||
return Math.round(mediaSet.audioFrames * ratio);
|
|
||||||
},
|
|
||||||
[mediaSet]
|
|
||||||
);
|
|
||||||
|
|
||||||
// render component
|
|
||||||
|
|
||||||
const containerStyles = {
|
|
||||||
border: '1px solid black',
|
|
||||||
width: '90%',
|
|
||||||
margin: '1em auto',
|
|
||||||
minHeight: '500px',
|
|
||||||
height: '700px',
|
|
||||||
display: 'flex',
|
|
||||||
flexDirection: 'column',
|
|
||||||
} as React.CSSProperties;
|
|
||||||
|
|
||||||
const offsetPixels = Math.floor(thumbnailWidth / 2);
|
|
||||||
|
|
||||||
if (mediaSet == null) {
|
|
||||||
// TODO: improve
|
|
||||||
return <></>;
|
|
||||||
}
|
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<>
|
<BrowserRouter>
|
||||||
<div className="App">
|
<Routes>
|
||||||
<div style={containerStyles}>
|
<Route path="/" element={<HomePage />} />
|
||||||
<ControlBar
|
<Route path="/video/:videoId" element={<VideoPage />} />
|
||||||
onPlay={handlePlay}
|
</Routes>
|
||||||
onPause={handlePause}
|
</BrowserRouter>
|
||||||
onClip={handleClip}
|
|
||||||
/>
|
|
||||||
|
|
||||||
<Overview
|
|
||||||
peaks={overviewPeaks}
|
|
||||||
mediaSet={mediaSet}
|
|
||||||
offsetPixels={offsetPixels}
|
|
||||||
height={80}
|
|
||||||
viewport={viewport}
|
|
||||||
position={position}
|
|
||||||
onSelectionChange={handleOverviewSelectionChange}
|
|
||||||
/>
|
|
||||||
|
|
||||||
<Waveform
|
|
||||||
mediaSet={mediaSet}
|
|
||||||
position={position}
|
|
||||||
viewport={viewport}
|
|
||||||
offsetPixels={offsetPixels}
|
|
||||||
onSelectionChange={handleWaveformSelectionChange}
|
|
||||||
/>
|
|
||||||
|
|
||||||
<SeekBar
|
|
||||||
position={video.currentTime}
|
|
||||||
duration={mediaSet.audioFrames / mediaSet.audioSampleRate}
|
|
||||||
offsetPixels={offsetPixels}
|
|
||||||
onPositionChanged={(position: number) => {
|
|
||||||
video.currentTime = position;
|
|
||||||
audio.currentTime = position;
|
|
||||||
}}
|
|
||||||
/>
|
|
||||||
|
|
||||||
<VideoPreview
|
|
||||||
mediaSet={mediaSet}
|
|
||||||
video={video}
|
|
||||||
position={position}
|
|
||||||
duration={millisFromDuration(mediaSet.videoDuration)}
|
|
||||||
height={thumbnailHeight}
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
<ul style={{ listStyleType: 'none' } as React.CSSProperties}>
|
|
||||||
<li>Frames: {mediaSet.audioFrames}</li>
|
|
||||||
<li>
|
|
||||||
Viewport (frames): {viewport.start} to {viewport.end}
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
Selection (frames): {selection.start} to {selection.end}
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
Position (frames):{' '}
|
|
||||||
{Math.round(mediaSet.audioFrames * (position.percent / 100))}
|
|
||||||
</li>
|
|
||||||
<li>Position (seconds): {position.currentTime}</li>
|
|
||||||
<li></li>
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
</>
|
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
export default App;
|
export default App;
|
||||||
|
|
||||||
function millisFromDuration(dur?: Duration): number {
|
|
||||||
if (dur == undefined) {
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
return Math.floor(dur.seconds * 1000.0 + dur.nanos / 1000.0 / 1000.0);
|
|
||||||
}
|
|
||||||
|
|
||||||
export function newRPC(): GrpcWebImpl {
|
export function newRPC(): GrpcWebImpl {
|
||||||
return new GrpcWebImpl(apiURL, {});
|
return new GrpcWebImpl(apiURL, {});
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
import { useState, useEffect, useCallback } from 'react';
|
import { useState, useEffect, useCallback } from 'react';
|
||||||
import { MediaSet } from './generated/media_set';
|
import { MediaSet } from './generated/media_set';
|
||||||
import { Frames, VideoPosition } from './App';
|
|
||||||
import { WaveformCanvas } from './WaveformCanvas';
|
import { WaveformCanvas } from './WaveformCanvas';
|
||||||
import { HudCanvas, EmptySelectionAction } from './HudCanvas';
|
import { HudCanvas, EmptySelectionAction } from './HudCanvas';
|
||||||
import { Observable } from 'rxjs';
|
import { Observable } from 'rxjs';
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
import { MediaSet, MediaSetServiceClientImpl } from './generated/media_set';
|
import { MediaSet, MediaSetServiceClientImpl } from './generated/media_set';
|
||||||
import { newRPC, VideoPosition } from './App';
|
import { newRPC } from './App';
|
||||||
import { useEffect, useRef } from 'react';
|
import { useEffect, useRef } from 'react';
|
||||||
|
|
||||||
interface Props {
|
interface Props {
|
||||||
|
|
|
@ -1,10 +1,10 @@
|
||||||
import { useEffect, useState, useCallback } from 'react';
|
import { useEffect, useState, useCallback } from "react";
|
||||||
import { Frames, VideoPosition, newRPC } from './App';
|
import { newRPC } from "./App";
|
||||||
import { MediaSetServiceClientImpl, MediaSet } from './generated/media_set';
|
import { MediaSetServiceClientImpl, MediaSet } from "./generated/media_set";
|
||||||
import { WaveformCanvas } from './WaveformCanvas';
|
import { WaveformCanvas } from "./WaveformCanvas";
|
||||||
import { Selection, HudCanvas, EmptySelectionAction } from './HudCanvas';
|
import { Selection, HudCanvas, EmptySelectionAction } from "./HudCanvas";
|
||||||
import { from, Observable } from 'rxjs';
|
import { from, Observable } from "rxjs";
|
||||||
import { bufferCount } from 'rxjs/operators';
|
import { bufferCount } from "rxjs/operators";
|
||||||
|
|
||||||
interface Props {
|
interface Props {
|
||||||
mediaSet: MediaSet;
|
mediaSet: MediaSet;
|
||||||
|
@ -45,7 +45,7 @@ export const Waveform: React.FC<Props> = ({
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
console.log('fetch audio segment, frames', viewport);
|
console.log("fetch audio segment, frames", viewport);
|
||||||
|
|
||||||
const service = new MediaSetServiceClientImpl(newRPC());
|
const service = new MediaSetServiceClientImpl(newRPC());
|
||||||
const segment = await service.GetPeaksForSegment({
|
const segment = await service.GetPeaksForSegment({
|
||||||
|
@ -55,7 +55,7 @@ export const Waveform: React.FC<Props> = ({
|
||||||
endFrame: viewport.end,
|
endFrame: viewport.end,
|
||||||
});
|
});
|
||||||
|
|
||||||
console.log('got segment', segment);
|
console.log("got segment", segment);
|
||||||
|
|
||||||
const peaks = from(segment.peaks).pipe(
|
const peaks = from(segment.peaks).pipe(
|
||||||
bufferCount(mediaSet.audioChannels)
|
bufferCount(mediaSet.audioChannels)
|
||||||
|
@ -119,17 +119,17 @@ export const Waveform: React.FC<Props> = ({
|
||||||
// render component
|
// render component
|
||||||
|
|
||||||
const containerStyles = {
|
const containerStyles = {
|
||||||
background: 'black',
|
background: "black",
|
||||||
margin: '0 ' + offsetPixels + 'px',
|
margin: "0 " + offsetPixels + "px",
|
||||||
flexGrow: 1,
|
flexGrow: 1,
|
||||||
position: 'relative',
|
position: "relative",
|
||||||
} as React.CSSProperties;
|
} as React.CSSProperties;
|
||||||
|
|
||||||
const hudStyles = {
|
const hudStyles = {
|
||||||
borderLineWidth: 0,
|
borderLineWidth: 0,
|
||||||
borderStrokeStyle: 'transparent',
|
borderStrokeStyle: "transparent",
|
||||||
positionLineWidth: 6,
|
positionLineWidth: 6,
|
||||||
positionStrokeStyle: 'red',
|
positionStrokeStyle: "red",
|
||||||
};
|
};
|
||||||
|
|
||||||
return (
|
return (
|
||||||
|
|
|
@ -0,0 +1,40 @@
|
||||||
|
import { ChangeEventHandler, MouseEventHandler, useState } from "react";
|
||||||
|
import { useNavigate } from "react-router-dom";
|
||||||
|
|
||||||
|
const extractVideoIDFromURL = (input: string): string | null => {
|
||||||
|
const { searchParams } = new URL(input);
|
||||||
|
return searchParams.get("v");
|
||||||
|
};
|
||||||
|
|
||||||
|
function HomePage(): JSX.Element {
|
||||||
|
const [input, setInput] = useState<string>("");
|
||||||
|
const [error, setError] = useState<string | null>(null);
|
||||||
|
const navigate = useNavigate();
|
||||||
|
|
||||||
|
const handleChange: ChangeEventHandler<HTMLInputElement> = (event) => {
|
||||||
|
setInput(event.target.value);
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleSubmit: MouseEventHandler<HTMLButtonElement> = () => {
|
||||||
|
try {
|
||||||
|
const videoId = extractVideoIDFromURL(input);
|
||||||
|
if (videoId === null) {
|
||||||
|
setError("URL not valid, please enter a valid YouTube URL");
|
||||||
|
} else {
|
||||||
|
navigate(`/video/${videoId}`);
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
setError("URL not valid, please enter a valid YouTube URL");
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div>
|
||||||
|
<input value={input} onChange={handleChange} />
|
||||||
|
<button onClick={handleSubmit}>Submit</button>
|
||||||
|
{Boolean(error) && <div>{error}</div>}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
export default HomePage;
|
|
@ -0,0 +1,391 @@
|
||||||
|
import {
|
||||||
|
MediaSet,
|
||||||
|
MediaSetServiceClientImpl,
|
||||||
|
GetVideoProgress,
|
||||||
|
GetPeaksProgress,
|
||||||
|
} from '../generated/media_set';
|
||||||
|
|
||||||
|
import { useState, useEffect, useRef, useCallback } from 'react';
|
||||||
|
import { AudioFormat } from '../generated/media_set';
|
||||||
|
import { VideoPreview } from '../VideoPreview';
|
||||||
|
import { Overview, CanvasLogicalWidth } from '../Overview';
|
||||||
|
import { Waveform } from '../Waveform';
|
||||||
|
import { ControlBar } from '../ControlBar';
|
||||||
|
import { SeekBar } from '../SeekBar';
|
||||||
|
import { Duration } from '../generated/google/protobuf/duration';
|
||||||
|
import { firstValueFrom, from, Observable } from 'rxjs';
|
||||||
|
import { first, map } from 'rxjs/operators';
|
||||||
|
import { newRPC } from '../App';
|
||||||
|
import { useParams } from 'react-router-dom';
|
||||||
|
|
||||||
|
// ported from backend, where should they live?
|
||||||
|
const thumbnailWidth = 177;
|
||||||
|
const thumbnailHeight = 100;
|
||||||
|
|
||||||
|
const initialViewportCanvasPixels = 100;
|
||||||
|
|
||||||
|
// Frames represents a range of audio frames.
|
||||||
|
|
||||||
|
const video = document.createElement('video');
|
||||||
|
const audio = document.createElement('audio');
|
||||||
|
|
||||||
|
type VideoPageParams = {
|
||||||
|
videoId: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
function VideoPage(): JSX.Element {
|
||||||
|
const [mediaSet, setMediaSet] = useState<MediaSet | null>(null);
|
||||||
|
const [viewport, setViewport] = useState<Frames>({ start: 0, end: 0 });
|
||||||
|
const [selection, setSelection] = useState<Frames>({ start: 0, end: 0 });
|
||||||
|
const [overviewPeaks, setOverviewPeaks] = useState<Observable<number[]>>(
|
||||||
|
from([])
|
||||||
|
);
|
||||||
|
const { videoId } = useParams<VideoPageParams>();
|
||||||
|
|
||||||
|
// position stores the current playback position. positionRef makes it
|
||||||
|
// available inside a setInterval callback.
|
||||||
|
const [position, setPosition] = useState({ currentTime: 0, percent: 0 });
|
||||||
|
const positionRef = useRef(position);
|
||||||
|
positionRef.current = position;
|
||||||
|
|
||||||
|
// effects
|
||||||
|
|
||||||
|
// TODO: error handling
|
||||||
|
|
||||||
|
if (videoId == null) {
|
||||||
|
return <></>;
|
||||||
|
}
|
||||||
|
|
||||||
|
// fetch mediaset on page load:
|
||||||
|
useEffect(() => {
|
||||||
|
(async function () {
|
||||||
|
const rpc = newRPC();
|
||||||
|
const service = new MediaSetServiceClientImpl(rpc);
|
||||||
|
const mediaSet = await service.Get({ youtubeId: videoId });
|
||||||
|
|
||||||
|
console.log('got media set:', mediaSet);
|
||||||
|
setMediaSet(mediaSet);
|
||||||
|
})();
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
const updatePlayerPositionIntevalMillis = 30;
|
||||||
|
|
||||||
|
// setup player on first page load only:
|
||||||
|
useEffect(() => {
|
||||||
|
if (mediaSet == null) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const intervalID = setInterval(() => {
|
||||||
|
const currTime = audio.currentTime;
|
||||||
|
if (currTime == positionRef.current.currentTime) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const duration = mediaSet.audioFrames / mediaSet.audioSampleRate;
|
||||||
|
const percent = (currTime / duration) * 100;
|
||||||
|
|
||||||
|
// check if the end of selection has been passed, and pause if so:
|
||||||
|
if (
|
||||||
|
currentTimeToFrame(position.currentTime) < selection.end &&
|
||||||
|
currentTimeToFrame(currTime) >= selection.end
|
||||||
|
) {
|
||||||
|
handlePause();
|
||||||
|
}
|
||||||
|
|
||||||
|
// update the current position
|
||||||
|
setPosition({ currentTime: audio.currentTime, percent: percent });
|
||||||
|
}, updatePlayerPositionIntevalMillis);
|
||||||
|
|
||||||
|
return () => clearInterval(intervalID);
|
||||||
|
}, [mediaSet, selection]);
|
||||||
|
|
||||||
|
// bind to keypress handler.
|
||||||
|
// selection is a dependency of the handleKeyPress handler, and must be
|
||||||
|
// included here.
|
||||||
|
useEffect(() => {
|
||||||
|
document.addEventListener('keypress', handleKeyPress);
|
||||||
|
return () => document.removeEventListener('keypress', handleKeyPress);
|
||||||
|
}, [selection]);
|
||||||
|
|
||||||
|
// load audio when MediaSet is loaded:
|
||||||
|
useEffect(() => {
|
||||||
|
(async function () {
|
||||||
|
if (mediaSet == null) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
console.log('fetching audio...');
|
||||||
|
const service = new MediaSetServiceClientImpl(newRPC());
|
||||||
|
const audioProgressStream = service.GetPeaks({
|
||||||
|
id: mediaSet.id,
|
||||||
|
numBins: CanvasLogicalWidth,
|
||||||
|
});
|
||||||
|
const peaks = audioProgressStream.pipe(map((progress) => progress.peaks));
|
||||||
|
setOverviewPeaks(peaks);
|
||||||
|
|
||||||
|
const pipe = audioProgressStream.pipe(
|
||||||
|
first((progress: GetPeaksProgress) => progress.url != '')
|
||||||
|
);
|
||||||
|
const progressWithURL = await firstValueFrom(pipe);
|
||||||
|
|
||||||
|
audio.src = progressWithURL.url;
|
||||||
|
audio.muted = false;
|
||||||
|
audio.volume = 1;
|
||||||
|
console.log('set audio src', progressWithURL.url);
|
||||||
|
})();
|
||||||
|
}, [mediaSet]);
|
||||||
|
|
||||||
|
// load video when MediaSet is loaded:
|
||||||
|
useEffect(() => {
|
||||||
|
(async function () {
|
||||||
|
if (mediaSet == null) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('fetching video...');
|
||||||
|
const service = new MediaSetServiceClientImpl(newRPC());
|
||||||
|
const videoProgressStream = service.GetVideo({ id: mediaSet.id });
|
||||||
|
const pipe = videoProgressStream.pipe(
|
||||||
|
first((progress: GetVideoProgress) => progress.url != '')
|
||||||
|
);
|
||||||
|
const progressWithURL = await firstValueFrom(pipe);
|
||||||
|
|
||||||
|
video.src = progressWithURL.url;
|
||||||
|
console.log('set video src', progressWithURL.url);
|
||||||
|
})();
|
||||||
|
}, [mediaSet]);
|
||||||
|
|
||||||
|
// set viewport when MediaSet is loaded:
|
||||||
|
useEffect(() => {
|
||||||
|
if (mediaSet == null) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const numFrames = Math.min(
|
||||||
|
Math.round(mediaSet.audioFrames / CanvasLogicalWidth) *
|
||||||
|
initialViewportCanvasPixels,
|
||||||
|
mediaSet.audioFrames
|
||||||
|
);
|
||||||
|
|
||||||
|
setViewport({ start: 0, end: numFrames });
|
||||||
|
}, [mediaSet]);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
console.debug('viewport updated', viewport);
|
||||||
|
}, [viewport]);
|
||||||
|
|
||||||
|
// handlers
|
||||||
|
|
||||||
|
const handleKeyPress = useCallback(
|
||||||
|
(evt: KeyboardEvent) => {
|
||||||
|
if (evt.code != 'Space') {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (audio.paused) {
|
||||||
|
handlePlay();
|
||||||
|
} else {
|
||||||
|
handlePause();
|
||||||
|
}
|
||||||
|
},
|
||||||
|
[selection]
|
||||||
|
);
|
||||||
|
|
||||||
|
// handler called when the selection in the overview (zoom setting) is changed.
|
||||||
|
const handleOverviewSelectionChange = useCallback(
|
||||||
|
(newViewport: Frames) => {
|
||||||
|
if (mediaSet == null) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
console.log('set new viewport', newViewport);
|
||||||
|
setViewport({ ...newViewport });
|
||||||
|
|
||||||
|
if (!audio.paused) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
setPositionFromFrame(newViewport.start);
|
||||||
|
},
|
||||||
|
[mediaSet, audio, video, selection]
|
||||||
|
);
|
||||||
|
|
||||||
|
// handler called when the selection in the main waveform view is changed.
|
||||||
|
const handleWaveformSelectionChange = useCallback(
|
||||||
|
(newSelection: Frames) => {
|
||||||
|
setSelection(newSelection);
|
||||||
|
|
||||||
|
if (mediaSet == null) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// move playback position to start of selection
|
||||||
|
const ratio = newSelection.start / mediaSet.audioFrames;
|
||||||
|
const currentTime =
|
||||||
|
(mediaSet.audioFrames / mediaSet.audioSampleRate) * ratio;
|
||||||
|
audio.currentTime = currentTime;
|
||||||
|
video.currentTime = currentTime;
|
||||||
|
},
|
||||||
|
[mediaSet, audio, video, selection]
|
||||||
|
);
|
||||||
|
|
||||||
|
const handlePlay = useCallback(() => {
|
||||||
|
audio.play();
|
||||||
|
video.play();
|
||||||
|
}, [audio, video]);
|
||||||
|
|
||||||
|
const handlePause = useCallback(() => {
|
||||||
|
video.pause();
|
||||||
|
audio.pause();
|
||||||
|
|
||||||
|
if (selection.start != selection.end) {
|
||||||
|
setPositionFromFrame(selection.start);
|
||||||
|
}
|
||||||
|
}, [audio, video, selection]);
|
||||||
|
|
||||||
|
const handleClip = useCallback(() => {
|
||||||
|
(async function () {
|
||||||
|
console.debug('clip', selection);
|
||||||
|
|
||||||
|
if (mediaSet == null) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: support File System Access API fallback
|
||||||
|
const h = await window.showSaveFilePicker({ suggestedName: 'clip.mp3' });
|
||||||
|
const fileStream = await h.createWritable();
|
||||||
|
|
||||||
|
const rpc = newRPC();
|
||||||
|
const service = new MediaSetServiceClientImpl(rpc);
|
||||||
|
const stream = service.GetAudioSegment({
|
||||||
|
id: mediaSet.id,
|
||||||
|
format: AudioFormat.MP3,
|
||||||
|
startFrame: selection.start,
|
||||||
|
endFrame: selection.end,
|
||||||
|
});
|
||||||
|
|
||||||
|
await stream.forEach((p) => fileStream.write(p.audioData));
|
||||||
|
console.debug('finished writing stream');
|
||||||
|
|
||||||
|
await fileStream.close();
|
||||||
|
console.debug('closed stream');
|
||||||
|
})();
|
||||||
|
}, [mediaSet, selection]);
|
||||||
|
|
||||||
|
const setPositionFromFrame = useCallback(
|
||||||
|
(frame: number) => {
|
||||||
|
if (mediaSet == null) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const ratio = frame / mediaSet.audioFrames;
|
||||||
|
const currentTime =
|
||||||
|
(mediaSet.audioFrames / mediaSet.audioSampleRate) * ratio;
|
||||||
|
audio.currentTime = currentTime;
|
||||||
|
video.currentTime = currentTime;
|
||||||
|
},
|
||||||
|
[mediaSet, audio, video]
|
||||||
|
);
|
||||||
|
|
||||||
|
// helpers
|
||||||
|
|
||||||
|
const currentTimeToFrame = useCallback(
|
||||||
|
(currentTime: number): number => {
|
||||||
|
if (mediaSet == null) {
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
const dur = mediaSet.audioFrames / mediaSet.audioSampleRate;
|
||||||
|
const ratio = currentTime / dur;
|
||||||
|
return Math.round(mediaSet.audioFrames * ratio);
|
||||||
|
},
|
||||||
|
[mediaSet]
|
||||||
|
);
|
||||||
|
|
||||||
|
// render component
|
||||||
|
|
||||||
|
const containerStyles = {
|
||||||
|
border: '1px solid black',
|
||||||
|
width: '90%',
|
||||||
|
margin: '1em auto',
|
||||||
|
minHeight: '500px',
|
||||||
|
height: '700px',
|
||||||
|
display: 'flex',
|
||||||
|
flexDirection: 'column',
|
||||||
|
} as React.CSSProperties;
|
||||||
|
|
||||||
|
const offsetPixels = Math.floor(thumbnailWidth / 2);
|
||||||
|
|
||||||
|
if (mediaSet == null) {
|
||||||
|
// TODO: improve
|
||||||
|
return <></>;
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="App">
|
||||||
|
<div style={containerStyles}>
|
||||||
|
<ControlBar
|
||||||
|
onPlay={handlePlay}
|
||||||
|
onPause={handlePause}
|
||||||
|
onClip={handleClip}
|
||||||
|
/>
|
||||||
|
|
||||||
|
<Overview
|
||||||
|
peaks={overviewPeaks}
|
||||||
|
mediaSet={mediaSet}
|
||||||
|
offsetPixels={offsetPixels}
|
||||||
|
height={80}
|
||||||
|
viewport={viewport}
|
||||||
|
position={position}
|
||||||
|
onSelectionChange={handleOverviewSelectionChange}
|
||||||
|
/>
|
||||||
|
|
||||||
|
<Waveform
|
||||||
|
mediaSet={mediaSet}
|
||||||
|
position={position}
|
||||||
|
viewport={viewport}
|
||||||
|
offsetPixels={offsetPixels}
|
||||||
|
onSelectionChange={handleWaveformSelectionChange}
|
||||||
|
/>
|
||||||
|
|
||||||
|
<SeekBar
|
||||||
|
position={video.currentTime}
|
||||||
|
duration={mediaSet.audioFrames / mediaSet.audioSampleRate}
|
||||||
|
offsetPixels={offsetPixels}
|
||||||
|
onPositionChanged={(position: number) => {
|
||||||
|
video.currentTime = position;
|
||||||
|
audio.currentTime = position;
|
||||||
|
}}
|
||||||
|
/>
|
||||||
|
|
||||||
|
<VideoPreview
|
||||||
|
mediaSet={mediaSet}
|
||||||
|
video={video}
|
||||||
|
position={position}
|
||||||
|
duration={millisFromDuration(mediaSet.videoDuration)}
|
||||||
|
height={thumbnailHeight}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
<ul style={{ listStyleType: 'none' } as React.CSSProperties}>
|
||||||
|
<li>Frames: {mediaSet.audioFrames}</li>
|
||||||
|
<li>
|
||||||
|
Viewport (frames): {viewport.start} to {viewport.end}
|
||||||
|
</li>
|
||||||
|
<li>
|
||||||
|
Selection (frames): {selection.start} to {selection.end}
|
||||||
|
</li>
|
||||||
|
<li>
|
||||||
|
Position (frames):{' '}
|
||||||
|
{Math.round(mediaSet.audioFrames * (position.percent / 100))}
|
||||||
|
</li>
|
||||||
|
<li>Position (seconds): {position.currentTime}</li>
|
||||||
|
<li></li>
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
export default VideoPage;
|
||||||
|
|
||||||
|
function millisFromDuration(dur?: Duration): number {
|
||||||
|
if (dur == undefined) {
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
return Math.floor(dur.seconds * 1000.0 + dur.nanos / 1000.0 / 1000.0);
|
||||||
|
}
|
|
@ -0,0 +1,9 @@
|
||||||
|
/**
 * An inclusive range of audio frame indices, used for both the
 * visible viewport and the user's selection.
 */
interface Frames {
  // first frame of the range
  start: number;
  // last frame of the range
  end: number;
}
|
||||||
|
|
||||||
|
/**
 * Playback position of the media, tracked in two forms so consumers
 * can use whichever is convenient.
 */
interface VideoPosition {
  // playback position in seconds (mirrors HTMLMediaElement.currentTime)
  currentTime: number;
  // playback position as a percentage of total duration (0-100)
  percent: number;
}
|
|
@ -1172,6 +1172,13 @@
|
||||||
dependencies:
|
dependencies:
|
||||||
regenerator-runtime "^0.13.4"
|
regenerator-runtime "^0.13.4"
|
||||||
|
|
||||||
|
"@babel/runtime@^7.7.6":
|
||||||
|
version "7.16.7"
|
||||||
|
resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.16.7.tgz#03ff99f64106588c9c403c6ecb8c3bafbbdff1fa"
|
||||||
|
integrity sha512-9E9FJowqAsytyOY6LG+1KuueckRL+aQW+mKvXRXnuFGyRAyepJPmEo9vgMfXUA6O9u3IeEdv9MAkppFcaQwogQ==
|
||||||
|
dependencies:
|
||||||
|
regenerator-runtime "^0.13.4"
|
||||||
|
|
||||||
"@babel/template@^7.10.4", "@babel/template@^7.16.0", "@babel/template@^7.3.3":
|
"@babel/template@^7.10.4", "@babel/template@^7.16.0", "@babel/template@^7.3.3":
|
||||||
version "7.16.0"
|
version "7.16.0"
|
||||||
resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.16.0.tgz#d16a35ebf4cd74e202083356fab21dd89363ddd6"
|
resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.16.0.tgz#d16a35ebf4cd74e202083356fab21dd89363ddd6"
|
||||||
|
@ -5648,6 +5655,13 @@ hex-color-regex@^1.1.0:
|
||||||
resolved "https://registry.yarnpkg.com/hex-color-regex/-/hex-color-regex-1.1.0.tgz#4c06fccb4602fe2602b3c93df82d7e7dbf1a8a8e"
|
resolved "https://registry.yarnpkg.com/hex-color-regex/-/hex-color-regex-1.1.0.tgz#4c06fccb4602fe2602b3c93df82d7e7dbf1a8a8e"
|
||||||
integrity sha512-l9sfDFsuqtOqKDsQdqrMRk0U85RZc0RtOR9yPI7mRVOa4FsR/BVnZ0shmQRM96Ji99kYZP/7hn1cedc1+ApsTQ==
|
integrity sha512-l9sfDFsuqtOqKDsQdqrMRk0U85RZc0RtOR9yPI7mRVOa4FsR/BVnZ0shmQRM96Ji99kYZP/7hn1cedc1+ApsTQ==
|
||||||
|
|
||||||
|
history@^5.2.0:
|
||||||
|
version "5.2.0"
|
||||||
|
resolved "https://registry.yarnpkg.com/history/-/history-5.2.0.tgz#7cdd31cf9bac3c5d31f09c231c9928fad0007b7c"
|
||||||
|
integrity sha512-uPSF6lAJb3nSePJ43hN3eKj1dTWpN9gMod0ZssbFTIsen+WehTmEadgL+kg78xLJFdRfrrC//SavDzmRVdE+Ig==
|
||||||
|
dependencies:
|
||||||
|
"@babel/runtime" "^7.7.6"
|
||||||
|
|
||||||
hmac-drbg@^1.0.1:
|
hmac-drbg@^1.0.1:
|
||||||
version "1.0.1"
|
version "1.0.1"
|
||||||
resolved "https://registry.yarnpkg.com/hmac-drbg/-/hmac-drbg-1.0.1.tgz#d2745701025a6c775a6c545793ed502fc0c649a1"
|
resolved "https://registry.yarnpkg.com/hmac-drbg/-/hmac-drbg-1.0.1.tgz#d2745701025a6c775a6c545793ed502fc0c649a1"
|
||||||
|
@ -9324,6 +9338,21 @@ react-refresh@^0.8.3:
|
||||||
resolved "https://registry.yarnpkg.com/react-refresh/-/react-refresh-0.8.3.tgz#721d4657672d400c5e3c75d063c4a85fb2d5d68f"
|
resolved "https://registry.yarnpkg.com/react-refresh/-/react-refresh-0.8.3.tgz#721d4657672d400c5e3c75d063c4a85fb2d5d68f"
|
||||||
integrity sha512-X8jZHc7nCMjaCqoU+V2I0cOhNW+QMBwSUkeXnTi8IPe6zaRWfn60ZzvFDZqWPfmSJfjub7dDW1SP0jaHWLu/hg==
|
integrity sha512-X8jZHc7nCMjaCqoU+V2I0cOhNW+QMBwSUkeXnTi8IPe6zaRWfn60ZzvFDZqWPfmSJfjub7dDW1SP0jaHWLu/hg==
|
||||||
|
|
||||||
|
react-router-dom@^6.2.1:
|
||||||
|
version "6.2.1"
|
||||||
|
resolved "https://registry.yarnpkg.com/react-router-dom/-/react-router-dom-6.2.1.tgz#32ec81829152fbb8a7b045bf593a22eadf019bec"
|
||||||
|
integrity sha512-I6Zax+/TH/cZMDpj3/4Fl2eaNdcvoxxHoH1tYOREsQ22OKDYofGebrNm6CTPUcvLvZm63NL/vzCYdjf9CUhqmA==
|
||||||
|
dependencies:
|
||||||
|
history "^5.2.0"
|
||||||
|
react-router "6.2.1"
|
||||||
|
|
||||||
|
react-router@6.2.1:
|
||||||
|
version "6.2.1"
|
||||||
|
resolved "https://registry.yarnpkg.com/react-router/-/react-router-6.2.1.tgz#be2a97a6006ce1d9123c28934e604faef51448a3"
|
||||||
|
integrity sha512-2fG0udBtxou9lXtK97eJeET2ki5//UWfQSl1rlJ7quwe6jrktK9FCCc8dQb5QY6jAv3jua8bBQRhhDOM/kVRsg==
|
||||||
|
dependencies:
|
||||||
|
history "^5.2.0"
|
||||||
|
|
||||||
react-scripts@4.0.3:
|
react-scripts@4.0.3:
|
||||||
version "4.0.3"
|
version "4.0.3"
|
||||||
resolved "https://registry.yarnpkg.com/react-scripts/-/react-scripts-4.0.3.tgz#b1cafed7c3fa603e7628ba0f187787964cb5d345"
|
resolved "https://registry.yarnpkg.com/react-scripts/-/react-scripts-4.0.3.tgz#b1cafed7c3fa603e7628ba0f187787964cb5d345"
|
||||||
|
|
Loading…
Reference in New Issue