Refactor top-level state management => useReducer
continuous-integration/drone/push Build is passing Details

This commit is contained in:
Rob Watson 2022-02-03 19:56:05 +01:00
parent a855d589f3
commit 9f76d2764f
11 changed files with 719 additions and 547 deletions

View File

@ -1,40 +1,36 @@
import {
MediaSet,
GrpcWebImpl,
MediaSetServiceClientImpl,
GetVideoProgress,
GetPeaksProgress,
} from './generated/media_set';
import { useState, useEffect, useRef, useCallback } from 'react';
import { useEffect, useCallback, useReducer } from 'react';
import { State, stateReducer, zoomFactor } from './AppState';
import { AudioFormat } from './generated/media_set';
import { VideoPreview } from './VideoPreview';
import { Overview, CanvasLogicalWidth } from './Overview';
import { Waveform } from './Waveform';
import { SelectionChangeEvent } from './HudCanvas';
import { Selection, SelectionMode } from './HudCanvasState';
import { WaveformCanvas } from './WaveformCanvas';
import { HudCanvas } from './HudCanvas';
import { Player, PlayState } from './Player';
import {
CanvasLogicalWidth,
CanvasLogicalHeight,
EmptySelectionAction,
} from './HudCanvasState';
import { ControlBar } from './ControlBar';
import { SeekBar } from './SeekBar';
import { firstValueFrom, from, Observable } from 'rxjs';
import { first, map } from 'rxjs/operators';
import { first, map, bufferCount } from 'rxjs/operators';
import millisFromDuration from './helpers/millisFromDuration';
import {
canZoomViewportIn,
canZoomViewportOut,
zoomViewportIn,
zoomViewportOut,
} from './helpers/zoom';
import { canZoomViewportIn, canZoomViewportOut } from './helpers/zoom';
import toHHMMSS from './helpers/toHHMMSS';
import framesToDuration from './helpers/framesToDuration';
import frameToWaveformCanvasX from './helpers/frameToWaveformCanvasX';
import { ClockIcon, ExternalLinkIcon } from '@heroicons/react/solid';
// ported from backend, where should they live?
const thumbnailWidth = 177; // height 100
const initialViewportCanvasPixels = 100;
const zoomFactor = 2;
const apiURL = process.env.REACT_APP_API_URL || 'http://localhost:8888';
// Frames represents a range of audio frames.
@ -48,28 +44,34 @@ export interface VideoPosition {
percent: number;
}
export enum PlayState {
Paused,
Playing,
}
const video = document.createElement('video');
const audio = document.createElement('audio');
const initialState: State = {
selection: { start: 0, end: 0 },
viewport: { start: 0, end: 0 },
overviewPeaks: from([]),
waveformPeaks: from([]),
selectionCanvas: { start: 0, end: 0 },
viewportCanvas: { start: 0, end: 0 },
position: { currentTime: 0, frame: 0, percent: 0 },
audioSrc: '',
videoSrc: '',
currentTime: 0,
playState: PlayState.Paused,
};
function App(): JSX.Element {
const [mediaSet, setMediaSet] = useState<MediaSet | null>(null);
const [viewport, setViewport] = useState<Frames>({ start: 0, end: 0 });
const [selection, setSelection] = useState<Frames>({ start: 0, end: 0 });
const [overviewPeaks, setOverviewPeaks] = useState<Observable<number[]>>(
from([])
);
const [playState, setPlayState] = useState(PlayState.Paused);
const [state, dispatch] = useReducer(stateReducer, { ...initialState });
// position stores the current playback position. positionRef makes it
// available inside a setInterval callback.
const [position, setPosition] = useState({ currentTime: 0, percent: 0 });
const positionRef = useRef(position);
positionRef.current = position;
const {
mediaSet,
waveformPeaks,
overviewPeaks,
selection,
selectionCanvas,
viewport,
viewportCanvas,
position,
playState,
} = state;
// effects
@ -87,7 +89,7 @@ function App(): JSX.Element {
const mediaSet = await service.Get({ youtubeId: videoID });
console.log('got media set:', mediaSet);
setMediaSet(mediaSet);
dispatch({ type: 'mediasetloaded', mediaSet: mediaSet });
// fetch audio asynchronously
console.log('fetching audio...');
@ -96,7 +98,7 @@ function App(): JSX.Element {
numBins: CanvasLogicalWidth,
});
const peaks = audioProgressStream.pipe(map((progress) => progress.peaks));
setOverviewPeaks(peaks);
dispatch({ type: 'overviewpeaksloaded', peaks: peaks });
const audioPipe = audioProgressStream.pipe(
first((progress: GetPeaksProgress) => progress.url != '')
@ -113,73 +115,52 @@ function App(): JSX.Element {
// wait for both audio, then video.
const audioProgress = await fetchAudioTask;
audio.src = audioProgress.url;
audio.muted = false;
audio.volume = 1;
console.log('set audio src', audioProgress.url);
setMediaSet({ ...mediaSet, audioFrames: audioProgress.audioFrames });
dispatch({
type: 'audiosourceloaded',
src: audioProgress.url,
numFrames: audioProgress.audioFrames,
});
const videoProgress = await fetchVideoTask;
video.src = videoProgress.url;
console.log('set video src', videoProgress.url);
dispatch({ type: 'videosourceloaded', src: videoProgress.url });
})();
}, []);
const updatePlayerPositionIntevalMillis = 20;
// setup player on first page load only:
// load waveform peaks on MediaSet change
useEffect(() => {
if (mediaSet == null) {
return;
}
(async function () {
const { mediaSet, viewport } = state;
const intervalID = setInterval(() => {
const currTime = audio.currentTime;
if (currTime == positionRef.current.currentTime) {
if (mediaSet == null) {
return;
}
const duration = mediaSet.audioFrames / mediaSet.audioSampleRate;
const percent = (currTime / duration) * 100;
// check if the end of selection has been passed, and pause if so:
if (
selection.start != selection.end &&
currentTimeToFrame(positionRef.current.currentTime) < selection.end &&
currentTimeToFrame(currTime) >= selection.end
) {
pause();
if (viewport.start >= viewport.end) {
return;
}
// update the current position
setPosition({ currentTime: audio.currentTime, percent: percent });
}, updatePlayerPositionIntevalMillis);
const service = new MediaSetServiceClientImpl(newRPC());
const segment = await service.GetPeaksForSegment({
id: mediaSet.id,
numBins: CanvasLogicalWidth,
startFrame: viewport.start,
endFrame: viewport.end,
});
return () => clearInterval(intervalID);
}, [mediaSet, selection]);
console.log('got segment', segment);
const peaks: Observable<number[]> = from(segment.peaks).pipe(
bufferCount(mediaSet.audioChannels)
);
dispatch({ type: 'waveformpeaksloaded', peaks: peaks });
})();
}, [viewport, mediaSet]);
// bind to keypress handler.
// selection is a dependency of the handleKeyPress handler, and must be
// included here.
useEffect(() => {
document.addEventListener('keypress', handleKeyPress);
return () => document.removeEventListener('keypress', handleKeyPress);
}, [selection, playState]);
// set viewport when MediaSet is loaded:
useEffect(() => {
if (mediaSet == null) {
return;
}
const numFrames = Math.min(
Math.round(mediaSet.audioFrames / CanvasLogicalWidth) *
initialViewportCanvasPixels,
mediaSet.audioFrames
);
setViewport({ start: 0, end: numFrames });
}, [mediaSet]);
});
useEffect(() => {
console.debug('viewport updated', viewport);
@ -187,90 +168,17 @@ function App(): JSX.Element {
// handlers
const togglePlay = () => (playState == PlayState.Paused ? play() : pause());
const play = () => dispatch({ type: 'play' });
const pause = () => dispatch({ type: 'pause' });
const handleKeyPress = (evt: KeyboardEvent) => {
if (evt.code != 'Space') {
return;
}
togglePlay();
};
// handler called when the selection in the overview (zoom setting) is changed.
const handleOverviewSelectionChange = ({
selection: newViewport,
}: SelectionChangeEvent) => {
if (mediaSet == null) {
return;
}
console.log('set new viewport', newViewport);
setViewport({ ...newViewport });
if (!audio.paused) {
return;
}
setPositionFromFrame(newViewport.start);
};
const setPositionAfterSelectionChange = (
newSelection: Selection,
mode: SelectionMode,
prevMode: SelectionMode
): boolean => {
// if creating a new selection from scratch, reset position on mouseup.
if (prevMode == SelectionMode.Selecting && mode == SelectionMode.Normal) {
return true;
}
// if re-entering normal mode, reset position if the current position is
// outside the new selection on mouseup.
if (prevMode != SelectionMode.Normal && mode == SelectionMode.Normal) {
const currFrame = currentTimeToFrame(positionRef.current.currentTime);
if (currFrame < newSelection.start || currFrame > newSelection.end) {
return true;
}
}
return false;
};
// handler called when the selection in the main waveform view is changed.
const handleWaveformSelectionChange = ({
selection: newSelection,
mode,
prevMode,
}: SelectionChangeEvent) => {
setSelection(newSelection);
if (setPositionAfterSelectionChange(newSelection, mode, prevMode)) {
setPositionFromFrame(newSelection.start);
}
};
const togglePlay = () => {
if (playState == PlayState.Paused) {
play();
} else {
pause();
}
};
const play = () => {
audio.play();
video.play();
setPlayState(PlayState.Playing);
};
const pause = () => {
video.pause();
audio.pause();
setPositionFromFrame(selection.start);
setPlayState(PlayState.Paused);
};
const handleClip = () => {
if (!window.showSaveFilePicker) {
downloadClipHTTP();
@ -338,71 +246,13 @@ function App(): JSX.Element {
})();
};
const handleZoomIn = () => {
if (mediaSet == null) {
return;
}
const newViewport = zoomViewportIn(
viewport,
mediaSet.audioFrames,
selection,
currentTimeToFrame(positionRef.current.currentTime),
zoomFactor
);
setViewport(newViewport);
};
const handleZoomOut = () => {
if (mediaSet == null) {
return;
}
const newViewport = zoomViewportOut(
viewport,
mediaSet.audioFrames,
selection,
currentTimeToFrame(positionRef.current.currentTime),
zoomFactor
);
setViewport(newViewport);
};
const setPositionFromFrame = useCallback(
(frame: number) => {
if (mediaSet == null) {
return;
}
const ratio = frame / mediaSet.audioFrames;
const currentTime =
(mediaSet.audioFrames / mediaSet.audioSampleRate) * ratio;
audio.currentTime = currentTime;
video.currentTime = currentTime;
},
[mediaSet]
);
// helpers
const currentTimeToFrame = useCallback(
(currentTime: number): number => {
if (mediaSet == null) {
return 0;
}
const dur = mediaSet.audioFrames / mediaSet.audioSampleRate;
const ratio = currentTime / dur;
return Math.round(mediaSet.audioFrames * ratio);
},
[mediaSet]
);
const durationString = useCallback((): string => {
if (!mediaSet || !mediaSet.videoDuration) {
return '';
}
const { selection } = state;
const totalDur = toHHMMSS(mediaSet.videoDuration);
if (selection.start == selection.end) {
return totalDur;
@ -430,6 +280,15 @@ function App(): JSX.Element {
return (
<>
<Player
playState={playState}
audioSrc={state.audioSrc}
videoSrc={state.videoSrc}
currentTime={state.currentTime}
onPositionChanged={(currentTime) =>
dispatch({ type: 'positionchanged', currentTime: currentTime })
}
/>
<div className="App bg-gray-800 h-screen flex flex-col">
<header className="bg-green-900 h-16 grow-0 flex items-center mb-12 px-[88px]">
<h1 className="text-3xl font-bold">Clipper</h1>
@ -462,60 +321,95 @@ function App(): JSX.Element {
)}
onTogglePlay={togglePlay}
onClip={handleClip}
onZoomIn={handleZoomIn}
onZoomOut={handleZoomOut}
onZoomIn={() => dispatch({ type: 'zoomin' })}
onZoomOut={() => dispatch({ type: 'zoomout' })}
downloadClipEnabled={selection.start != selection.end}
/>
<div className="w-full bg-gray-600 h-6"></div>
<Overview
peaks={overviewPeaks}
mediaSet={mediaSet}
viewport={viewport}
position={position}
onSelectionChange={handleOverviewSelectionChange}
/>
<Waveform
mediaSet={mediaSet}
position={position}
viewport={viewport}
onSelectionChange={handleWaveformSelectionChange}
/>
<div className={`relative grow-0 h-16`}>
<WaveformCanvas
peaks={overviewPeaks}
channels={mediaSet.audioChannels}
width={CanvasLogicalWidth}
height={CanvasLogicalHeight}
strokeStyle="black"
fillStyle="#003300"
alpha={1}
></WaveformCanvas>
<HudCanvas
width={CanvasLogicalWidth}
height={CanvasLogicalHeight}
emptySelectionAction={EmptySelectionAction.SelectPrevious}
styles={{
borderLineWidth: 4,
borderStrokeStyle: 'red',
positionLineWidth: 4,
positionStrokeStyle: 'red',
hoverPositionStrokeStyle: 'transparent',
}}
position={(position.percent / 100) * CanvasLogicalWidth}
selection={viewportCanvas}
onSelectionChange={(event) =>
dispatch({ type: 'viewportchanged', event })
}
/>
</div>
<div className={`relative grow`}>
<WaveformCanvas
peaks={waveformPeaks}
channels={mediaSet.audioChannels}
width={CanvasLogicalWidth}
height={CanvasLogicalHeight}
strokeStyle="green"
fillStyle="black"
alpha={1}
></WaveformCanvas>
<HudCanvas
width={CanvasLogicalWidth}
height={CanvasLogicalHeight}
emptySelectionAction={EmptySelectionAction.SelectNothing}
styles={{
borderLineWidth: 0,
borderStrokeStyle: 'transparent',
positionLineWidth: 6,
positionStrokeStyle: 'red',
hoverPositionStrokeStyle: '#666666',
}}
position={frameToWaveformCanvasX(
position.frame,
viewport,
CanvasLogicalWidth
)}
selection={selectionCanvas}
onSelectionChange={(event) =>
dispatch({
type: 'waveformselectionchanged',
event,
})
}
/>
</div>
</div>
<SeekBar
position={video.currentTime}
position={position.currentTime}
duration={mediaSet.audioFrames / mediaSet.audioSampleRate}
offsetPixels={offsetPixels}
onPositionChanged={(position: number) => {
video.currentTime = position;
audio.currentTime = position;
onPositionChanged={(currentTime: number) => {
dispatch({ type: 'skip', currentTime });
}}
/>
<VideoPreview
mediaSet={mediaSet}
video={video}
video={document.createElement('video')}
position={position}
duration={millisFromDuration(mediaSet.videoDuration)}
/>
</div>
<ul className="hidden">
<li>Frames: {mediaSet.audioFrames}</li>
<li>
Viewport (frames): {viewport.start} to {viewport.end}
</li>
<li>
Selection (frames): {selection.start} to {selection.end}
</li>
<li>
Position (frames):{' '}
{Math.round(mediaSet.audioFrames * (position.percent / 100))}
</li>
<li>Position (seconds): {position.currentTime}</li>
<li></li>
</ul>
</div>
</>
);

435
frontend/src/AppState.tsx Normal file
View File

@ -0,0 +1,435 @@
import { MediaSet } from './generated/media_set';
import { Observable } from 'rxjs';
import { SelectionChangeEvent } from './HudCanvas';
import { SelectionMode, CanvasLogicalWidth } from './HudCanvasState';
import { PlayState } from './Player';
import { zoomViewportIn, zoomViewportOut } from './helpers/zoom';
import frameToWaveformCanvasX from './helpers/frameToWaveformCanvasX';
export const zoomFactor = 2;
const initialViewportCanvasPixels = 100;
export interface FrameRange {
start: number;
end: number;
}
// TODO: rename to x1, x2
interface CanvasRange {
start: number;
end: number;
}
interface Position {
currentTime: number;
frame: number;
percent: number;
}
export interface State {
mediaSet?: MediaSet;
selection: FrameRange;
viewport: FrameRange;
overviewPeaks: Observable<number[]>;
waveformPeaks: Observable<number[]>;
// selection canvas. Not kept up-to-date, only used for pushing updates.
selectionCanvas: CanvasRange;
// viewport canvas. Not kept up-to-date, only used for pushing updates.
viewportCanvas: CanvasRange;
audioSrc: string;
videoSrc: string;
position: Position;
// playback position in seconds, only used for forcing a change of position.
currentTime?: number;
playState: PlayState;
}
interface MediaSetLoadedAction {
type: 'mediasetloaded';
mediaSet: MediaSet;
}
interface OverviewPeaksLoadedAction {
type: 'overviewpeaksloaded';
peaks: Observable<number[]>;
}
interface WaveformPeaksLoadedAction {
type: 'waveformpeaksloaded';
peaks: Observable<number[]>;
}
interface AudioSourceLoadedAction {
type: 'audiosourceloaded';
numFrames: number;
src: string;
}
interface VideoSourceLoadedAction {
type: 'videosourceloaded';
src: string;
}
interface SetViewportAction {
type: 'setviewport';
viewport: FrameRange;
}
interface ZoomInAction {
type: 'zoomin';
}
interface ZoomOutAction {
type: 'zoomout';
}
interface ViewportChangedAction {
type: 'viewportchanged';
event: SelectionChangeEvent;
}
interface WaveformSelectionChangedAction {
type: 'waveformselectionchanged';
event: SelectionChangeEvent;
}
interface PositionChangedAction {
type: 'positionchanged';
currentTime: number;
}
interface SkipAction {
type: 'skip';
currentTime: number;
}
interface PlayAction {
type: 'play';
}
interface PauseAction {
type: 'pause';
}
type Action =
| MediaSetLoadedAction
| OverviewPeaksLoadedAction
| WaveformPeaksLoadedAction
| AudioSourceLoadedAction
| VideoSourceLoadedAction
| SetViewportAction
| ZoomInAction
| ZoomOutAction
| ViewportChangedAction
| WaveformSelectionChangedAction
| PositionChangedAction
| SkipAction
| PlayAction
| PauseAction;
// Top-level reducer for the App's state. Pure dispatch table: each action
// type maps to a dedicated handler below, and every handler returns a new
// State object (no mutation). The switch is exhaustive over the Action
// union, so adding an action type without a case is a compile error.
export const stateReducer = (state: State, action: Action): State => {
  switch (action.type) {
    case 'mediasetloaded':
      return handleMediaSetLoaded(state, action);
    case 'overviewpeaksloaded':
      return handleOverviewPeaksLoaded(state, action);
    case 'waveformpeaksloaded':
      return handleWaveformPeaksLoaded(state, action);
    case 'audiosourceloaded':
      return handleAudioSourceLoaded(state, action);
    case 'videosourceloaded':
      return handleVideoSourceLoaded(state, action);
    case 'setviewport':
      return setViewport(state, action);
    case 'zoomin':
      return handleZoomIn(state);
    case 'zoomout':
      return handleZoomOut(state);
    case 'viewportchanged':
      return handleViewportChanged(state, action);
    case 'waveformselectionchanged':
      return handleWaveformSelectionChanged(state, action);
    case 'positionchanged':
      return handlePositionChanged(state, action);
    case 'skip':
      return skip(state, action);
    case 'play':
      return play(state);
    case 'pause':
      return pause(state);
  }
};
// Store a freshly fetched MediaSet and derive an initial viewport: roughly
// initialViewportCanvasPixels worth of waveform at the overview's resolution,
// clamped to the total number of audio frames.
function handleMediaSetLoaded(
  state: State,
  { mediaSet }: MediaSetLoadedAction
): State {
  const framesPerCanvasPixel = Math.round(
    mediaSet.audioFrames / CanvasLogicalWidth
  );
  const viewportFrames = Math.min(
    framesPerCanvasPixel * initialViewportCanvasPixels,
    mediaSet.audioFrames
  );
  const nextState: State = { ...state, mediaSet };
  return setViewport(nextState, {
    type: 'setviewport',
    viewport: { start: 0, end: viewportFrames },
  });
}
// Store the overview (whole-file) peaks stream once it arrives from the API.
// CONSISTENCY FIX: added the explicit `: State` return annotation to match
// every other handler in this file (previously inferred).
function handleOverviewPeaksLoaded(
  state: State,
  { peaks }: OverviewPeaksLoadedAction
): State {
  return { ...state, overviewPeaks: peaks };
}
// Store the waveform (current viewport) peaks stream once fetched.
// CONSISTENCY FIX: added the explicit `: State` return annotation to match
// every other handler in this file (previously inferred).
function handleWaveformPeaksLoaded(
  state: State,
  { peaks }: WaveformPeaksLoadedAction
): State {
  return { ...state, waveformPeaks: peaks };
}
// Record the audio source URL and the now-exact audio frame count reported by
// the backend. No-op until the MediaSet itself has loaded.
function handleAudioSourceLoaded(
  state: State,
  { src, numFrames }: AudioSourceLoadedAction
): State {
  const { mediaSet } = state;
  if (mediaSet == null) {
    return state;
  }
  const updatedMediaSet = { ...mediaSet, audioFrames: numFrames };
  return { ...state, audioSrc: src, mediaSet: updatedMediaSet };
}
// Record the video source URL for the Player component to pick up.
function handleVideoSourceLoaded(
  state: State,
  action: VideoSourceLoadedAction
): State {
  return { ...state, videoSrc: action.src };
}
// Apply a new viewport (in frames) and recompute the two derived canvas
// ranges: the overview-canvas pixel range showing the viewport, and the
// waveform-canvas pixel range showing the selection within it.
// No-op until the MediaSet has loaded (frame count is needed for scaling).
function setViewport(state: State, { viewport }: SetViewportAction): State {
  const { mediaSet, selection } = state;
  if (!mediaSet) {
    return state;
  }
  // map an absolute frame to an overview-canvas x coordinate.
  const toOverviewX = (frame: number): number =>
    Math.round((frame / mediaSet.audioFrames) * CanvasLogicalWidth);
  return {
    ...state,
    viewport: viewport,
    viewportCanvas: {
      start: toOverviewX(viewport.start),
      end: toOverviewX(viewport.end),
    },
    selectionCanvas: selectionToWaveformCanvasRange(selection, viewport),
  };
}
// Zoom the viewport in, anchored on the current selection and the playback
// position (in frames), then push the result through setViewport so the
// derived canvas ranges stay in sync.
function handleZoomIn(state: State): State {
  const { mediaSet, viewport, selection, position } = state;
  if (!mediaSet) {
    return state;
  }
  const zoomed = zoomViewportIn(
    viewport,
    mediaSet.audioFrames,
    selection,
    position.frame,
    zoomFactor
  );
  // TODO: refactoring zoom helpers to use CanvasRange may avoid this step:
  return setViewport(state, { type: 'setviewport', viewport: zoomed });
}
// Zoom the viewport out, anchored on the current selection and the playback
// position (in frames), then push the result through setViewport so the
// derived canvas ranges stay in sync.
function handleZoomOut(state: State): State {
  const {
    mediaSet,
    viewport,
    selection,
    position: { frame },
  } = state;
  if (!mediaSet) {
    return state;
  }
  // BUGFIX: pass the playback position in FRAMES, matching handleZoomIn and
  // the pre-refactor App code (which converted currentTime to a frame before
  // calling zoomViewportOut). Previously position.currentTime — a value in
  // seconds — was passed as the frame anchor, skewing the zoom-out centre.
  const newViewport = zoomViewportOut(
    viewport,
    mediaSet.audioFrames,
    selection,
    frame,
    zoomFactor
  );
  // TODO: refactoring zoom helpers to use CanvasRange may avoid this step:
  return setViewport(state, { type: 'setviewport', viewport: newViewport });
}
// Handle a viewport selection change coming from the overview HUD canvas.
// Converts the canvas pixel range to absolute frames and stores it. Only
// acts in Normal mode, i.e. once the drag/resize gesture has completed.
function handleViewportChanged(
  state: State,
  { event: { mode, selection: canvasRange } }: ViewportChangedAction
): State {
  const { mediaSet, selection } = state;
  if (!mediaSet) {
    return state;
  }
  if (mode != SelectionMode.Normal) {
    return state;
  }
  // scale overview-canvas pixels to frames across the whole media set.
  const newViewport = {
    start: Math.round(
      mediaSet.audioFrames * (canvasRange.start / CanvasLogicalWidth)
    ),
    end: Math.round(
      mediaSet.audioFrames * (canvasRange.end / CanvasLogicalWidth)
    ),
  };
  // NOTE(review): unlike setViewport, viewportCanvas is not recomputed here —
  // presumably because the overview canvas already reflects the gesture and
  // viewportCanvas is only used for pushing updates; confirm.
  return {
    ...state,
    viewport: newViewport,
    selectionCanvas: selectionToWaveformCanvasRange(selection, newViewport),
  };
}
// Handle a selection change coming from the waveform HUD canvas. Converts
// the canvas pixel range to absolute frames (relative to the viewport) and
// stores it. When a gesture completes (re-entering Normal mode) and either
// playback is paused or the playhead fell outside the new selection, force a
// seek to the selection start via state.currentTime.
function handleWaveformSelectionChanged(
  state: State,
  {
    event: { mode, prevMode, selection: canvasRange },
  }: WaveformSelectionChangedAction
): State {
  const {
    mediaSet,
    playState,
    viewport,
    position: { frame: currFrame },
  } = state;
  if (mediaSet == null) {
    return state;
  }
  // scale waveform-canvas pixels to frames within the current viewport.
  const framesPerPixel = (viewport.end - viewport.start) / CanvasLogicalWidth;
  const newSelection = {
    start: Math.round(viewport.start + canvasRange.start * framesPerPixel),
    end: Math.round(viewport.start + canvasRange.end * framesPerPixel),
  };
  let currentTime = state.currentTime;
  if (
    prevMode != SelectionMode.Normal &&
    mode == SelectionMode.Normal &&
    (playState == PlayState.Paused ||
      currFrame < newSelection.start ||
      currFrame > newSelection.end)
  ) {
    // seek to the selection start (frames -> seconds).
    currentTime = newSelection.start / mediaSet.audioSampleRate;
  }
  return {
    ...state,
    selection: newSelection,
    currentTime: currentTime,
  };
}
// Handle a periodic playback-position report from the Player. Derives the
// frame and percent representations, and — when the playhead crosses the end
// of a non-empty selection — pauses and forces a seek back to the selection
// start.
function handlePositionChanged(
  state: State,
  { currentTime }: PositionChangedAction
): State {
  const {
    mediaSet,
    selection,
    position: { frame: prevFrame },
  } = state;
  if (mediaSet == null) {
    return state;
  }
  const frame = Math.round(currentTime * mediaSet.audioSampleRate);
  const percent = (frame / mediaSet.audioFrames) * 100;
  // reset play position and pause if selection end passed.
  let playState = state.playState;
  let forceCurrentTime;
  if (
    selection.start != selection.end &&
    prevFrame < selection.end &&
    frame >= selection.end
  ) {
    playState = PlayState.Paused;
    forceCurrentTime = selection.start / mediaSet.audioSampleRate;
    // CLEANUP: removed a leftover debug console.log here.
  }
  return {
    ...state,
    playState,
    // forceCurrentTime is undefined in the common case, which clears any
    // previously forced seek so the Player does not repeat it.
    currentTime: forceCurrentTime,
    position: {
      currentTime,
      frame,
      percent,
    },
  };
}
// Force a jump of the playback position (seconds); consumed by the Player.
function skip(state: State, action: SkipAction): State {
  return { ...state, currentTime: action.currentTime };
}
// Enter the playing state; the Player component reacts to playState changes.
function play(state: State): State {
  return { ...state, playState: PlayState.Playing };
}
// Enter the paused state. If a non-empty selection exists, also rewind the
// playhead to the selection start (frames -> seconds) via a forced seek.
function pause(state: State): State {
  const { mediaSet, selection } = state;
  let currentTime;
  // Rewind only when the sample rate needed for the conversion is known and
  // the selection is non-empty.
  if (mediaSet && selection.start != selection.end) {
    currentTime = selection.start / mediaSet.audioSampleRate;
  }
  // BUGFIX: previously a missing mediaSet returned early without updating
  // playState, leaving the state stuck in Playing while nothing was audible.
  return { ...state, currentTime, playState: PlayState.Paused };
}
// helpers
// Project a frame-range selection onto waveform-canvas pixel coordinates for
// the given viewport. Endpoints outside the viewport map to null from
// frameToWaveformCanvasX and are coerced: a fully collapsed/invisible
// selection becomes {0, 0}; otherwise an off-screen start clamps to 0 and an
// off-screen end clamps to the full canvas width.
function selectionToWaveformCanvasRange(
  selection: FrameRange,
  viewport: FrameRange
): CanvasRange {
  const x1 =
    frameToWaveformCanvasX(selection.start, viewport, CanvasLogicalWidth) || 0;
  const x2 =
    frameToWaveformCanvasX(selection.end, viewport, CanvasLogicalWidth) || 0;
  // both endpoints coincide (or both off-screen): treat as empty selection.
  if (x1 == x2) {
    return { start: 0, end: 0 };
  }
  // NOTE(review): `x2 || CanvasLogicalWidth` maps an off-screen (or zero)
  // end to the right canvas edge; the extra `x1 || 0` is redundant since x1
  // was already defaulted above.
  return { start: x1 || 0, end: x2 || CanvasLogicalWidth };
}

View File

@ -1,5 +1,5 @@
import React from 'react';
import { PlayState } from './App';
import { PlayState } from './Player';
import {
CloudDownloadIcon,
FastForwardIcon,

View File

@ -70,7 +70,7 @@ export const HudCanvas: React.FC<Props> = ({
hoverPositionStrokeStyle,
},
position,
selection: initialSelection,
selection: selection,
onSelectionChange,
}: Props) => {
const canvasRef = useRef<HTMLCanvasElement>(null);
@ -78,15 +78,15 @@ export const HudCanvas: React.FC<Props> = ({
const [state, dispatch] = useReducer(stateReducer, {
...initialState,
width,
selection,
emptySelectionAction,
selection: initialSelection,
});
// side effects
useEffect(() => {
dispatch({ selection: initialSelection, x: 0, type: 'setselection' });
}, [initialSelection]);
dispatch({ selection: selection, x: 0, type: 'setselection' });
}, [selection]);
// handle global mouse up
useEffect(() => {
@ -146,8 +146,8 @@ export const HudCanvas: React.FC<Props> = ({
const hoverX = state.hoverX;
if (
hoverX != null &&
(hoverX < currentSelection.start || hoverX > currentSelection.end)
(hoverX != 0 && hoverX < currentSelection.start) ||
hoverX > currentSelection.end
) {
ctx.beginPath();
ctx.strokeStyle = hoverPositionStrokeStyle;

View File

@ -131,6 +131,7 @@ describe('stateReducer', () => {
expect(state.mode).toEqual(SelectionMode.Dragging);
expect(state.selection).toEqual({ start: 2000, end: 3000 });
expect(state.mousedownX).toEqual(475);
expect(state.hoverX).toEqual(0);
expect(state.shouldPublish).toBeFalsy();
});
});

View File

@ -1,5 +1,8 @@
import constrainNumeric from './helpers/constrainNumeric';
export const CanvasLogicalWidth = 2000;
export const CanvasLogicalHeight = 500;
export enum HoverState {
Normal,
OverSelectionStart,
@ -60,6 +63,7 @@ export const stateReducer = (
): State => {
let mode: SelectionMode;
let newSelection: Selection;
let hoverX: number;
let mousedownX: number;
let cursorClass: string;
let hoverState: HoverState;
@ -71,6 +75,7 @@ export const stateReducer = (
mousedownX = prevMousedownX;
mode = SelectionMode.Normal;
cursorClass = 'cursor-auto';
hoverX = x;
hoverState = HoverState.Normal;
shouldPublish = false;
@ -78,6 +83,7 @@ export const stateReducer = (
case 'mousedown':
mousedownX = x;
cursorClass = 'cursor-auto';
hoverX = x;
hoverState = HoverState.Normal;
if (isHoveringSelectionStart(x, prevSelection)) {
@ -104,6 +110,7 @@ export const stateReducer = (
mousedownX = prevMousedownX;
mode = SelectionMode.Normal;
cursorClass = 'cursor-auto';
hoverX = x;
hoverState = HoverState.Normal;
if (
@ -119,11 +126,13 @@ export const stateReducer = (
mousedownX = prevMousedownX;
mode = prevMode;
cursorClass = 'cursor-auto';
hoverX = 0;
hoverState = HoverState.Normal;
break;
case 'mousemove':
mousedownX = prevMousedownX;
hoverX = x;
hoverState = HoverState.Normal;
switch (prevMode) {
@ -225,7 +234,7 @@ export const stateReducer = (
return {
width: width,
emptySelectionAction: emptySelectionAction,
hoverX: x,
hoverX: hoverX,
selection: newSelection,
origSelection: origSelection,
mousedownX: mousedownX,

View File

@ -1,120 +0,0 @@
import { useState, useEffect } from 'react';
import { MediaSet } from './generated/media_set';
import { Frames, VideoPosition } from './App';
import { WaveformCanvas } from './WaveformCanvas';
import {
HudCanvas,
EmptySelectionAction,
SelectionChangeEvent,
} from './HudCanvas';
import { SelectionMode } from './HudCanvasState';
import { Observable } from 'rxjs';
export interface Selection {
start: number;
end: number;
}
interface Props {
peaks: Observable<number[]>;
mediaSet: MediaSet;
position: VideoPosition;
viewport: Frames;
onSelectionChange: (selectionState: SelectionChangeEvent) => void;
}
export const CanvasLogicalWidth = 2_000;
export const CanvasLogicalHeight = 500;
export const Overview: React.FC<Props> = ({
peaks,
mediaSet,
position,
viewport,
onSelectionChange,
}: Props) => {
const [selectedPixels, setSelectedPixels] = useState({ start: 0, end: 0 });
const [positionPixels, setPositionPixels] = useState(0);
// side effects
// convert viewport from frames to canvas pixels.
// TODO: consider an adapter component to handle this.
useEffect(() => {
setSelectedPixels({
start: Math.round(
(viewport.start / mediaSet.audioFrames) * CanvasLogicalWidth
),
end: Math.round(
(viewport.end / mediaSet.audioFrames) * CanvasLogicalWidth
),
});
}, [viewport, mediaSet]);
// convert position from frames to canvas pixels:
// TODO: consider an adapter component to handle this.
useEffect(() => {
const ratio =
position.currentTime / (mediaSet.audioFrames / mediaSet.audioSampleRate);
setPositionPixels(Math.round(ratio * CanvasLogicalWidth));
frames;
}, [mediaSet, position]);
// handlers
// convert selection change from canvas pixels to frames, and trigger callback.
const handleSelectionChange = (selectionState: SelectionChangeEvent) => {
const {
mode,
prevMode,
selection: { start, end },
} = selectionState;
if (mode != SelectionMode.Normal || prevMode == SelectionMode.Normal) {
return;
}
onSelectionChange({
...selectionState,
selection: {
start: Math.round((start / CanvasLogicalWidth) * mediaSet.audioFrames),
end: Math.round((end / CanvasLogicalWidth) * mediaSet.audioFrames),
},
});
};
// render component
const hudStyles = {
borderLineWidth: 4,
borderStrokeStyle: 'red',
positionLineWidth: 4,
positionStrokeStyle: 'red',
hoverPositionStrokeStyle: 'transparent',
};
return (
<>
<div className={`relative grow-0 h-16`}>
<WaveformCanvas
peaks={peaks}
channels={mediaSet.audioChannels}
width={CanvasLogicalWidth}
height={CanvasLogicalHeight}
strokeStyle="black"
fillStyle="#003300"
alpha={1}
></WaveformCanvas>
<HudCanvas
width={CanvasLogicalWidth}
height={CanvasLogicalHeight}
emptySelectionAction={EmptySelectionAction.SelectPrevious}
styles={hudStyles}
position={positionPixels}
selection={selectedPixels}
onSelectionChange={handleSelectionChange}
/>
</div>
</>
);
};

79
frontend/src/Player.tsx Normal file
View File

@ -0,0 +1,79 @@
import { useEffect, useRef } from 'react';
// Props for the headless Player component.
interface Props {
  playState: PlayState;
  audioSrc: string;
  videoSrc: string;
  // used to jump to a new position:
  currentTime?: number;
  // invoked periodically with the current playback time in seconds.
  onPositionChanged: (currentTime: number) => void;
}
export enum PlayState {
  Paused,
  Playing,
}
// how often the playback-progress callback fires while playing.
const triggerCallbackIntervalMillis = 20;
// Headless playback controller. Renders nothing (returns null); instead it
// owns an off-DOM audio and video element pair, reacts to prop changes
// (play state, source URLs, forced seeks) and reports playback progress via
// onPositionChanged.
export const Player: React.FC<Props> = ({
  playState,
  audioSrc,
  videoSrc,
  currentTime,
  onPositionChanged,
}) => {
  const audioRef = useRef(new Audio());
  const videoRef = useRef(document.createElement('video'));

  // Poll the audio element for progress while playing.
  // NOTE(review): onPositionChanged is captured once on mount — assumes the
  // caller passes a referentially stable callback (e.g. a useReducer
  // dispatch wrapper); confirm.
  useEffect(() => {
    const intervalID = setInterval(() => {
      if (audioRef.current.paused) {
        return;
      }
      onPositionChanged(audioRef.current.currentTime);
    }, triggerCallbackIntervalMillis);
    // BUGFIX: clear the interval on unmount. Previously the timer was never
    // cleared, leaking callbacks against a detached audio element.
    return () => clearInterval(intervalID);
  }, []);

  // Start or stop both media elements when the requested play state changes.
  useEffect(() => {
    if (playState == PlayState.Paused && !audioRef.current.paused) {
      audioRef.current.pause();
      videoRef.current.pause();
      return;
    }
    if (playState == PlayState.Playing && audioRef.current.paused) {
      audioRef.current.play();
      videoRef.current.play();
      return;
    }
  }, [playState]);

  // Apply the audio source once it becomes available.
  useEffect(() => {
    if (audioSrc == '') {
      return;
    }
    audioRef.current.src = audioSrc;
    console.log('set audio src', audioSrc);
  }, [audioSrc]);

  // Apply the video source once it becomes available.
  useEffect(() => {
    if (videoSrc == '') {
      return;
    }
    videoRef.current.src = videoSrc;
    console.log('set video src', videoSrc);
  }, [videoSrc]);

  // A defined currentTime forces a seek of both elements, then echoes the
  // new position back to the caller.
  useEffect(() => {
    if (currentTime == undefined) {
      return;
    }
    audioRef.current.currentTime = currentTime;
    videoRef.current.currentTime = currentTime;
    onPositionChanged(currentTime);
  }, [currentTime]);

  return null;
};

View File

@ -1,160 +0,0 @@
import { useEffect, useState } from 'react';
import { Frames, VideoPosition, newRPC } from './App';
import { MediaSetServiceClientImpl, MediaSet } from './generated/media_set';
import { WaveformCanvas } from './WaveformCanvas';
import { HudCanvas, SelectionChangeEvent } from './HudCanvas';
import { EmptySelectionAction, SelectionMode } from './HudCanvasState';
import { from, Observable } from 'rxjs';
import { bufferCount } from 'rxjs/operators';
interface Props {
mediaSet: MediaSet;
position: VideoPosition;
viewport: Frames;
onSelectionChange: (selectionState: SelectionChangeEvent) => void;
}
export const CanvasLogicalWidth = 2000;
export const CanvasLogicalHeight = 500;
export const Waveform: React.FC<Props> = ({
mediaSet,
position,
viewport,
onSelectionChange,
}: Props) => {
const [peaks, setPeaks] = useState<Observable<number[]>>(from([]));
const [selectedFrames, setSelectedFrames] = useState({ start: 0, end: 0 });
// selectedPixels are the currently selected waveform canvas pixels. They may
// not match the actual selection due to the viewport setting, and probably
// shouldn't be leaking outside of the HudCanvasState.
const [selectedPixels, setSelectedPixels] = useState({
start: 0,
end: 0,
});
const [positionPixels, setPositionPixels] = useState<number | null>(0);
// effects
// load peaks on MediaSet change
useEffect(() => {
(async function () {
if (mediaSet == null) {
return;
}
if (viewport.start >= viewport.end) {
return;
}
const service = new MediaSetServiceClientImpl(newRPC());
const segment = await service.GetPeaksForSegment({
id: mediaSet.id,
numBins: CanvasLogicalWidth,
startFrame: viewport.start,
endFrame: viewport.end,
});
console.log('got segment', segment);
const peaks = from(segment.peaks).pipe(
bufferCount(mediaSet.audioChannels)
);
setPeaks(peaks);
})();
}, [viewport, mediaSet]);
// convert position to canvas pixels
useEffect(() => {
const frame = Math.round(position.currentTime * mediaSet.audioSampleRate);
if (frame < viewport.start || frame > viewport.end) {
setPositionPixels(null);
return;
}
const pixelsPerFrame = CanvasLogicalWidth / (viewport.end - viewport.start);
const positionPixels = (frame - viewport.start) * pixelsPerFrame;
setPositionPixels(positionPixels);
}, [mediaSet, position, viewport]);
// update selectedPixels on viewport change
useEffect(() => {
const start = Math.max(frameToCanvasX(selectedFrames.start), 0);
const end = Math.min(
frameToCanvasX(selectedFrames.end),
CanvasLogicalWidth
);
setSelectedPixels({ start, end });
}, [viewport, selectedFrames]);
// handlers
// convert selection change from canvas pixels to frames, and trigger callback.
const handleSelectionChange = (selectionState: SelectionChangeEvent) => {
const { mode, prevMode, selection } = selectionState;
const framesPerPixel = (viewport.end - viewport.start) / CanvasLogicalWidth;
const selectedFrames = {
start: Math.round(viewport.start + selection.start * framesPerPixel),
end: Math.round(viewport.start + selection.end * framesPerPixel),
};
if (mode == SelectionMode.Normal && prevMode != SelectionMode.Normal) {
setSelectedPixels(selection);
setSelectedFrames(selectedFrames);
}
onSelectionChange({
...selectionState,
selection: selectedFrames,
});
};
// helpers
const frameToCanvasX = (frame: number): number => {
const numFrames = viewport.end - viewport.start;
if (numFrames == 0) {
return 0;
}
const pixelsPerFrame = CanvasLogicalWidth / numFrames;
return Math.round((frame - viewport.start) * pixelsPerFrame);
};
// render component
const hudStyles = {
borderLineWidth: 0,
borderStrokeStyle: 'transparent',
positionLineWidth: 6,
positionStrokeStyle: 'red',
hoverPositionStrokeStyle: '#666666',
};
return (
<>
<div className={`relative grow`}>
<WaveformCanvas
peaks={peaks}
channels={mediaSet.audioChannels}
width={CanvasLogicalWidth}
height={CanvasLogicalHeight}
strokeStyle="green"
fillStyle="black"
alpha={1}
></WaveformCanvas>
<HudCanvas
width={CanvasLogicalWidth}
height={CanvasLogicalHeight}
emptySelectionAction={EmptySelectionAction.SelectNothing}
styles={hudStyles}
position={positionPixels}
selection={selectedPixels}
onSelectionChange={handleSelectionChange}
/>
</div>
</>
);
};

View File

@ -0,0 +1,18 @@
import frameToWaveformCanvasX from './frameToWaveformCanvasX';
describe('frameToWaveformCanvasX', () => {
it('returns null when the frame is before the viewport', () => {
const x = frameToWaveformCanvasX(100, { start: 200, end: 300 }, 2000);
expect(x).toBeNull();
});
it('returns null when the frame is after the viewport', () => {
const x = frameToWaveformCanvasX(400, { start: 200, end: 300 }, 2000);
expect(x).toBeNull();
});
it('returns the expected coordinate when the frame is inside the viewport', () => {
const x = frameToWaveformCanvasX(251, { start: 200, end: 300 }, 2000);
expect(x).toEqual(1020);
});
});

View File

@ -0,0 +1,16 @@
import { FrameRange } from '../AppState';
// Map an absolute audio frame to an x coordinate on the waveform canvas.
//
// Returns null when the frame lies outside the viewport and therefore has no
// on-canvas position.
//
// BUGFIX: also returns null for a degenerate (zero- or negative-width)
// viewport; previously start == end with frame == start divided by zero and
// produced NaN.
function frameToWaveformCanvasX(
  frame: number,
  viewport: { start: number; end: number },
  canvasWidth: number
): number | null {
  const numFrames = viewport.end - viewport.start;
  if (numFrames <= 0 || frame < viewport.start || frame > viewport.end) {
    return null;
  }
  const pixelsPerFrame = canvasWidth / numFrames;
  return (frame - viewport.start) * pixelsPerFrame;
}
export default frameToWaveformCanvasX;