// clipper/frontend/src/AppState.tsx

import { MediaSet } from './generated/media_set';
import { Observable } from 'rxjs';
import { SelectionChangeEvent } from './HudCanvas';
import { SelectionMode, CanvasLogicalWidth } from './HudCanvasState';
import { PlayState } from './Player';
import { zoomViewportIn, zoomViewportOut } from './helpers/zoom';
import frameToWaveformCanvasX from './helpers/frameToWaveformCanvasX';
// Each zoom-in/zoom-out step scales the viewport by this factor.
export const zoomFactor = 2;
// Width, in overview-canvas logical pixels, of the initial viewport chosen
// when a media set loads (see handleMediaSetLoaded, which converts it to a
// frame count).
const initialViewportCanvasPixels = 100;

// A range expressed in audio frame indices.
export interface FrameRange {
  start: number;
  end: number;
}

// A range expressed in canvas logical-pixel x coordinates.
// TODO: rename to x1, x2
interface CanvasRange {
  start: number;
  end: number;
}

// The playback position in the three units the UI needs.
interface Position {
  currentTime: number; // seconds
  frame: number; // audio frame index
  percent: number; // 0-100, fraction of the whole media set
}

// Application state managed by stateReducer.
export interface State {
  mediaSet?: MediaSet;
  // current selection, in frames
  selection: FrameRange;
  // currently visible region of the waveform, in frames
  viewport: FrameRange;
  overviewPeaks: Observable<number[]>;
  waveformPeaks: Observable<number[]>;
  // selection canvas. Not kept up-to-date, only used for pushing updates.
  selectionCanvas: CanvasRange;
  // viewport canvas. Not kept up-to-date, only used for pushing updates.
  viewportCanvas: CanvasRange;
  audioSrc: string;
  videoSrc: string;
  position: Position;
  // playback position in seconds, only used for forcing a change of position.
  currentTime?: number;
  playState: PlayState;
}
// Fired when the media set metadata has been fetched.
interface MediaSetLoadedAction {
  type: 'mediasetloaded';
  mediaSet: MediaSet;
}
// Fired when the overview (whole-file) peak stream becomes available.
interface OverviewPeaksLoadedAction {
  type: 'overviewpeaksloaded';
  peaks: Observable<number[]>;
}
// Fired when the zoomed waveform peak stream becomes available.
interface WaveformPeaksLoadedAction {
  type: 'waveformpeaksloaded';
  peaks: Observable<number[]>;
}
// Fired when the audio source is ready; carries the frame count that
// replaces mediaSet.audioFrames (see handleAudioSourceLoaded).
interface AudioSourceLoadedAction {
  type: 'audiosourceloaded';
  numFrames: number;
  src: string;
}
// Fired when the video source is ready.
interface VideoSourceLoadedAction {
  type: 'videosourceloaded';
  src: string;
}
// Explicitly sets the viewport, in frames.
interface SetViewportAction {
  type: 'setviewport';
  viewport: FrameRange;
}
// Zooms the viewport in by zoomFactor.
interface ZoomInAction {
  type: 'zoomin';
}
// Zooms the viewport out by zoomFactor.
interface ZoomOutAction {
  type: 'zoomout';
}
// Fired when the overview-canvas viewport control changes.
interface ViewportChangedAction {
  type: 'viewportchanged';
  event: SelectionChangeEvent;
}
// Fired when the waveform-canvas selection changes.
interface WaveformSelectionChangedAction {
  type: 'waveformselectionchanged';
  event: SelectionChangeEvent;
}
// Fired on each playback-position tick from the player, in seconds.
interface PositionChangedAction {
  type: 'positionchanged';
  currentTime: number;
}
// Jumps playback to an absolute time, in seconds.
interface SkipAction {
  type: 'skip';
  currentTime: number;
}
// Starts playback.
interface PlayAction {
  type: 'play';
}
// Pauses playback.
interface PauseAction {
  type: 'pause';
}
// Discriminated union of every action accepted by stateReducer.
type Action =
  | MediaSetLoadedAction
  | OverviewPeaksLoadedAction
  | WaveformPeaksLoadedAction
  | AudioSourceLoadedAction
  | VideoSourceLoadedAction
  | SetViewportAction
  | ZoomInAction
  | ZoomOutAction
  | ViewportChangedAction
  | WaveformSelectionChangedAction
  | PositionChangedAction
  | SkipAction
  | PlayAction
  | PauseAction;
// Top-level reducer: routes each Action to its handler and returns the next
// immutable State. Each `action.type` comparison narrows `action` to the
// concrete action interface, so handlers receive fully-typed payloads.
export const stateReducer = (state: State, action: Action): State => {
  if (action.type === 'mediasetloaded') {
    return handleMediaSetLoaded(state, action);
  }
  if (action.type === 'overviewpeaksloaded') {
    return handleOverviewPeaksLoaded(state, action);
  }
  if (action.type === 'waveformpeaksloaded') {
    return handleWaveformPeaksLoaded(state, action);
  }
  if (action.type === 'audiosourceloaded') {
    return handleAudioSourceLoaded(state, action);
  }
  if (action.type === 'videosourceloaded') {
    return handleVideoSourceLoaded(state, action);
  }
  if (action.type === 'setviewport') {
    return setViewport(state, action);
  }
  if (action.type === 'zoomin') {
    return handleZoomIn(state);
  }
  if (action.type === 'zoomout') {
    return handleZoomOut(state);
  }
  if (action.type === 'viewportchanged') {
    return handleViewportChanged(state, action);
  }
  if (action.type === 'waveformselectionchanged') {
    return handleWaveformSelectionChanged(state, action);
  }
  if (action.type === 'positionchanged') {
    return handlePositionChanged(state, action);
  }
  if (action.type === 'skip') {
    return skip(state, action);
  }
  if (action.type === 'play') {
    return play(state);
  }
  if (action.type === 'pause') {
    return pause(state);
  }
  // Exhaustiveness guard: when every Action variant is handled above,
  // `action` is narrowed to `never` here (and `never` is assignable to
  // State). Adding a new variant without a branch fails to compile.
  return action;
};
// Stores the freshly loaded media set and initializes the viewport to cover
// roughly `initialViewportCanvasPixels` worth of the overview canvas,
// clamped so it never exceeds the total number of audio frames.
function handleMediaSetLoaded(
  state: State,
  { mediaSet }: MediaSetLoadedAction
): State {
  const framesPerCanvasPixel = Math.round(
    mediaSet.audioFrames / CanvasLogicalWidth
  );
  const initialFrames = framesPerCanvasPixel * initialViewportCanvasPixels;
  const end = Math.min(initialFrames, mediaSet.audioFrames);
  // Delegate to setViewport so the derived canvas ranges are refreshed too.
  return setViewport(
    { ...state, mediaSet },
    { type: 'setviewport', viewport: { start: 0, end } }
  );
}
// Stores the observable stream of overview (whole-file) waveform peaks.
// Fix: added the explicit `: State` return annotation for consistency with
// every other handler in this file (and so the body is checked against it).
function handleOverviewPeaksLoaded(
  state: State,
  { peaks }: OverviewPeaksLoadedAction
): State {
  return { ...state, overviewPeaks: peaks };
}
// Stores the observable stream of zoomed waveform peaks.
// Fix: added the explicit `: State` return annotation for consistency with
// every other handler in this file (and so the body is checked against it).
function handleWaveformPeaksLoaded(
  state: State,
  { peaks }: WaveformPeaksLoadedAction
): State {
  return { ...state, waveformPeaks: peaks };
}
// Records the audio element source URL and overwrites the media set's frame
// count with the count reported for the loaded audio. No-op until the media
// set itself has loaded.
function handleAudioSourceLoaded(
  state: State,
  { src, numFrames }: AudioSourceLoadedAction
): State {
  const { mediaSet } = state;
  if (mediaSet == null) {
    return state;
  }
  const updatedMediaSet = { ...mediaSet, audioFrames: numFrames };
  return { ...state, audioSrc: src, mediaSet: updatedMediaSet };
}
// Records the video element source URL.
function handleVideoSourceLoaded(
  state: State,
  { src }: VideoSourceLoadedAction
): State {
  const next = { ...state };
  next.videoSrc = src;
  return next;
}
// Applies a new viewport (in frames) and re-derives both canvas-space ranges
// that depend on it: the overview-canvas viewport rectangle and the
// waveform-canvas selection rectangle. No-op until the media set has loaded.
function setViewport(state: State, { viewport }: SetViewportAction): State {
  const { mediaSet, selection } = state;
  if (!mediaSet) {
    return state;
  }
  // Convert a frame index to a logical x position on the overview canvas.
  const toOverviewX = (frame: number): number =>
    Math.round((frame / mediaSet.audioFrames) * CanvasLogicalWidth);
  return {
    ...state,
    viewport: viewport,
    viewportCanvas: {
      start: toOverviewX(viewport.start),
      end: toOverviewX(viewport.end),
    },
    selectionCanvas: selectionToWaveformCanvasRange(selection, viewport),
  };
}
// Zooms the viewport in by zoomFactor around the selection/playhead frame,
// then re-derives the canvas ranges via setViewport. No-op until the media
// set has loaded.
function handleZoomIn(state: State): State {
  const { mediaSet, viewport, selection, position } = state;
  if (!mediaSet) {
    return state;
  }
  const zoomed = zoomViewportIn(
    viewport,
    mediaSet.audioFrames,
    selection,
    position.frame,
    zoomFactor
  );
  // TODO: refactoring zoom helpers to use CanvasRange may avoid this step:
  return setViewport(state, { type: 'setviewport', viewport: zoomed });
}
// Zooms the viewport out by zoomFactor around the selection/playhead frame,
// then re-derives the canvas ranges via setViewport. No-op until the media
// set has loaded.
function handleZoomOut(state: State): State {
  const {
    mediaSet,
    viewport,
    selection,
    position: { frame },
  } = state;
  if (!mediaSet) {
    return state;
  }
  // FIX: previously this passed position.currentTime (seconds) as the zoom
  // anchor, while the symmetric handleZoomIn passes position.frame. The zoom
  // helpers operate on FrameRange viewports, so the anchor must be a frame
  // index — passing seconds anchored the zoom near frame 0.
  // NOTE(review): confirm zoomViewportOut's anchor parameter against
  // helpers/zoom before merging.
  const newViewport = zoomViewportOut(
    viewport,
    mediaSet.audioFrames,
    selection,
    frame,
    zoomFactor
  );
  // TODO: refactoring zoom helpers to use CanvasRange may avoid this step:
  return setViewport(state, { type: 'setviewport', viewport: newViewport });
}
// Handles the user moving/resizing the viewport control on the overview
// canvas. Only Normal-mode events commit a change; in-progress gesture modes
// are ignored. Converts the canvas-space range back into frames and
// refreshes the waveform selection rectangle for the new viewport.
function handleViewportChanged(
  state: State,
  { event: { mode, selection: canvasRange } }: ViewportChangedAction
): State {
  const { mediaSet, selection } = state;
  if (!mediaSet || mode != SelectionMode.Normal) {
    return state;
  }
  // Convert an overview-canvas x position back into a frame index.
  const canvasToFrame = (x: number): number =>
    Math.round(mediaSet.audioFrames * (x / CanvasLogicalWidth));
  const newViewport = {
    start: canvasToFrame(canvasRange.start),
    end: canvasToFrame(canvasRange.end),
  };
  return {
    ...state,
    viewport: newViewport,
    selectionCanvas: selectionToWaveformCanvasRange(selection, newViewport),
  };
}
// Handles the user adjusting the selection on the waveform canvas. The
// canvas-space range is mapped into frames relative to the current viewport.
// When a gesture finishes (transition into Normal mode) and playback is
// paused or the playhead sits outside the new selection, the playhead is
// forced to the selection start. No-op until the media set has loaded.
function handleWaveformSelectionChanged(
  state: State,
  {
    event: { mode, prevMode, selection: canvasRange },
  }: WaveformSelectionChangedAction
): State {
  const { mediaSet, playState, viewport, position } = state;
  if (mediaSet == null) {
    return state;
  }
  // Map waveform-canvas x coordinates into frame indices within the viewport.
  const framesPerPixel = (viewport.end - viewport.start) / CanvasLogicalWidth;
  const toFrame = (x: number): number =>
    Math.round(viewport.start + x * framesPerPixel);
  const newSelection = {
    start: toFrame(canvasRange.start),
    end: toFrame(canvasRange.end),
  };
  const gestureEnded =
    prevMode != SelectionMode.Normal && mode == SelectionMode.Normal;
  const playheadOutside =
    position.frame < newSelection.start || position.frame > newSelection.end;
  let currentTime = state.currentTime;
  if (gestureEnded && (playState == PlayState.Paused || playheadOutside)) {
    currentTime = newSelection.start / mediaSet.audioSampleRate;
  }
  return {
    ...state,
    selection: newSelection,
    currentTime: currentTime,
  };
}
// Handles a playback-position tick from the player. Derives the frame index
// and percent progress from the tick time. When playback crosses the end of
// a non-empty selection on this tick, pauses and queues a seek back to the
// selection start via the force-seek `currentTime` field.
// Fix: removed a stray debug console.log left in the reducer.
function handlePositionChanged(
  state: State,
  { currentTime }: PositionChangedAction
): State {
  const {
    mediaSet,
    selection,
    position: { frame: prevFrame },
  } = state;
  if (mediaSet == null) {
    return state;
  }
  const frame = Math.round(currentTime * mediaSet.audioSampleRate);
  const percent = (frame / mediaSet.audioFrames) * 100;
  // Reset play position and pause if selection end passed. forceCurrentTime
  // stays undefined otherwise, which clears any previously forced seek.
  let playState = state.playState;
  let forceCurrentTime;
  if (
    selection.start != selection.end &&
    prevFrame < selection.end &&
    frame >= selection.end
  ) {
    playState = PlayState.Paused;
    forceCurrentTime = selection.start / mediaSet.audioSampleRate;
  }
  return {
    ...state,
    playState,
    currentTime: forceCurrentTime,
    position: {
      currentTime,
      frame,
      percent,
    },
  };
}
// Jumps playback to an absolute time (seconds) via the force-seek field.
function skip(state: State, { currentTime }: SkipAction): State {
  return { ...state, currentTime };
}
// Switches the transport into the playing state.
function play(state: State): State {
  const next: State = { ...state, playState: PlayState.Playing };
  return next;
}
// Switches the transport into the paused state. When a non-empty selection
// exists, also queues a seek back to the selection start so the next play
// resumes from there. No-op until the media set has loaded.
function pause(state: State): State {
  const { mediaSet, selection } = state;
  if (!mediaSet) {
    return state;
  }
  const hasSelection = selection.start != selection.end;
  const currentTime = hasSelection
    ? selection.start / mediaSet.audioSampleRate
    : undefined;
  return { ...state, currentTime, playState: PlayState.Paused };
}
// helpers
// Maps a frame-space selection onto waveform-canvas x coordinates for the
// given viewport. A selection edge with no on-canvas position (presumably
// frameToWaveformCanvasX returns null/undefined for frames outside the
// viewport — TODO confirm against its implementation) clamps to the nearest
// canvas edge: start to 0, end to CanvasLogicalWidth. A degenerate result
// (both edges at the same x) collapses to the empty range {0, 0}.
function selectionToWaveformCanvasRange(
  selection: FrameRange,
  viewport: FrameRange
): CanvasRange {
  // FIX: use ?? instead of || so a legitimate x of 0 is not treated as
  // "off-viewport". Previously `x2 || CanvasLogicalWidth` sent a selection
  // end that genuinely mapped to x=0 all the way to the right edge, and the
  // second `x1 || 0` was redundant after x1 had already been defaulted.
  const x1 =
    frameToWaveformCanvasX(selection.start, viewport, CanvasLogicalWidth) ?? 0;
  const x2 =
    frameToWaveformCanvasX(selection.end, viewport, CanvasLogicalWidth) ??
    CanvasLogicalWidth;
  if (x1 == x2) {
    return { start: 0, end: 0 };
  }
  return { start: x1, end: x2 };
}