import { MediaSet } from './generated/media_set';
import { stateReducer, State } from './AppState';
import { from } from 'rxjs';
import { PlayState } from './Player';
import { CanvasWidth, SelectionMode } from './HudCanvasState';

const initialState: State = {
|
|
selection: { start: 0, end: 0 },
|
|
viewport: { start: 0, end: 441000 },
|
|
overviewPeaks: from([]),
|
|
waveformPeaks: from([]),
|
|
selectionCanvas: { x1: 0, x2: 0 },
|
|
viewportCanvas: { x1: 0, x2: CanvasWidth },
|
|
position: { currentTime: 0, frame: 0, percent: 0 },
|
|
audioSrc: '',
|
|
videoSrc: '',
|
|
currentTime: 0,
|
|
playState: PlayState.Paused,
|
|
};
|
|
|
|
describe('stateReducer', () => {
|
|
describe('mediasetloaded', () => {
|
|
describe.each([
|
|
{
|
|
audioFrames: 4410000,
|
|
wantViewportFrames: 220500,
|
|
},
|
|
{
|
|
audioFrames: 44100,
|
|
wantViewportFrames: 2200,
|
|
},
|
|
])(
|
|
'mediaset with $audioFrames frames',
|
|
({ audioFrames, wantViewportFrames }) => {
|
|
it('generates the expected state', () => {
|
|
const mediaSet = MediaSet.fromPartial({
|
|
id: '123',
|
|
audioFrames: audioFrames,
|
|
});
|
|
const state = stateReducer(
|
|
{ ...initialState },
|
|
{ type: 'mediasetloaded', mediaSet: mediaSet }
|
|
);
|
|
expect(state.mediaSet).toBe(mediaSet);
|
|
expect(state.viewport.start).toEqual(0);
|
|
expect(state.viewport.end).toEqual(wantViewportFrames);
|
|
});
|
|
}
|
|
);
|
|
});
|
|
|
|
describe('setviewport', () => {
|
|
describe.each([
|
|
{
|
|
audioFrames: 441000,
|
|
viewport: { start: 0, end: 44100 },
|
|
selection: { start: 0, end: 0 },
|
|
wantViewportCanvas: { x1: 0, x2: 200 },
|
|
wantSelectionCanvas: { x1: 0, x2: 0 },
|
|
},
|
|
{
|
|
audioFrames: 441000,
|
|
viewport: { start: 0, end: 441000 },
|
|
selection: { start: 0, end: 0 },
|
|
wantViewportCanvas: { x1: 0, x2: 2000 },
|
|
wantSelectionCanvas: { x1: 0, x2: 0 },
|
|
},
|
|
{
|
|
audioFrames: 441000,
|
|
viewport: { start: 0, end: 441000 },
|
|
selection: { start: 0, end: 44100 },
|
|
wantViewportCanvas: { x1: 0, x2: 2000 },
|
|
wantSelectionCanvas: { x1: 0, x2: 200 },
|
|
},
|
|
{
|
|
audioFrames: 441000,
|
|
viewport: { start: 0, end: 22050 },
|
|
selection: { start: 0, end: 44100 },
|
|
wantViewportCanvas: { x1: 0, x2: 100 },
|
|
wantSelectionCanvas: { x1: 0, x2: 2000 },
|
|
},
|
|
{
|
|
audioFrames: 441000,
|
|
viewport: { start: 44100, end: 88200 },
|
|
selection: { start: 22050, end: 66150 },
|
|
wantViewportCanvas: { x1: 200, x2: 400 },
|
|
wantSelectionCanvas: { x1: 0, x2: 1000 },
|
|
},
|
|
])(
|
|
'selection $selection.start-$selection.end, viewport: $viewport.start-$viewport.end',
|
|
({
|
|
audioFrames,
|
|
viewport,
|
|
selection,
|
|
wantViewportCanvas,
|
|
wantSelectionCanvas,
|
|
}) => {
|
|
it('generates the expected state', () => {
|
|
const mediaSet = MediaSet.fromPartial({
|
|
id: '123',
|
|
audioFrames: audioFrames,
|
|
});
|
|
const state = stateReducer(
|
|
{ ...initialState, mediaSet: mediaSet, selection: selection },
|
|
{ type: 'setviewport', viewport: viewport }
|
|
);
|
|
|
|
expect(state.viewport).toEqual(viewport);
|
|
expect(state.selection).toEqual(selection);
|
|
expect(state.viewportCanvas).toEqual(wantViewportCanvas);
|
|
expect(state.selectionCanvas).toEqual(wantSelectionCanvas);
|
|
});
|
|
}
|
|
);
|
|
});
|
|
|
|
describe('viewportchanged', () => {
|
|
describe.each([
|
|
{
|
|
audioFrames: 441000,
|
|
event: {
|
|
mode: SelectionMode.Selecting,
|
|
prevMode: SelectionMode.Selecting,
|
|
selection: { x1: 0, x2: 200 },
|
|
},
|
|
selection: { start: 0, end: 0 },
|
|
wantViewport: { start: 0, end: 441000 },
|
|
wantSelectionCanvas: { x1: 0, x2: 0 },
|
|
},
|
|
{
|
|
audioFrames: 441000,
|
|
event: {
|
|
mode: SelectionMode.Normal,
|
|
prevMode: SelectionMode.Selecting,
|
|
selection: { x1: 0, x2: 200 },
|
|
},
|
|
selection: { start: 0, end: 0 },
|
|
wantViewport: { start: 0, end: 44100 },
|
|
wantSelectionCanvas: { x1: 0, x2: 0 },
|
|
},
|
|
{
|
|
audioFrames: 441000,
|
|
event: {
|
|
mode: SelectionMode.Normal,
|
|
prevMode: SelectionMode.Selecting,
|
|
selection: { x1: 0, x2: 200 },
|
|
},
|
|
selection: { start: 0, end: 22050 },
|
|
wantViewport: { start: 0, end: 44100 },
|
|
wantSelectionCanvas: { x1: 0, x2: 1000 },
|
|
},
|
|
{
|
|
audioFrames: 441000,
|
|
event: {
|
|
mode: SelectionMode.Normal,
|
|
prevMode: SelectionMode.Selecting,
|
|
selection: { x1: 1000, x2: 1500 },
|
|
},
|
|
selection: { start: 220500, end: 264600 },
|
|
wantViewport: { start: 220500, end: 330750 },
|
|
wantSelectionCanvas: { x1: 0, x2: 800 },
|
|
},
|
|
])(
|
|
'mode $event.mode, audioFrames $audioFrames, canvas range $event.selection.x1-$event.selection.x2, selectedFrames $selection.start-$selection.end',
|
|
({
|
|
audioFrames,
|
|
event,
|
|
selection,
|
|
wantViewport,
|
|
wantSelectionCanvas,
|
|
}) => {
|
|
it('generates the expected state', () => {
|
|
const mediaSet = MediaSet.fromPartial({
|
|
id: '123',
|
|
audioFrames: audioFrames,
|
|
});
|
|
const state = stateReducer(
|
|
{
|
|
...initialState,
|
|
mediaSet: mediaSet,
|
|
viewport: { start: 0, end: audioFrames },
|
|
selection: selection,
|
|
},
|
|
{ type: 'viewportchanged', event }
|
|
);
|
|
|
|
expect(state.selection).toEqual(selection);
|
|
expect(state.viewport).toEqual(wantViewport);
|
|
expect(state.selectionCanvas).toEqual(wantSelectionCanvas);
|
|
});
|
|
}
|
|
);
|
|
});
|
|
});
|