// Unit tests for the stateReducer in AppState.
import { from } from 'rxjs';

import { stateReducer, State } from './AppState';
import { MediaSet } from './generated/media_set';
import { CanvasWidth, SelectionMode } from './HudCanvasState';
import { PlayState } from './Player';
const initialState: State = {
|
|
selection: { start: 0, end: 0 },
|
|
viewport: { start: 0, end: 441000 },
|
|
overviewPeaks: from([]),
|
|
waveformPeaks: from([]),
|
|
selectionCanvas: { x1: 0, x2: 0 },
|
|
viewportCanvas: { x1: 0, x2: CanvasWidth },
|
|
position: { currentTime: 0, frame: 0, percent: 0 },
|
|
audioSrc: '',
|
|
videoSrc: '',
|
|
currentTime: undefined,
|
|
playState: PlayState.Paused,
|
|
};
|
|
|
|
describe('stateReducer', () => {
|
|
describe('mediasetloaded', () => {
|
|
describe.each([
|
|
{
|
|
audioFrames: 4410000,
|
|
wantViewportFrames: 220500,
|
|
},
|
|
{
|
|
audioFrames: 44100,
|
|
wantViewportFrames: 2200,
|
|
},
|
|
])(
|
|
'mediaset with $audioFrames frames',
|
|
({ audioFrames, wantViewportFrames }) => {
|
|
it('generates the expected state', () => {
|
|
const mediaSet = MediaSet.fromPartial({
|
|
id: '123',
|
|
audioFrames: audioFrames,
|
|
});
|
|
const state = stateReducer(
|
|
{ ...initialState },
|
|
{ type: 'mediasetloaded', mediaSet: mediaSet }
|
|
);
|
|
expect(state.mediaSet).toBe(mediaSet);
|
|
expect(state.viewport.start).toEqual(0);
|
|
expect(state.viewport.end).toEqual(wantViewportFrames);
|
|
});
|
|
}
|
|
);
|
|
});
|
|
|
|
describe('setviewport', () => {
|
|
describe.each([
|
|
{
|
|
audioFrames: 441000,
|
|
viewport: { start: 0, end: 44100 },
|
|
selection: { start: 0, end: 0 },
|
|
wantViewportCanvas: { x1: 0, x2: 200 },
|
|
wantSelectionCanvas: { x1: 0, x2: 0 },
|
|
},
|
|
{
|
|
audioFrames: 441000,
|
|
viewport: { start: 0, end: 441000 },
|
|
selection: { start: 0, end: 0 },
|
|
wantViewportCanvas: { x1: 0, x2: 2000 },
|
|
wantSelectionCanvas: { x1: 0, x2: 0 },
|
|
},
|
|
{
|
|
audioFrames: 441000,
|
|
viewport: { start: 0, end: 441000 },
|
|
selection: { start: 0, end: 44100 },
|
|
wantViewportCanvas: { x1: 0, x2: 2000 },
|
|
wantSelectionCanvas: { x1: 0, x2: 200 },
|
|
},
|
|
{
|
|
audioFrames: 441000,
|
|
viewport: { start: 0, end: 22050 },
|
|
selection: { start: 0, end: 44100 },
|
|
wantViewportCanvas: { x1: 0, x2: 100 },
|
|
wantSelectionCanvas: { x1: 0, x2: 2000 },
|
|
},
|
|
{
|
|
audioFrames: 441000,
|
|
viewport: { start: 44100, end: 88200 },
|
|
selection: { start: 22050, end: 66150 },
|
|
wantViewportCanvas: { x1: 200, x2: 400 },
|
|
wantSelectionCanvas: { x1: 0, x2: 1000 },
|
|
},
|
|
])(
|
|
'selection $selection.start-$selection.end, viewport: $viewport.start-$viewport.end',
|
|
({
|
|
audioFrames,
|
|
viewport,
|
|
selection,
|
|
wantViewportCanvas,
|
|
wantSelectionCanvas,
|
|
}) => {
|
|
it('generates the expected state', () => {
|
|
const mediaSet = MediaSet.fromPartial({
|
|
id: '123',
|
|
audioFrames: audioFrames,
|
|
});
|
|
const state = stateReducer(
|
|
{ ...initialState, mediaSet: mediaSet, selection: selection },
|
|
{ type: 'setviewport', viewport: viewport }
|
|
);
|
|
|
|
expect(state.viewport).toEqual(viewport);
|
|
expect(state.selection).toEqual(selection);
|
|
expect(state.viewportCanvas).toEqual(wantViewportCanvas);
|
|
expect(state.selectionCanvas).toEqual(wantSelectionCanvas);
|
|
});
|
|
}
|
|
);
|
|
});
|
|
|
|
describe('viewportchanged', () => {
|
|
describe.each([
|
|
{
|
|
audioFrames: 441000,
|
|
event: {
|
|
mode: SelectionMode.Selecting,
|
|
prevMode: SelectionMode.Selecting,
|
|
selection: { x1: 0, x2: 200 },
|
|
},
|
|
selection: { start: 0, end: 0 },
|
|
wantViewport: { start: 0, end: 441000 },
|
|
wantSelectionCanvas: { x1: 0, x2: 0 },
|
|
},
|
|
{
|
|
audioFrames: 441000,
|
|
event: {
|
|
mode: SelectionMode.Normal,
|
|
prevMode: SelectionMode.Selecting,
|
|
selection: { x1: 0, x2: 200 },
|
|
},
|
|
selection: { start: 0, end: 0 },
|
|
wantViewport: { start: 0, end: 44100 },
|
|
wantSelectionCanvas: { x1: 0, x2: 0 },
|
|
},
|
|
{
|
|
audioFrames: 441000,
|
|
event: {
|
|
mode: SelectionMode.Normal,
|
|
prevMode: SelectionMode.Selecting,
|
|
selection: { x1: 0, x2: 200 },
|
|
},
|
|
selection: { start: 0, end: 22050 },
|
|
wantViewport: { start: 0, end: 44100 },
|
|
wantSelectionCanvas: { x1: 0, x2: 1000 },
|
|
},
|
|
{
|
|
audioFrames: 441000,
|
|
event: {
|
|
mode: SelectionMode.Normal,
|
|
prevMode: SelectionMode.Selecting,
|
|
selection: { x1: 1000, x2: 1500 },
|
|
},
|
|
selection: { start: 220500, end: 264600 },
|
|
wantViewport: { start: 220500, end: 330750 },
|
|
wantSelectionCanvas: { x1: 0, x2: 800 },
|
|
},
|
|
])(
|
|
'mode $event.mode, audioFrames $audioFrames, canvas range $event.selection.x1-$event.selection.x2, selectedFrames $selection.start-$selection.end',
|
|
({
|
|
audioFrames,
|
|
event,
|
|
selection,
|
|
wantViewport,
|
|
wantSelectionCanvas,
|
|
}) => {
|
|
it('generates the expected state', () => {
|
|
const mediaSet = MediaSet.fromPartial({
|
|
id: '123',
|
|
audioFrames: audioFrames,
|
|
});
|
|
const state = stateReducer(
|
|
{
|
|
...initialState,
|
|
mediaSet: mediaSet,
|
|
viewport: { start: 0, end: audioFrames },
|
|
selection: selection,
|
|
},
|
|
{ type: 'viewportchanged', event }
|
|
);
|
|
|
|
expect(state.selection).toEqual(selection);
|
|
expect(state.viewport).toEqual(wantViewport);
|
|
expect(state.selectionCanvas).toEqual(wantSelectionCanvas);
|
|
});
|
|
}
|
|
);
|
|
});
|
|
|
|
describe('waveformselectionchanged', () => {
|
|
describe.each([
|
|
{
|
|
name: 'paused',
|
|
audioSampleRate: 44100,
|
|
event: {
|
|
mode: SelectionMode.Selecting,
|
|
prevMode: SelectionMode.Selecting,
|
|
selection: { x1: 100, x2: 200 },
|
|
},
|
|
playState: PlayState.Paused,
|
|
position: { frame: 0, currentTime: 0, percent: 0 },
|
|
viewport: { start: 0, end: 88200 },
|
|
wantSelection: { start: 4410, end: 8820 },
|
|
wantCurrentTime: undefined,
|
|
},
|
|
{
|
|
name: 'playing, viewport 100%, selection is in progress',
|
|
audioSampleRate: 44100,
|
|
event: {
|
|
mode: SelectionMode.Selecting,
|
|
prevMode: SelectionMode.Selecting,
|
|
selection: { x1: 200, x2: 220 },
|
|
},
|
|
playState: PlayState.Playing,
|
|
position: { frame: 22000, currentTime: 0.4988, percent: 4.98 },
|
|
viewport: { start: 0, end: 441000 },
|
|
wantSelection: { start: 44100, end: 48510 },
|
|
wantCurrentTime: undefined,
|
|
},
|
|
{
|
|
name: 'playing, viewport partial, selection is in progress',
|
|
audioSampleRate: 44100,
|
|
event: {
|
|
mode: SelectionMode.Selecting,
|
|
prevMode: SelectionMode.Selecting,
|
|
selection: { x1: 0, x2: 100 },
|
|
},
|
|
playState: PlayState.Playing,
|
|
position: { frame: 22000, currentTime: 0.4988, percent: 4.98 },
|
|
viewport: { start: 88200, end: 176400 },
|
|
wantSelection: { start: 88200, end: 92610 },
|
|
wantCurrentTime: undefined,
|
|
},
|
|
{
|
|
name: 'playing, selection is ending, currFrame is before selection start',
|
|
audioSampleRate: 44100,
|
|
event: {
|
|
mode: SelectionMode.Normal,
|
|
prevMode: SelectionMode.Selecting,
|
|
selection: { x1: 1001, x2: 1200 },
|
|
},
|
|
playState: PlayState.Playing,
|
|
position: { frame: 22000, currentTime: 0.4988, percent: 4.98 },
|
|
viewport: { start: 0, end: 88200 },
|
|
wantSelection: { start: 44144, end: 52920 },
|
|
wantCurrentTime: 1.000997732426304,
|
|
},
|
|
{
|
|
name: 'playing, selection is ending, currFrame is within selection',
|
|
audioSampleRate: 44100,
|
|
event: {
|
|
mode: SelectionMode.Normal,
|
|
prevMode: SelectionMode.Selecting,
|
|
selection: { x1: 1001, x2: 1200 },
|
|
},
|
|
playState: PlayState.Playing,
|
|
position: { frame: 50000, currentTime: 1.133, percent: 11.33 },
|
|
viewport: { start: 0, end: 88200 },
|
|
wantSelection: { start: 44144, end: 52920 },
|
|
wantCurrentTime: undefined,
|
|
},
|
|
{
|
|
name: 'playing, selection is ending, currFrame is after selection end',
|
|
audioSampleRate: 44100,
|
|
event: {
|
|
mode: SelectionMode.Normal,
|
|
prevMode: SelectionMode.Selecting,
|
|
selection: { x1: 1001, x2: 1200 },
|
|
},
|
|
playState: PlayState.Playing,
|
|
position: { frame: 88200, currentTime: 2.0, percent: 20.0 },
|
|
viewport: { start: 0, end: 88200 },
|
|
wantSelection: { start: 44144, end: 52920 },
|
|
wantCurrentTime: 1.000997732426304,
|
|
},
|
|
])(
|
|
'$name',
|
|
({
|
|
audioSampleRate,
|
|
event,
|
|
playState,
|
|
position,
|
|
viewport,
|
|
wantSelection,
|
|
wantCurrentTime,
|
|
}) => {
|
|
it('generates the expected state', () => {
|
|
const mediaSet = MediaSet.fromPartial({
|
|
id: '123',
|
|
audioFrames: 441000,
|
|
audioSampleRate: audioSampleRate,
|
|
});
|
|
const state = stateReducer(
|
|
{
|
|
...initialState,
|
|
position,
|
|
mediaSet,
|
|
playState,
|
|
viewport,
|
|
},
|
|
{ type: 'waveformselectionchanged', event }
|
|
);
|
|
|
|
expect(state.selection).toEqual(wantSelection);
|
|
expect(state.currentTime).toEqual(wantCurrentTime);
|
|
});
|
|
}
|
|
);
|
|
});
|
|
});
|