Refactor Overview component
parent 2d9f2d80b2
commit b3559bb94e
@@ -26,10 +26,15 @@ export interface Frames {
  end: number;
}

export interface VideoPosition {
  currentTime: number;
  percent: number;
}

function App(): JSX.Element {
  const [mediaSet, setMediaSet] = useState<MediaSet | null>(null);
  const [video, _setVideo] = useState(document.createElement('video'));
  const [position, setPosition] = useState(0);
  const [position, setPosition] = useState({ currentTime: 0, percent: 0 });
  const [viewport, setViewport] = useState({ start: 0, end: 0 });

  // effects
@@ -54,10 +59,21 @@ function App(): JSX.Element {

  // setup player on first page load only:
  useEffect(() => {
    if (mediaSet == null) {
      return;
    }

    // assume mediaSet never changes once loaded
    setInterval(() => {
      setPosition(video.currentTime);
      if (video.currentTime == position.currentTime) {
        return;
      }
      const duration = mediaSet.audioFrames / mediaSet.audioSampleRate;
      const percent = (video.currentTime / duration) * 100;

      setPosition({ currentTime: video.currentTime, percent: percent });
    }, 100);
  }, []);
  }, [mediaSet]);

  // load video when MediaSet is loaded:
  useEffect(() => {
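For reference, the percent value stored in the new VideoPosition state is derived from the media duration implied by the audio frame count, as in the polling callback above. A minimal standalone sketch of that conversion (the helper name and shape are illustrative, not part of the commit):

// Convert a playback time in seconds into a 0-100 percentage of the media's
// duration, where the duration is derived from the audio frame count.
function toVideoPosition(
  currentTime: number,
  audioFrames: number,
  audioSampleRate: number
): { currentTime: number; percent: number } {
  const duration = audioFrames / audioSampleRate; // duration in seconds
  const percent = duration > 0 ? (currentTime / duration) * 100 : 0;
  return { currentTime, percent };
}

// e.g. 30s into a 120s file => { currentTime: 30, percent: 25 }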
@@ -112,12 +128,10 @@ function App(): JSX.Element {
      return;
    }

    if (selection.start >= selection.end) {
      setViewport({ start: 0, end: mediaSet.audioFrames });
      return;
    }

    setViewport({ ...selection });
    setViewport({
      start: mediaSet.audioFrames * (selection.start / 100),
      end: mediaSet.audioFrames * (selection.end / 100),
    });
  };

  // render component
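The selection the overview hands back is now expressed as percentages of the overview width rather than frames, and App scales it into audio frames before updating the viewport. A small sketch of that contract (the helper name is illustrative, not part of the commit):

// Scale a percent-based selection (0-100) into absolute audio frames.
function percentSelectionToFrames(
  selection: { start: number; end: number },
  audioFrames: number
): { start: number; end: number } {
  return {
    start: audioFrames * (selection.start / 100),
    end: audioFrames * (selection.end / 100),
  };
}

// e.g. { start: 25, end: 50 } over 1_000_000 frames => { start: 250000, end: 500000 }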
@@ -157,10 +171,6 @@ function App(): JSX.Element {
        offsetPixels={offsetPixels}
        height={80}
        position={position}
        selection={viewport}
        onSelectionStart={() => {
          // empty
        }}
        onSelectionChange={handleOverviewSelectionChange}
      />

@@ -1,19 +1,20 @@
import { useState, useEffect, useRef, MouseEvent } from 'react';
import { MediaSetServiceClientImpl, MediaSet } from './generated/media_set';
import { Frames, newRPC } from './App';
import { Frames, newRPC, VideoPosition } from './App';
import { WaveformCanvas } from './WaveformCanvas';
import { mouseEventToCanvasX } from './Helpers';
import { secsToCanvasX } from './Helpers';
import { from, Observable } from 'rxjs';
import { map } from 'rxjs/operators';

export interface Selection {
  start: number;
  end: number;
}

interface Props {
  mediaSet: MediaSet;
  height: number;
  offsetPixels: number;
  position: number;
  selection: Frames;
  onSelectionStart: (x1: number) => void;
  position: VideoPosition;
  onSelectionChange: (selection: Frames) => void;
}

@@ -21,6 +22,15 @@ enum Mode {
  Normal,
  Selecting,
  Dragging,
  ResizingStart,
  ResizingEnd,
}

enum HoverState {
  Normal,
  OverSelectionStart,
  OverSelectionEnd,
  OverSelection,
}

const CanvasLogicalWidth = 2_000;
@@ -28,28 +38,28 @@ const CanvasLogicalHeight = 500;

const emptySelection = { start: 0, end: 0 };

// TODO: render position marker during playback
export const Overview: React.FC<Props> = ({
  mediaSet,
  height,
  offsetPixels,
  position,
  selection,
  onSelectionStart,
  onSelectionChange,
}: Props) => {
  const hudCanvasRef = useRef<HTMLCanvasElement>(null);
  const [peaks, setPeaks] = useState<Observable<number[]>>(from([]));
  const [mode, setMode] = useState(Mode.Normal);
  const [newSelection, setNewSelection] = useState({ ...emptySelection });
  const [dragStart, setDragStart] = useState(0);
  const [hoverState, setHoverState] = useState(HoverState.Normal);
  const [newSelection, setNewSelection] = useState({
    ...emptySelection,
  });
  const [selection, setSelection] = useState({ start: 0, end: 100 });
  const [cursor, setCursor] = useState('auto');

  const moveOffsetX = useRef(0);

  // effects

  // handle global mouse up.
  // Currently this adds and removes the global event listener every time the
  // component is rerendered (which is often when dragging or redrawing). It
  // works but probably better to optimize this for performance reasons.
  useEffect(() => {
    window.addEventListener('mouseup', handleMouseUp);
    return () => {
@@ -57,32 +67,53 @@ export const Overview: React.FC<Props> = ({
    };
  }, [mode, newSelection]);

  // load peaks on mediaset change
  // publish onSelectionChange event
  useEffect(() => {
    (async function () {
    if (mediaSet == null) {
      return;
    }

    const canvas = hudCanvasRef.current;
    if (canvas == null) {
      console.error('no hud canvas ref available');
      return;
    }

    const ctx = canvas.getContext('2d');
    if (ctx == null) {
      console.error('no hud 2d context available');
      return;
    }

    const width = canvas.getBoundingClientRect().width;
    const selectionPercent = {
      start: (selection.start / width) * 100,
      end: (selection.end / width) * 100,
    };

    onSelectionChange(selectionPercent);
  }, [selection]);

  // load peaks on mediaset change
  useEffect(() => {
    (async function () {
      if (mediaSet == null) {
        return;
      }
      const canvas = hudCanvasRef.current;
      if (canvas == null) {
        console.error('no hud canvas ref available');
        return;
      }
      const ctx = canvas.getContext('2d');
      if (ctx == null) {
        console.error('no hud 2d context available');
        return;
      }
      console.log('fetching audio...');
      const service = new MediaSetServiceClientImpl(newRPC());
      const audioProgressStream = service.GetAudio({
        id: mediaSet.id,
        numBins: CanvasLogicalWidth,
      });

      const peaks = audioProgressStream.pipe(map((progress) => progress.peaks));
      setPeaks(peaks);
    })();
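In the refactored component the selection is tracked in element pixels and converted to percentages of the overview element's width before being published to the parent, as in the effect above. A minimal sketch of that conversion (the helper name is illustrative, not part of the commit):

// Convert a selection expressed in element pixels into percentages of the
// overview element's width, the shape published via onSelectionChange.
function selectionToPercent(
  selection: { start: number; end: number },
  elementWidth: number
): { start: number; end: number } {
  return {
    start: (selection.start / elementWidth) * 100,
    end: (selection.end / elementWidth) * 100,
  };
}

// e.g. { start: 150, end: 300 } on a 600px-wide overview => { start: 25, end: 50 }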
@@ -90,123 +121,173 @@ export const Overview: React.FC<Props> = ({

  // draw the overview HUD
  useEffect(() => {
    (async function () {
    requestAnimationFrame(() => {
      const canvas = hudCanvasRef.current;
      if (canvas == null) {
        console.error('no hud canvas ref available');
        return;
      }

      const ctx = canvas.getContext('2d');
      if (ctx == null) {
        console.error('no hud 2d context available');
        return;
      }

      ctx.clearRect(0, 0, canvas.width, canvas.height);

      // draw selection:
      let currentSelection: Frames;
      if (mode == Mode.Selecting || mode == Mode.Dragging) {
      // draw selection

      let currentSelection: Selection;
      if (
        mode == Mode.Selecting ||
        mode == Mode.Dragging ||
        mode == Mode.ResizingStart ||
        mode == Mode.ResizingEnd
      ) {
        currentSelection = newSelection;
      } else {
        currentSelection = selection;
      }

      if (currentSelection.start < currentSelection.end) {
        const x1 =
          (currentSelection.start / mediaSet.audioFrames) * CanvasLogicalWidth;
        const x2 =
          (currentSelection.end / mediaSet.audioFrames) * CanvasLogicalWidth;
        const elementWidth = canvas.getBoundingClientRect().width;
        const start =
          (currentSelection.start / elementWidth) * CanvasLogicalWidth;
        const end = (currentSelection.end / elementWidth) * CanvasLogicalWidth;

        ctx.beginPath();
        ctx.strokeStyle = 'red';
        ctx.lineWidth = 4;
        ctx.fillStyle = 'rgba(255, 255, 255, 0.15)';
        ctx.rect(x1, 2, x2 - x1, canvas.height - 10);
        const alpha = hoverState == HoverState.OverSelection ? '0.15' : '0.13';
        ctx.fillStyle = `rgba(255, 255, 255, ${alpha})`;
        ctx.rect(start, 2, end - start, canvas.height - 10);
        ctx.fill();
        ctx.stroke();
      }

      // draw position marker:
      const fullSelection = { start: 0, end: mediaSet.audioFrames }; // constantize?
      const x = secsToCanvasX(
        position,
        mediaSet.audioSampleRate,
        fullSelection
      );
      // should never happen:
      if (x == null) {
        return;
      }
      // draw position marker

      const markerX = canvas.width * (position.percent / 100);

      ctx.strokeStyle = 'red';
      ctx.beginPath();
      ctx.moveTo(x, 0);
      ctx.moveTo(markerX, 0);
      ctx.lineWidth = 4;
      ctx.lineTo(x, canvas.height - 4);
      ctx.lineTo(markerX, canvas.height - 4);
      ctx.stroke();
    })();
    });
  });

  // publish event on new selection start
  useEffect(() => {
    onSelectionStart(newSelection.start);
  }, [newSelection]);

  // handlers

  const isHoveringSelectionStart = (elementX: number): boolean => {
    return elementX > selection.start - 10 && elementX < selection.start + 10;
  };

  const isHoveringSelectionEnd = (elementX: number): boolean => {
    return elementX > selection.end - 10 && elementX < selection.end + 10;
  };

  const isHoveringSelection = (elementX: number): boolean => {
    return elementX >= selection.start && elementX <= selection.end;
  };

  const handleMouseDown = (evt: MouseEvent<HTMLCanvasElement>) => {
    if (mode != Mode.Normal) {
      return;
    }

    const frame = Math.floor(
      mediaSet.audioFrames *
        (mouseEventToCanvasX(evt) / evt.currentTarget.width)
    const elementX = Math.round(
      evt.clientX - evt.currentTarget.getBoundingClientRect().x
    );

    if (frame >= selection.start && frame < selection.end) {
    if (isHoveringSelectionStart(elementX)) {
      setMode(Mode.ResizingStart);
      moveOffsetX.current = elementX;
      return;
    } else if (isHoveringSelectionEnd(elementX)) {
      setMode(Mode.ResizingEnd);
      moveOffsetX.current = elementX;
      return;
    } else if (isHoveringSelection(elementX)) {
      setMode(Mode.Dragging);
      setDragStart(frame);
      setCursor('pointer');
      moveOffsetX.current = elementX;
      return;
    }

    setMode(Mode.Selecting);
    setNewSelection({ start: frame, end: frame });
    setCursor('col-resize');
    moveOffsetX.current = elementX;
    setNewSelection({ start: elementX, end: elementX });
  };

  const handleMouseMove = (evt: MouseEvent<HTMLCanvasElement>) => {
    if (mode == Mode.Normal) {
      return;
    }

    const frame = Math.floor(
      mediaSet.audioFrames *
        (mouseEventToCanvasX(evt) / evt.currentTarget.width)
    const x = Math.round(
      evt.clientX - evt.currentTarget.getBoundingClientRect().x
    );

    if (mode == Mode.Dragging) {
      const diff = frame - dragStart;
      const frameCount = selection.end - selection.start;
    switch (mode) {
      case Mode.Normal: {
        if (isHoveringSelectionStart(x)) {
          setHoverState(HoverState.OverSelectionStart);
          setCursor('col-resize');
        } else if (isHoveringSelectionEnd(x)) {
          setHoverState(HoverState.OverSelectionEnd);
          setCursor('col-resize');
        } else if (isHoveringSelection(x)) {
          setHoverState(HoverState.OverSelection);
          setCursor('pointer');
        } else {
          setCursor('auto');
        }
        break;
      }
      case Mode.ResizingStart: {
        const diff = x - moveOffsetX.current;
        const start = selection.start + diff;

        if (start > selection.end) {
          setNewSelection({ start: selection.end, end: start });
          break;
        }

        setNewSelection({ ...newSelection, start: start });
        break;
      }
      case Mode.ResizingEnd: {
        const diff = x - moveOffsetX.current;
        const start = selection.end + diff;

        if (start < selection.start) {
          setNewSelection({ start: Math.max(0, start), end: selection.start });
          break;
        }

        setNewSelection({ ...newSelection, end: start });
        break;
      }
      case Mode.Dragging: {
        const diff = x - moveOffsetX.current;
        const width = selection.end - selection.start;
        let start = Math.max(0, selection.start + diff);
        let end = start + frameCount;
        if (end > mediaSet.audioFrames) {
          end = mediaSet.audioFrames;
          start = end - frameCount;
        let end = start + width;
        if (end > evt.currentTarget.getBoundingClientRect().width) {
          end = evt.currentTarget.getBoundingClientRect().width;
          start = end - width;
        }

        setNewSelection({ start: start, end: end });
        break;
      }
      case Mode.Selecting: {
        if (x < moveOffsetX.current) {
          setNewSelection({
            start: start,
            end: end,
            start: x,
            end: moveOffsetX.current,
          });
          return;
        } else {
          setNewSelection({ start: moveOffsetX.current, end: x });
        }
        break;
      }

    if (frame == newSelection.end) {
      return;
    }

    setNewSelection({ ...newSelection, end: frame });
  };

  const handleMouseUp = () => {
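The Dragging case above shifts the pixel-based selection by the pointer delta while clamping it to the overview element's width and preserving its length. A standalone sketch of that arithmetic (the function itself is illustrative, not part of the commit):

// Shift a pixel-based selection by a pointer delta, clamping it to the
// element width while keeping the selection the same length.
function dragSelection(
  selection: { start: number; end: number },
  deltaX: number,
  elementWidth: number
): { start: number; end: number } {
  const width = selection.end - selection.start;
  let start = Math.max(0, selection.start + deltaX);
  let end = start + width;
  if (end > elementWidth) {
    end = elementWidth;
    start = end - width;
  }
  return { start, end };
}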
@@ -215,7 +296,22 @@ export const Overview: React.FC<Props> = ({
    }

    setMode(Mode.Normal);
    onSelectionChange({ ...newSelection });
    setCursor('auto');

    if (newSelection.start == newSelection.end) {
      setSelection({ start: newSelection.start, end: newSelection.end + 5 });
      return;
    }

    if (newSelection.start == newSelection.end) {
      setSelection({ ...emptySelection });
      return;
    }
    setSelection({ ...newSelection });
  };

  const handleMouseLeave = (_evt: MouseEvent<HTMLCanvasElement>) => {
    setHoverState(HoverState.Normal);
  };

  // render component
@@ -233,6 +329,7 @@ export const Overview: React.FC<Props> = ({
    height: '100%',
    display: 'block',
    zIndex: 2,
    cursor: cursor,
  } as React.CSSProperties;

  return (
@@ -255,6 +352,7 @@ export const Overview: React.FC<Props> = ({
        style={hudCanvasStyles}
        onMouseDown={handleMouseDown}
        onMouseMove={handleMouseMove}
        onMouseLeave={handleMouseLeave}
      ></canvas>
    </div>
  </>

@@ -1,10 +1,10 @@
import { MediaSet, MediaSetServiceClientImpl } from './generated/media_set';
import { newRPC } from './App';
import { newRPC, VideoPosition } from './App';
import { useEffect, useRef } from 'react';

interface Props {
  mediaSet: MediaSet;
  position: number;
  position: VideoPosition;
  duration: number;
  height: number;
  video: HTMLVideoElement;
@@ -46,7 +46,7 @@ export const VideoPreview: React.FC<Props> = ({
      // trying to render the video. The most important use case is before a
      // click event has happened, when autoplay restrictions will prevent
      // the video being rendered to canvas.
      if (position == 0) {
      if (position.currentTime == 0) {
        const service = new MediaSetServiceClientImpl(newRPC());
        const thumbnail = await service.GetVideoThumbnail({
          id: mediaSet.id,
@@ -65,14 +65,14 @@ export const VideoPreview: React.FC<Props> = ({

      // otherwise, render the video, which (should) work now.
      const durSecs = duration / 1000;
      const ratio = position / durSecs;
      const ratio = position.currentTime / durSecs;
      const x = (canvas.width - 177) * ratio;
      ctx.clearRect(0, 0, x, canvas.height);
      ctx.clearRect(x + 177, 0, canvas.width - 177 - x, canvas.height);
      ctx.drawImage(video, x, 0, 177, 100);
    })();
    });
  }, [mediaSet, position]);
  }, [mediaSet, position.currentTime]);

  // render component

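The preview effect above maps the current playback position onto an x offset for the 177x100 video frame drawn into the preview canvas, using the millisecond duration prop. A small sketch of that mapping (the helper name is illustrative, not part of the commit):

// Map the playback position to the x offset of the 177px-wide preview frame.
// durationMs is the millisecond duration passed as the `duration` prop.
function previewFrameX(
  currentTime: number,
  durationMs: number,
  canvasWidth: number
): number {
  const durSecs = durationMs / 1000;
  const ratio = durSecs > 0 ? currentTime / durSecs : 0;
  return (canvasWidth - 177) * ratio;
}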
@@ -1,5 +1,5 @@
import { useEffect, useState, useRef } from 'react';
import { Frames, newRPC } from './App';
import { Frames, VideoPosition, newRPC } from './App';
import { MediaSetServiceClientImpl, MediaSet } from './generated/media_set';
import { WaveformCanvas } from './WaveformCanvas';
import { secsToCanvasX } from './Helpers';
@@ -8,7 +8,7 @@ import { bufferCount } from 'rxjs/operators';

interface Props {
  mediaSet: MediaSet;
  position: number;
  position: VideoPosition;
  viewport: Frames;
  offsetPixels: number;
}
@@ -76,7 +76,7 @@ export const Waveform: React.FC<Props> = ({
      return;
    }

    const x = secsToCanvasX(position, mediaSet.audioSampleRate, viewport);
    const x = secsToCanvasX(position.currentTime, mediaSet.audioSampleRate, viewport);
    if (x == null) {
      return;
    }
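The Waveform and Overview components both rely on secsToCanvasX from './Helpers', whose implementation is not part of this commit. Going only by how it is called here (a time in seconds, the audio sample rate, and a frame-based viewport, returning null when the position falls outside the viewport), a rough, purely hypothetical sketch of such a helper could look like this; the real one may differ:

// Hypothetical sketch only: convert a time in seconds to a logical canvas x
// coordinate within a frame-based viewport, or null when out of range.
const CanvasLogicalWidth = 2000; // matches the logical width used by Overview

function secsToCanvasXSketch(
  secs: number,
  sampleRate: number,
  viewport: { start: number; end: number }
): number | null {
  const frame = secs * sampleRate;
  if (frame < viewport.start || frame > viewport.end) {
    return null;
  }
  return ((frame - viewport.start) / (viewport.end - viewport.start)) * CanvasLogicalWidth;
}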