Frontend fixes

Rob Watson 2021-11-30 20:41:34 +01:00
parent 642ce6e349
commit 0cc1fd8272
7 changed files with 136 additions and 107 deletions

View File

@@ -371,6 +371,11 @@ outer:
 }
 
 func (s *MediaSetService) GetAudioSegment(ctx context.Context, id uuid.UUID, startFrame, endFrame int64, numBins int) ([]int16, error) {
+    if startFrame < 0 || endFrame < 0 || numBins <= 0 {
+        s.logger.With("startFrame", startFrame, "endFrame", endFrame, "numBins", numBins).Error("invalid arguments")
+        return nil, errors.New("invalid arguments")
+    }
+
     mediaSet, err := s.store.GetMediaSet(ctx, id)
     if err != nil {
         return nil, fmt.Errorf("error getting media set: %v", err)

View File

@@ -15,6 +15,8 @@
 # TODO: production build
 
 set -ex
+export DOCKER_BUILDKIT=1
+
 docker build \
   -t netfluxio/clipper-staging:latest \
   --build-arg API_URL=https://clipper-staging.netflux.io \

View File

@@ -6,7 +6,7 @@ import {
   GetAudioProgress,
 } from './generated/media_set';
 
-import { useState, useEffect } from 'react';
+import { useState, useEffect, useRef } from 'react';
 import { VideoPreview } from './VideoPreview';
 import { Overview, CanvasLogicalWidth } from './Overview';
 import { Waveform } from './Waveform';
@@ -14,8 +14,8 @@ import { ControlBar } from './ControlBar';
 import { SeekBar } from './SeekBar';
 import './App.css';
 import { Duration } from './generated/google/protobuf/duration';
-import { from, Observable } from 'rxjs';
-import { map } from 'rxjs/operators';
+import { firstValueFrom, from, Observable } from 'rxjs';
+import { first, map } from 'rxjs/operators';
 
 // ported from backend, where should they live?
 const thumbnailWidth = 177;
@@ -36,16 +36,22 @@ export interface VideoPosition {
   percent: number;
 }
 
+const video = document.createElement('video');
+const audio = document.createElement('audio');
+
 function App(): JSX.Element {
   const [mediaSet, setMediaSet] = useState<MediaSet | null>(null);
-  const [video, _setVideo] = useState(document.createElement('video'));
-  const [audio, _setAudio] = useState(document.createElement('audio'));
-  const [position, setPosition] = useState({ currentTime: 0, percent: 0 });
   const [viewport, setViewport] = useState({ start: 0, end: 0 });
   const [overviewPeaks, setOverviewPeaks] = useState<Observable<number[]>>(
     from([])
   );
 
+  // position stores the current playback position. positionRef makes it
+  // available inside a setInterval callback.
+  const [position, setPosition] = useState({ currentTime: 0, percent: 0 });
+  const positionRef = useRef(position);
+  positionRef.current = position;
+
   // effects
 
   // TODO: error handling
@@ -66,22 +72,25 @@ function App(): JSX.Element {
     })();
   }, []);
 
+  const updatePlayerPositionIntevalMillis = 30;
+
   // setup player on first page load only:
   useEffect(() => {
     if (mediaSet == null) {
       return;
     }
 
-    // assume mediaSet never changes once loaded
-    setInterval(() => {
-      if (video.currentTime == position.currentTime) {
+    const intervalID = setInterval(() => {
+      if (video.currentTime == positionRef.current.currentTime) {
         return;
       }
 
       const duration = mediaSet.audioFrames / mediaSet.audioSampleRate;
       const percent = (video.currentTime / duration) * 100;
       setPosition({ currentTime: video.currentTime, percent: percent });
-    }, 100);
+    }, updatePlayerPositionIntevalMillis);
+
+    return () => clearInterval(intervalID);
   }, [mediaSet]);
// load audio when MediaSet is loaded: // load audio when MediaSet is loaded:
@@ -91,7 +100,6 @@ function App(): JSX.Element {
       return;
     }
     console.log('fetching audio...');
-    // TODO move this call to app.tsx, pass the stream in as a prop.
     const service = new MediaSetServiceClientImpl(newRPC());
     const audioProgressStream = service.GetAudio({
       id: mediaSet.id,
@@ -100,18 +108,15 @@ function App(): JSX.Element {
     const peaks = audioProgressStream.pipe(map((progress) => progress.peaks));
     setOverviewPeaks(peaks);
 
-    let url = '';
-    // TODO: probably a nicer way to do this.
-    await audioProgressStream.forEach((progress: GetAudioProgress) => {
-      if (progress.url != '') {
-        url = progress.url;
-      }
-    });
+    const pipe = audioProgressStream.pipe(
+      first((progress: GetAudioProgress) => progress.url != '')
+    );
+    const progressWithURL = await firstValueFrom(pipe);
 
-    audio.src = url;
+    audio.src = progressWithURL.url;
     audio.muted = false;
     audio.volume = 1;
-    console.log('got audio URL', url);
+    console.log('set audio src', progressWithURL.url);
   })();
 }, [mediaSet]);
@@ -122,21 +127,16 @@ function App(): JSX.Element {
       return;
     }
 
-    console.log('getting video...');
-    const rpc = newRPC();
-    const service = new MediaSetServiceClientImpl(rpc);
+    console.log('fetching video...');
+    const service = new MediaSetServiceClientImpl(newRPC());
     const videoProgressStream = service.GetVideo({ id: mediaSet.id });
 
-    let url = '';
-    // TODO: probably a nicer way to do this.
-    await videoProgressStream.forEach((progress: GetVideoProgress) => {
-      if (progress.url != '') {
-        url = progress.url;
-      }
-    });
+    const pipe = videoProgressStream.pipe(
+      first((progress: GetVideoProgress) => progress.url != '')
+    );
+    const progressWithURL = await firstValueFrom(pipe);
 
-    video.src = url;
-    console.log('set video src', video.src);
+    video.src = progressWithURL.url;
+    console.log('set video src', progressWithURL.url);
   })();
 }, [mediaSet]);
@@ -160,16 +160,22 @@ function App(): JSX.Element {
   // handlers
 
-  const handleOverviewSelectionChange = (selection: Frames) => {
-    console.log('in handleOverviewSelectionChange', selection);
+  const handleOverviewSelectionChange = (newViewport: Frames) => {
     if (mediaSet == null) {
       return;
     }
+    console.log('set new viewport', newViewport);
+    setViewport({ ...newViewport });
 
-    setViewport({
-      start: mediaSet.audioFrames * (selection.start / 100),
-      end: mediaSet.audioFrames * (selection.end / 100),
-    });
+    if (!audio.paused) {
+      return;
+    }
+
+    const ratio = newViewport.start / mediaSet.audioFrames;
+    const currentTime =
+      (mediaSet.audioFrames / mediaSet.audioSampleRate) * ratio;
+    audio.currentTime = currentTime;
+    video.currentTime = currentTime;
   };
 
   // render component
@@ -211,6 +217,7 @@ function App(): JSX.Element {
           mediaSet={mediaSet}
           offsetPixels={offsetPixels}
           height={80}
+          viewport={viewport}
           position={position}
           onSelectionChange={handleOverviewSelectionChange}
         />
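
Note on the stream change above: the old code awaited the whole progress stream with forEach and remembered the last non-empty URL; the new code resolves as soon as the first message with a URL arrives, via RxJS first() and firstValueFrom. A minimal standalone sketch of that pattern follows; the Progress interface here is a simplified stand-in for the generated GetAudioProgress/GetVideoProgress types.

import { Observable, firstValueFrom } from 'rxjs';
import { first } from 'rxjs/operators';

interface Progress {
  url: string;
}

// Resolves with the first emission whose url field is non-empty; first()
// completes the subscription after that single match.
async function waitForURL(stream: Observable<Progress>): Promise<string> {
  const progressWithURL = await firstValueFrom(
    stream.pipe(first((progress) => progress.url !== ''))
  );
  return progressWithURL.url;
}

One caveat worth knowing: if the stream completes without ever emitting a matching value, firstValueFrom rejects (with EmptyError), so callers that can hit that case may want a catch.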

View File

@@ -45,7 +45,7 @@ export const secsToCanvasX = (
   sampleRate: number,
   viewport: Frames
 ): number | null => {
-  const frame = Math.floor(secs * sampleRate);
+  const frame = Math.round(secs * sampleRate);
   if (frame < viewport.start || frame > viewport.end) {
     return null;
   }

View File

@@ -15,6 +15,7 @@ interface Props {
   height: number;
   offsetPixels: number;
   position: VideoPosition;
+  viewport: Frames;
   onSelectionChange: (selection: Frames) => void;
 }
@@ -44,20 +45,23 @@ export const Overview: React.FC<Props> = ({
   height,
   offsetPixels,
   position,
+  viewport,
   onSelectionChange,
 }: Props) => {
   const hudCanvasRef = useRef<HTMLCanvasElement>(null);
   const [mode, setMode] = useState(Mode.Normal);
   const [hoverState, setHoverState] = useState(HoverState.Normal);
+  const [cursor, setCursor] = useState('auto');
+
+  // selection and newSelection relate to canvas logical pixels:
+  const [selection, setSelection] = useState({ ...emptySelection });
   const [newSelection, setNewSelection] = useState({
     ...emptySelection,
   });
-  const [selection, setSelection] = useState({ start: 0, end: 100 });
-  const [cursor, setCursor] = useState('auto');
   const moveOffsetX = useRef(0);
 
-  // effects
+  // side effects
 
   // handle global mouse up.
   useEffect(() => {
@@ -67,30 +71,21 @@ export const Overview: React.FC<Props> = ({
     };
   }, [mode, newSelection]);
 
-  // publish onSelectionChange event
+  // set selection state on viewport change
   useEffect(() => {
     if (mediaSet == null) {
       return;
     }
 
-    const canvas = hudCanvasRef.current;
-    if (canvas == null) {
-      console.error('no hud canvas ref available');
-      return;
-    }
-
-    const ctx = canvas.getContext('2d');
-    if (ctx == null) {
-      console.error('no hud 2d context available');
-      return;
-    }
-
-    const width = canvas.getBoundingClientRect().width;
-    const selectionPercent = {
-      start: (selection.start / width) * 100,
-      end: (selection.end / width) * 100,
-    };
-
-    onSelectionChange(selectionPercent);
-  }, [selection]);
+    setSelection({
+      start: Math.round(
+        (viewport.start / mediaSet.audioFrames) * CanvasLogicalWidth
+      ),
+      end: Math.round(
+        (viewport.end / mediaSet.audioFrames) * CanvasLogicalWidth
+      ),
+    });
+  }, [mediaSet, viewport]);
 
   // load peaks on mediaset change
   useEffect(() => {
@@ -140,44 +135,65 @@ export const Overview: React.FC<Props> = ({
       currentSelection = selection;
     }
 
-    const elementWidth = canvas.getBoundingClientRect().width;
-    const start =
-      (currentSelection.start / elementWidth) * CanvasLogicalWidth;
-    const end = (currentSelection.end / elementWidth) * CanvasLogicalWidth;
-
     ctx.beginPath();
     ctx.strokeStyle = 'red';
     ctx.lineWidth = 4;
     const alpha = hoverState == HoverState.OverSelection ? '0.15' : '0.13';
     ctx.fillStyle = `rgba(255, 255, 255, ${alpha})`;
-    ctx.rect(start, 2, end - start, canvas.height - 10);
+    ctx.rect(
+      currentSelection.start,
+      2,
+      currentSelection.end - currentSelection.start,
+      canvas.height - 10
+    );
     ctx.fill();
     ctx.stroke();
 
     // draw position marker
     const markerX = canvas.width * (position.percent / 100);
     ctx.beginPath();
     ctx.moveTo(markerX, 0);
     ctx.lineWidth = 4;
     ctx.lineTo(markerX, canvas.height - 4);
     ctx.stroke();
     });
-  });
+  }, [mediaSet, selection, newSelection, position]);
 
   // handlers
 
-  const isHoveringSelectionStart = (elementX: number): boolean => {
-    return elementX > selection.start - 10 && elementX < selection.start + 10;
+  const hoverOffset = 10;
+
+  const isHoveringSelectionStart = (x: number): boolean => {
+    return (
+      x > selection.start - hoverOffset && x < selection.start + hoverOffset
+    );
   };
 
-  const isHoveringSelectionEnd = (elementX: number): boolean => {
-    return elementX > selection.end - 10 && elementX < selection.end + 10;
+  const isHoveringSelectionEnd = (x: number): boolean => {
+    return x > selection.end - hoverOffset && x < selection.end + hoverOffset;
   };
 
-  const isHoveringSelection = (elementX: number): boolean => {
-    return elementX >= selection.start && elementX <= selection.end;
+  const isHoveringSelection = (x: number): boolean => {
+    return x >= selection.start && x <= selection.end;
+  };
+
+  const getCanvasX = (evt: MouseEvent<HTMLCanvasElement>): number => {
+    const rect = evt.currentTarget.getBoundingClientRect();
+    const x = Math.round(
+      ((evt.clientX - rect.left) / rect.width) * CanvasLogicalWidth
+    );
+    return constrainXToCanvas(x);
+  };
+
+  const constrainXToCanvas = (x: number): number => {
+    if (x < 0) {
+      return 0;
+    }
+    if (x > CanvasLogicalWidth) {
+      return CanvasLogicalWidth;
+    }
+    return x;
   };
 
   const handleMouseDown = (evt: MouseEvent<HTMLCanvasElement>) => {
@@ -185,35 +201,31 @@ export const Overview: React.FC<Props> = ({
       return;
     }
 
-    const elementX = Math.round(
-      evt.clientX - evt.currentTarget.getBoundingClientRect().x
-    );
+    const x = getCanvasX(evt);
 
-    if (isHoveringSelectionStart(elementX)) {
+    if (isHoveringSelectionStart(x)) {
       setMode(Mode.ResizingStart);
-      moveOffsetX.current = elementX;
+      moveOffsetX.current = x;
       return;
-    } else if (isHoveringSelectionEnd(elementX)) {
+    } else if (isHoveringSelectionEnd(x)) {
       setMode(Mode.ResizingEnd);
-      moveOffsetX.current = elementX;
+      moveOffsetX.current = x;
       return;
-    } else if (isHoveringSelection(elementX)) {
+    } else if (isHoveringSelection(x)) {
       setMode(Mode.Dragging);
       setCursor('pointer');
-      moveOffsetX.current = elementX;
+      moveOffsetX.current = x;
       return;
     }
 
     setMode(Mode.Selecting);
     setCursor('col-resize');
-    moveOffsetX.current = elementX;
-    setNewSelection({ start: elementX, end: elementX });
+    moveOffsetX.current = x;
+    setNewSelection({ start: x, end: x });
   };
 
   const handleMouseMove = (evt: MouseEvent<HTMLCanvasElement>) => {
-    const x = Math.round(
-      evt.clientX - evt.currentTarget.getBoundingClientRect().x
-    );
+    const x = getCanvasX(evt);
 
     switch (mode) {
       case Mode.Normal: {
@@ -233,7 +245,7 @@ export const Overview: React.FC<Props> = ({
       }
       case Mode.ResizingStart: {
         const diff = x - moveOffsetX.current;
-        const start = selection.start + diff;
+        const start = constrainXToCanvas(selection.start + diff);
 
         if (start > selection.end) {
           setNewSelection({ start: selection.end, end: start });
@@ -245,7 +257,7 @@ export const Overview: React.FC<Props> = ({
       }
       case Mode.ResizingEnd: {
         const diff = x - moveOffsetX.current;
-        const start = selection.end + diff;
+        const start = constrainXToCanvas(selection.end + diff);
 
         if (start < selection.start) {
           setNewSelection({ start: Math.max(0, start), end: selection.start });
@@ -260,8 +272,8 @@ export const Overview: React.FC<Props> = ({
         const width = selection.end - selection.start;
         let start = Math.max(0, selection.start + diff);
         let end = start + width;
-        if (end > evt.currentTarget.getBoundingClientRect().width) {
-          end = evt.currentTarget.getBoundingClientRect().width;
+        if (end > CanvasLogicalWidth) {
+          end = CanvasLogicalWidth;
           start = end - width;
         }
@@ -290,16 +302,13 @@ export const Overview: React.FC<Props> = ({
     setMode(Mode.Normal);
     setCursor('auto');
 
-    if (newSelection.start == newSelection.end) {
-      setSelection({ start: newSelection.start, end: newSelection.end + 5 });
-      return;
-    }
-
-    if (newSelection.start == newSelection.end) {
-      setSelection({ ...emptySelection });
-      return;
-    }
-
-    setSelection({ ...newSelection });
+    const start = Math.round(
+      (newSelection.start / CanvasLogicalWidth) * mediaSet.audioFrames
+    );
+    const end = Math.round(
+      (newSelection.end / CanvasLogicalWidth) * mediaSet.audioFrames
+    );
+    onSelectionChange({ start, end });
   };
 
   const handleMouseLeave = (_evt: MouseEvent<HTMLCanvasElement>) => {
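
Note on the Overview changes above: selection state is now kept in canvas logical pixels while the viewport stays in audio frames, so the component converts between the two instead of measuring the element with getBoundingClientRect() everywhere (only getCanvasX still touches it, to translate raw mouse coordinates). A rough sketch of the two conversions, with CanvasLogicalWidth and audioFrames replaced by assumed example values purely for illustration:

// Assumed example values; the real CanvasLogicalWidth is exported from
// Overview.tsx and audioFrames comes from the loaded MediaSet.
const CanvasLogicalWidth = 2000;
const audioFrames = 10_000_000;

// viewport (audio frames) -> selection (canvas logical pixels)
const framesToCanvasX = (frames: number): number =>
  Math.round((frames / audioFrames) * CanvasLogicalWidth);

// selection (canvas logical pixels) -> viewport (audio frames)
const canvasXToFrames = (x: number): number =>
  Math.round((x / CanvasLogicalWidth) * audioFrames);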

View File

@@ -38,7 +38,7 @@ export const Waveform: React.FC<Props> = ({
       return;
     }
 
-    console.log('fetch audio segment...');
+    console.log('fetch audio segment, frames', viewport);
 
     const service = new MediaSetServiceClientImpl(newRPC());
     const segment = await service.GetAudioSegment({
@@ -76,7 +76,11 @@ export const Waveform: React.FC<Props> = ({
       return;
     }
 
-    const x = secsToCanvasX(position.currentTime, mediaSet.audioSampleRate, viewport);
+    const x = secsToCanvasX(
+      position.currentTime,
+      mediaSet.audioSampleRate,
+      viewport
+    );
     if (x == null) {
       return;
     }
@@ -87,7 +91,7 @@ export const Waveform: React.FC<Props> = ({
     ctx.lineWidth = 4;
     ctx.lineTo(x, canvas.height);
     ctx.stroke();
-  }, [viewport, position]);
+  }, [mediaSet, position]);
 
   // render component

View File

@@ -5,6 +5,8 @@ option go_package = "pb/media_set";
 
 import "google/protobuf/duration.proto";
 
+// TODO: use uints where appropriate.
+
 message MediaSet {
   string id = 1;
   string youtube_id = 2;