import { useEffect, useState, useRef } from 'react';
import { Frames, VideoPosition, newRPC } from './App';
import { MediaSetServiceClientImpl, MediaSet } from './generated/media_set';
import { WaveformCanvas } from './WaveformCanvas';
import { secsToCanvasX } from './Helpers';
import { from, Observable } from 'rxjs';
import { bufferCount } from 'rxjs/operators';

interface Props {
  mediaSet: MediaSet;
  position: VideoPosition;
  viewport: Frames;
  offsetPixels: number;
}

export const CanvasLogicalWidth = 2000;
export const CanvasLogicalHeight = 500;

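// Waveform renders the audio peaks for the current viewport of a MediaSet and
// overlays a HUD canvas that draws the playback cursor.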
export const Waveform: React.FC<Props> = ({
  mediaSet,
  position,
  viewport,
  offsetPixels,
}: Props) => {
  const [peaks, setPeaks] = useState<Observable<number[]>>(from([]));
  const hudCanvasRef = useRef<HTMLCanvasElement>(null);

  // effects

  // load peaks on MediaSet change
  useEffect(() => {
    (async function () {
      if (mediaSet == null) {
        return;
      }

      if (viewport.start >= viewport.end) {
        return;
      }

      console.log('fetch audio segment, frames', viewport);

      const service = new MediaSetServiceClientImpl(newRPC());
      const segment = await service.GetAudioSegment({
        id: mediaSet.id,
        numBins: CanvasLogicalWidth,
        startFrame: viewport.start,
        endFrame: viewport.end,
      });

      console.log('got segment', segment);

      // group the flat peak values into buffers of one value per audio channel
      const peaks = from(segment.peaks).pipe(
        bufferCount(mediaSet.audioChannels)
      );
      setPeaks(peaks);
    })();
  }, [viewport]);

  // render HUD
  useEffect(() => {
    const canvas = hudCanvasRef.current;
    if (canvas == null) {
      return;
    }

    const ctx = canvas.getContext('2d');
    if (ctx == null) {
      console.error('no hud 2d context available');
      return;
    }

    ctx.clearRect(0, 0, canvas.width, canvas.height);

    if (mediaSet == null) {
      return;
    }

    const x = secsToCanvasX(
      position.currentTime,
      mediaSet.audioSampleRate,
      viewport
    );
    if (x == null) {
      return;
    }

    ctx.strokeStyle = 'red';
    ctx.lineWidth = 4;
    ctx.beginPath();
    ctx.moveTo(x, 0);
    ctx.lineTo(x, canvas.height);
    ctx.stroke();
  }, [mediaSet, position]);

  // render component

  const containerStyles = {
    background: 'black',
    margin: `0 ${offsetPixels}px`,
    flexGrow: 1,
    position: 'relative',
  } as React.CSSProperties;

  const canvasStyles = {
    position: 'absolute',
    width: '100%',
    height: '100%',
    display: 'block',
  } as React.CSSProperties;

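  // The HUD canvas is absolutely positioned so it overlays the WaveformCanvas
  // inside the relatively positioned container.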
  return (
    <>
      <div style={containerStyles}>
        <WaveformCanvas
          peaks={peaks}
          channels={mediaSet.audioChannels}
          width={CanvasLogicalWidth}
          height={CanvasLogicalHeight}
          strokeStyle="green"
          fillStyle="black"
          zIndex={0}
          alpha={1}
        ></WaveformCanvas>
        <canvas
          width={CanvasLogicalWidth}
          height={CanvasLogicalHeight}
          ref={hudCanvasRef}
          style={canvasStyles}
        ></canvas>
      </div>
    </>
  );
};