Render WaveformCanvas via peaks
parent 97a55632ef
commit c1ac075a88
@@ -25,7 +25,10 @@ func main() {
     // Create a store
     databaseURL := os.Getenv("DATABASE_URL")
-    log.Printf("DATABASE_URL = %s", databaseURL)
+    if databaseURL == "" {
+        log.Fatal("DATABASE_URL not set")
+    }
 
     db, err := sql.Open("postgres", databaseURL)
     if err != nil {
         log.Fatal(err)
@@ -5,6 +5,7 @@ import (
     "encoding/binary"
     "fmt"
     "io"
+    "math"
 )
 
 type GetAudioProgress struct {
@@ -21,6 +22,7 @@ type GetAudioProgressReader interface {
 // signed int16s and, given a target number of bins, emits a stream of peaks
 // corresponding to each channel of the audio data.
 type fetchAudioProgressReader struct {
+    byteOrder      binary.ByteOrder
     framesExpected int64
     channels       int
     framesPerBin   int
@@ -34,11 +36,12 @@ type fetchAudioProgressReader struct {
 }
 
 // TODO: validate inputs, debugging is confusing otherwise
-func newGetAudioProgressReader(framesExpected int64, channels, numBins int) *fetchAudioProgressReader {
+func newGetAudioProgressReader(byteOrder binary.ByteOrder, framesExpected int64, channels, numBins int) *fetchAudioProgressReader {
     return &fetchAudioProgressReader{
+        byteOrder:      byteOrder,
         channels:       channels,
         framesExpected: framesExpected,
-        framesPerBin:   int(framesExpected / int64(numBins)),
+        framesPerBin:   int(math.Ceil(float64(framesExpected) / float64(numBins))),
         samples:        make([]int16, 8_192),
         currPeaks:      make([]int16, channels),
         progress:       make(chan GetAudioProgress),
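Note (not part of the diff): the switch from truncating division to math.Ceil above keeps the reader from emitting more bins than requested. A small worked illustration in TypeScript, with hypothetical numbers:

    const framesExpected = 44_100; // hypothetical frame count
    const numBins = 2_000;         // one bin per canvas column

    // Truncating division makes each bin slightly too small, so the reader
    // would emit more bins than the canvas has columns:
    const truncated = Math.floor(framesExpected / numBins); // 22 frames per bin
    console.log(Math.ceil(framesExpected / truncated));     // 2005 bins > 2000

    // Rounding up makes each bin slightly too large instead, so the bin
    // count can only fall short of numBins, never overflow it:
    const roundedUp = Math.ceil(framesExpected / numBins);  // 23 frames per bin
    console.log(Math.ceil(framesExpected / roundedUp));     // 1918 bins <= 2000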
@@ -55,6 +58,20 @@ func (w *fetchAudioProgressReader) Close() error {
     return nil
 }
 
+func (w *fetchAudioProgressReader) Read() (GetAudioProgress, error) {
+    for {
+        select {
+        case progress, ok := <-w.progress:
+            if !ok {
+                return GetAudioProgress{}, io.EOF
+            }
+            return progress, nil
+        case err := <-w.errorChan:
+            return GetAudioProgress{}, fmt.Errorf("error waiting for progress: %v", err)
+        }
+    }
+}
+
 func (w *fetchAudioProgressReader) Write(p []byte) (int, error) {
     // expand our target slice if it is of insufficient size:
     numSamples := len(p) / SizeOfInt16
@@ -64,7 +81,7 @@ func (w *fetchAudioProgressReader) Write(p []byte) (int, error) {
 
     samples := w.samples[:numSamples]
 
-    if err := binary.Read(bytes.NewReader(p), binary.LittleEndian, samples); err != nil {
+    if err := binary.Read(bytes.NewReader(p), w.byteOrder, samples); err != nil {
         return 0, fmt.Errorf("error parsing samples: %v", err)
     }
 
@@ -103,17 +120,3 @@ func (w *fetchAudioProgressReader) nextBin() {
     }
     w.framesProcessed++
 }
-
-func (w *fetchAudioProgressReader) Read() (GetAudioProgress, error) {
-    for {
-        select {
-        case progress, ok := <-w.progress:
-            if !ok {
-                return GetAudioProgress{}, io.EOF
-            }
-            return progress, nil
-        case err := <-w.errorChan:
-            return GetAudioProgress{}, fmt.Errorf("error waiting for progress: %v", err)
-        }
-    }
-}
@@ -4,6 +4,7 @@ import (
     "bytes"
     "context"
     "database/sql"
+    "encoding/binary"
     "errors"
     "fmt"
     "io"
@@ -259,6 +260,7 @@ func (s *MediaSetService) getAudioFromS3(ctx context.Context, mediaSet store.Med
     }
 
     fetchAudioProgressReader := newGetAudioProgressReader(
+        binary.BigEndian,
         int64(mediaSet.AudioFrames.Int64),
         int(mediaSet.AudioChannels),
         numBins,
@@ -340,7 +342,7 @@ func (s *MediaSetService) getAudioFromYoutube(ctx context.Context, mediaSet stor
         return nil, fmt.Errorf("error creating ffmpegreader: %v", err)
     }
 
-    s3Key := fmt.Sprintf("media_sets/%s/audio.webm", mediaSet.ID)
+    s3Key := fmt.Sprintf("media_sets/%s/audio.raw", mediaSet.ID)
     uploader, err := newMultipartUploadWriter(
         ctx,
         s.s3,
@@ -353,6 +355,7 @@ func (s *MediaSetService) getAudioFromYoutube(ctx context.Context, mediaSet stor
     }
 
     fetchAudioProgressReader := newGetAudioProgressReader(
+        binary.LittleEndian,
         int64(mediaSet.AudioFramesApprox),
         format.AudioChannels,
         numBins,
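Note (not part of the diff): the two call sites above now pass different byte orders into the reader (binary.BigEndian in getAudioFromS3, binary.LittleEndian in getAudioFromYoutube). A quick TypeScript illustration of why the parameter matters, with hypothetical bytes: the same two bytes decode to different int16 samples depending on order.

    const bytes = new Uint8Array([0x12, 0x34]);
    const view = new DataView(bytes.buffer);

    console.log(view.getInt16(0, false)); // read as big-endian:    0x1234 = 4660
    console.log(view.getInt16(0, true));  // read as little-endian: 0x3412 = 13330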
@@ -1,13 +1,11 @@
 import { useState, useEffect, useRef, MouseEvent } from 'react';
-import {
-  MediaSetServiceClientImpl,
-  MediaSet,
-  GetAudioProgress,
-} from './generated/media_set';
+import { MediaSetServiceClientImpl, MediaSet } from './generated/media_set';
 import { Frames, newRPC } from './App';
 import { WaveformCanvas } from './WaveformCanvas';
 import { mouseEventToCanvasX } from './Helpers';
 import { secsToCanvasX } from './Helpers';
+import { from, Observable } from 'rxjs';
+import { map } from 'rxjs/operators';
 
 interface Props {
   mediaSet: MediaSet;
@@ -24,7 +22,7 @@ enum Mode {
   Dragging,
 }
 
-const CanvasLogicalWidth = 2000;
+const CanvasLogicalWidth = 2_000;
 const CanvasLogicalHeight = 500;
 
 const emptySelection = { start: 0, end: 0 };
@@ -39,7 +37,7 @@ export const Overview: React.FC<Props> = ({
   onSelectionChange,
 }: Props) => {
   const hudCanvasRef = useRef<HTMLCanvasElement>(null);
-  const [peaks, setPeaks] = useState<number[][]>([[], []]);
+  const [peaks, setPeaks] = useState<Observable<number[]>>(from([]));
   const [mode, setMode] = useState(Mode.Normal);
   const [selection, setSelection] = useState({ ...emptySelection });
   const [newSelection, setNewSelection] = useState({ ...emptySelection });
@@ -54,26 +52,31 @@ export const Overview: React.FC<Props> = ({
         return;
       }
 
+      const canvas = hudCanvasRef.current;
+      if (canvas == null) {
+        console.error('no hud canvas ref available');
+        return;
+      }
+
+      const ctx = canvas.getContext('2d');
+      if (ctx == null) {
+        console.error('no hud 2d context available');
+        return;
+      }
+
       console.log('fetching audio...');
       const service = new MediaSetServiceClientImpl(newRPC());
-      const observable = service.GetAudio({ id: mediaSet.id, numBins: 2_000 });
-
-      console.log('calling forEach...');
-      await observable.forEach((progress: GetAudioProgress) => {
-        console.log('got progress', progress.percentCompleted);
-      });
+      const audioProgressStream = service.GetAudio({
+        id: mediaSet.id,
+        numBins: CanvasLogicalWidth,
+      });
 
-      console.log('done');
-      // const resp = await fetch(
-      //   `http://localhost:8888/api/media_sets/${mediaSet.id}/peaks?start=0&end=${mediaSet.audioFrames}&bins=${CanvasLogicalWidth}`
-      // );
-      // const peaks = await resp.json();
-      // setPeaks(peaks);
+      const peaks = audioProgressStream.pipe(map((progress) => progress.peaks));
+      setPeaks(peaks);
     })();
   }, [mediaSet]);
 
-  // draw the overview waveform
+  // draw the overview HUD
   useEffect(() => {
     (async function () {
       const canvas = hudCanvasRef.current;
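Note (not part of the diff): in the effect above, GetAudio now yields a stream of progress updates, and the component keeps only the per-bin peaks as an Observable for WaveformCanvas to consume. A minimal standalone sketch of that pipeline; the literal progress values are illustrative stand-ins, not taken from the generated client:

    import { from, Observable } from 'rxjs';
    import { map } from 'rxjs/operators';

    interface GetAudioProgress {
      percentCompleted: number;
      peaks: number[]; // one peak per channel for the current bin
    }

    // Stand-in for service.GetAudio(...): three bins of stereo peaks.
    const progress$: Observable<GetAudioProgress> = from([
      { percentCompleted: 33, peaks: [1200, 900] },
      { percentCompleted: 66, peaks: [8000, 7500] },
      { percentCompleted: 100, peaks: [300, 450] },
    ]);

    // The same transformation as in the diff: keep only the peaks.
    const peaks$ = progress$.pipe(map((p) => p.peaks));

    peaks$.forEach((peaks) => console.log(peaks)); // [1200, 900], [8000, 7500], [300, 450]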
@@ -229,6 +232,7 @@ export const Overview: React.FC<Props> = ({
     <div style={containerStyles}>
       <WaveformCanvas
         peaks={peaks}
+        channels={mediaSet.audioChannels}
         width={CanvasLogicalWidth}
         height={CanvasLogicalHeight}
         strokeStyle="black"
@@ -3,6 +3,7 @@ import { Frames } from './App';
 import { MediaSet } from './generated/media_set';
 import { WaveformCanvas } from './WaveformCanvas';
 import { secsToCanvasX } from './Helpers';
+import { from, Observable } from 'rxjs';
 
 interface Props {
   mediaSet: MediaSet;
@@ -20,7 +21,7 @@ export const Waveform: React.FC<Props> = ({
   viewport,
   offsetPixels,
 }: Props) => {
-  const [peaks, setPeaks] = useState<number[][]>([[], []]);
+  const [peaks, setPeaks] = useState<Observable<number[]>>(from([]));
   const hudCanvasRef = useRef<HTMLCanvasElement>(null);
 
   // effects
@@ -98,6 +99,7 @@ export const Waveform: React.FC<Props> = ({
     <div style={containerStyles}>
       <WaveformCanvas
         peaks={peaks}
+        channels={mediaSet.audioChannels}
         width={CanvasLogicalWidth}
         height={CanvasLogicalHeight}
         strokeStyle="green"
@@ -1,11 +1,13 @@
 import { useEffect, useRef } from 'react';
+import { Observable } from 'rxjs';
 
 const maxPeakValue = 32_768;
 
 interface Props {
   width: number;
   height: number;
-  peaks: number[][] | null;
+  peaks: Observable<number[]>;
+  channels: number;
   strokeStyle: string;
   fillStyle: string;
   zIndex: number;
@@ -44,24 +46,25 @@ export const WaveformCanvas: React.FC<Props> = (props: Props) => {
       return;
     }
 
-    const numChannels = props.peaks.length;
-    const chanHeight = canvas.height / numChannels;
-    for (let i = 0; i < numChannels; i++) {
-      const yOffset = chanHeight * i;
-      // props.peaks[n].length must equal canvasLogicalWidth:
-      for (let j = 0; j < props.peaks[i].length; j++) {
-        const val = props.peaks[i][j];
+    const chanHeight = canvas.height / props.channels;
+
+    let frameIndex = 0;
+    props.peaks.forEach((peaks) => {
+      for (let chanIndex = 0; chanIndex < peaks.length; chanIndex++) {
+        const yOffset = chanHeight * chanIndex;
+        const val = peaks[chanIndex];
         const height = Math.floor((val / maxPeakValue) * chanHeight);
         const y1 = (chanHeight - height) / 2 + yOffset;
         const y2 = y1 + height;
         ctx.beginPath();
         ctx.globalAlpha = props.alpha;
-        ctx.moveTo(j, y1);
-        ctx.lineTo(j, y2);
+        ctx.moveTo(frameIndex, y1);
+        ctx.lineTo(frameIndex, y2);
         ctx.stroke();
         ctx.globalAlpha = 1;
       }
-    }
+      frameIndex++;
+    });
   }, [props.peaks]);
 
   const canvasStyles = {
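Note (not part of the diff): the loop above draws one vertical segment per emitted bin, centring each segment within its channel's band. The same geometry restated as a pure function, with a hypothetical worked example:

    const maxPeakValue = 32_768;

    function peakSegment(
      val: number,
      chanHeight: number,
      chanIndex: number
    ): { y1: number; y2: number } {
      const yOffset = chanHeight * chanIndex;                      // top of this channel's band
      const height = Math.floor((val / maxPeakValue) * chanHeight); // segment length in pixels
      const y1 = (chanHeight - height) / 2 + yOffset;               // centred within the band
      const y2 = y1 + height;
      return { y1, y2 };
    }

    // e.g. a peak of 16_384 in channel 1 of a 250px-high band:
    // height = 125, y1 = 62.5 + 250 = 312.5, y2 = 437.5
    console.log(peakSegment(16_384, 250, 1));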