Refactor frontend

Rob Watson 2021-10-08 16:38:35 +02:00
parent 084cabaca9
commit 43e2592de8
21 changed files with 927 additions and 807 deletions

.gitignore vendored (1 line changed)

@@ -1,2 +1,3 @@
 *.m4a
 /backend/cache/
+/backend/debug/

@@ -9,7 +9,10 @@ import (
 const (
     DefaultHTTPBindAddr = "0.0.0.0:8888"
-    DefaultTimeout = 30 * time.Second
+    // Needed to account for slow downloads from Youtube.
+    // TODO: figure out how to optimize this.
+    DefaultTimeout = 600 * time.Second
 )

 func main() {

@@ -4,7 +4,13 @@ import (
     "context"
     "flag"
     "fmt"
+    "io"
     "log"
+    "net/http"
+    "os"
+    "strings"
+    "sync"
+    "time"

     "git.netflux.io/rob/clipper/youtube"
@@ -13,11 +19,15 @@ import (
 func main() {
     var (
         verbose bool
-        audioOnly bool
-        videoOnly bool
+        printMode bool
+        downloadMode bool
+        audioOnly bool
+        videoOnly bool
     )
     flag.BoolVar(&verbose, "v", false, "verbose output")
+    flag.BoolVar(&printMode, "print", true, "print format info")
+    flag.BoolVar(&downloadMode, "download", false, "download all media to ./debug")
     flag.BoolVar(&audioOnly, "audio", false, "only print audio formats")
     flag.BoolVar(&videoOnly, "video", false, "only print video formats")
     flag.Parse()
@@ -32,6 +42,11 @@ func main() {
     }
     formats := video.Formats
+    if downloadMode {
+        downloadAll(formats)
+        return
+    }
     switch {
     case audioOnly:
         formats = youtube.SortAudio(formats)
@@ -44,3 +59,38 @@ func main() {
         fmt.Printf("%d: %s\n", n+1, youtube.FormatDebugString(&f, verbose))
     }
 }
+
+func downloadAll(formats youtubev2.FormatList) {
+    var wg sync.WaitGroup
+    for i := range formats {
+        format := formats[i]
+        wg.Add(1)
+        go func() {
+            defer wg.Done()
+            start := time.Now()
+            outpath := fmt.Sprintf("./debug/%s.%s-itag-%d", strings.ReplaceAll(format.MimeType, "/", "'"), format.Quality, format.ItagNo)
+            output, err := os.Create(outpath)
+            if err != nil {
+                log.Fatalf("error opening output file: %v", err)
+            }
+            resp, err := http.Get(format.URL)
+            if err != nil {
+                log.Fatalf("error fetching media: %v", err)
+            }
+            defer resp.Body.Close()
+            n, err := io.Copy(output, resp.Body)
+            if err != nil {
+                log.Fatalf("error reading media: %v", err)
+            }
+            dur := time.Since(start)
+            log.Printf("downloaded itag %d, %d bytes in %v secs", format.ItagNo, n, dur.Seconds())
+        }()
+    }
+    wg.Wait()
+}

@@ -45,7 +45,7 @@ func NewMediaSet(id string) *MediaSet {
 // TODO: pass io.Readers/Writers instead of strings.
 func (m *MediaSet) RawAudioPath() string { return fmt.Sprintf("cache/%s.raw", m.ID) }
 func (m *MediaSet) EncodedAudioPath() string { return fmt.Sprintf("cache/%s.m4a", m.ID) }
-func (m *MediaSet) VideoPath() string { return fmt.Sprintf("cache/%s.webm", m.ID) }
+func (m *MediaSet) VideoPath() string { return fmt.Sprintf("cache/%s.mp4", m.ID) }
 func (m *MediaSet) ThumbnailPath() string { return fmt.Sprintf("cache/%s.jpg", m.ID) }
 func (m *MediaSet) MetadataPath() string { return fmt.Sprintf("cache/%s.json", m.ID) }

@@ -47,8 +47,8 @@ func getThumbnails(c echo.Context) error {
     return c.File(mediaSet.ThumbnailPath())
 }

-// getAudio is a handler that responds with the audio file for a MediaSet
-func getAudio(c echo.Context) error {
+// getVideo is a handler that responds with the video file for a MediaSet
+func getVideo(c echo.Context) error {
     videoID := c.Param("id")
     mediaSet := media.NewMediaSet(videoID)
     if err := mediaSet.Load(); err != nil {
@@ -56,7 +56,7 @@ func getAudio(c echo.Context) error {
         return echo.NewHTTPError(http.StatusInternalServerError, "could not load media set")
     }
-    return c.File(mediaSet.EncodedAudioPath())
+    return c.File(mediaSet.VideoPath())
 }

 // getPeaks is a handler that returns a two-dimensional array of peaks, with

@@ -24,7 +24,7 @@ func Start(opts Options) error {
     e.GET("/api/media_sets/:id", getMediaSet)
     e.GET("/api/media_sets/:id/thumbnails", getThumbnails)
-    e.GET("/api/media_sets/:id/audio", getAudio)
+    e.GET("/api/media_sets/:id/video", getVideo)
     e.GET("/api/media_sets/:id/peaks", getPeaks)

     return e.Start(opts.BindAddr)

@@ -9,7 +9,7 @@ import (
 )

 func FormatDebugString(format *youtubev2.Format, includeURL bool) string {
-    var url string
+    url := "hidden"
     if includeURL {
         url = format.URL
     }
@@ -65,14 +65,14 @@ func SortAudio(inFormats youtubev2.FormatList) youtubev2.FormatList {
 }

 // SortVideo returns the provided formats ordered in descending preferred
-// order. The ideal candidate is video in an mp4 container with a medium
+// order. The ideal candidate is video in an mp4 container with a low
 // bitrate, with audio channels (needed to allow synced playback on the
 // website).
 func SortVideo(inFormats youtubev2.FormatList) youtubev2.FormatList {
     // TODO: sort in-place.
     var formats youtubev2.FormatList
     for _, format := range inFormats {
-        if format.FPS > 0 && format.AudioChannels > 0 {
+        if format.FPS > 0 && format.ContentLength > 0 && format.AudioChannels > 0 {
             formats = append(formats, format)
         }
     }
@@ -80,13 +80,9 @@ func SortVideo(inFormats youtubev2.FormatList) youtubev2.FormatList {
         isMP4I := strings.Contains(formats[i].MimeType, "mp4")
         isMP4J := strings.Contains(formats[j].MimeType, "mp4")
         if isMP4I && isMP4J {
-            return compareQualityLabel(formats[i].QualityLabel, formats[j].QualityLabel)
+            return formats[i].Bitrate < formats[j].Bitrate
         }
-        return strings.Contains(formats[i].MimeType, "mp4")
+        return isMP4I
     })

     return formats
 }
-
-func compareQualityLabel(a, b string) bool {
-    return (a == "360p" || a == "480p") && (b != "360p" && b != "480p")
-}

@@ -20,17 +20,19 @@ import (
     youtubev2 "github.com/kkdai/youtube/v2"
 )

-const (
-    SizeOfInt16 = 2
+const SizeOfInt16 = 2

+const (
     rawAudioCodec = "pcm_s16le"
     rawAudioFormat = "s16le"
     rawAudioSampleRate = 48000
+)

+const (
     thumbnailPrescaleWidth = -1
     thumbnailPrescaleHeight = 120
-    thumbnailWidth = 30
-    thumbnailHeight = 100
+    thumbnailWidth = 177 // 16:9
+    thumbnailHeight = 100 // "
 )

 // YoutubeClient wraps the youtube.Client client.
@@ -87,7 +89,7 @@ func (d *Downloader) Download(ctx context.Context, videoID string) (*media.Media
     }()
     go func() {
         defer close(videoResultChan)
-        video, videoErr := d.downloadVideo(ctx, video, mediaSet.ThumbnailPath())
+        video, videoErr := d.downloadVideo(ctx, video, mediaSet.VideoPath(), mediaSet.ThumbnailPath())
         result := videoResult{video, videoErr}
         videoResultChan <- result
         wg.Done()
@@ -197,43 +199,29 @@ func thumbnailGridSize(msecs int) (int, int) {
     return x, x
 }

-func (d *Downloader) downloadVideo(ctx context.Context, video *youtubev2.Video, thumbnailOutPath string) (*media.Video, error) {
+func (d *Downloader) downloadVideo(ctx context.Context, video *youtubev2.Video, outPath string, thumbnailOutPath string) (*media.Video, error) {
     if len(video.Formats) == 0 {
         return nil, errors.New("error selecting audio format: no format available")
     }
     format := SortVideo(video.Formats)[0]
     log.Printf("selected video format: %s", FormatDebugString(&format, false))

+    durationMsecs, err := strconv.Atoi(format.ApproxDurationMs)
+    if err != nil {
+        return nil, fmt.Errorf("could not parse video duration: %s", err)
+    }
+
     stream, _, err := d.youtubeClient.GetStreamContext(ctx, video, &format)
     if err != nil {
         return nil, fmt.Errorf("error fetching video stream: %v", err)
     }

-    durationMsecs, err := strconv.Atoi(format.ApproxDurationMs)
+    videoFile, err := os.Create(outPath)
     if err != nil {
-        return nil, fmt.Errorf("could not parse video duration: %s", err)
+        return nil, fmt.Errorf("error creating video file: %v", err)
     }
-    gridSizeX, gridSizeY := thumbnailGridSize(durationMsecs)
-    var errOut bytes.Buffer
-    cmd := exec.CommandContext(
-        ctx,
-        "ffmpeg",
-        "-i",
-        "-",
-        "-vf",
-        fmt.Sprintf("fps=1,scale=%d:%d,crop=%d:%d,tile=%dx%d", thumbnailPrescaleWidth, thumbnailPrescaleHeight, thumbnailWidth, thumbnailHeight, gridSizeX, gridSizeY),
-        "-f",
-        "image2pipe",
-        "-vsync",
-        "0",
-        thumbnailOutPath,
-    )
-    cmd.Stdin = stream
-    cmd.Stderr = &errOut
-    if err = cmd.Run(); err != nil {
-        log.Println(errOut.String())
+
+    if _, err = io.Copy(videoFile, stream); err != nil {
         return nil, fmt.Errorf("error processing video: %v", err)
     }

@@ -1,38 +1,7 @@
+body {
+  background-color: #333;
+}
+
 .App {
   text-align: center;
 }
-
-.App-logo {
-  height: 40vmin;
-  pointer-events: none;
-}
-
-@media (prefers-reduced-motion: no-preference) {
-  .App-logo {
-    animation: App-logo-spin infinite 20s linear;
-  }
-}
-
-.App-header {
-  background-color: #282c34;
-  min-height: 100vh;
-  display: flex;
-  flex-direction: column;
-  align-items: center;
-  justify-content: center;
-  font-size: calc(10px + 2vmin);
-  color: white;
-}
-
-.App-link {
-  color: #61dafb;
-}
-
-@keyframes App-logo-spin {
-  from {
-    transform: rotate(0deg);
-  }
-  to {
-    transform: rotate(360deg);
-  }
-}

@@ -1,14 +1,207 @@
-import React from 'react';
+import { useState, useEffect } from 'react';
+import { VideoPreview } from './VideoPreview';
+import { Overview } from './Overview';
+import { Waveform } from './Waveform';
+import { ControlBar } from './ControlBar';
+import { SeekBar } from './SeekBar';
 import './App.css';
-import { Waveform } from './Waveform/Waveform';

-const audioContext = new AudioContext();
+// Audio corresponds to media.Audio.
+export interface Audio {
+  bytes: number;
+  channels: number;
+  frames: number;
+  sampleRate: number;
+}
+
+// Video corresponds to media.Video.
+export interface Video {
+  bytes: number;
+  thumbnailWidth: number;
+  thumbnailHeight: number;
+  durationMillis: number;
+}
+
+// MediaSet corresponds to media.MediaSet.
+export interface MediaSet {
+  id: string;
+  source: string;
+  audio: Audio;
+  video: Video;
+}
+
+// Frames represents a selection of audio frames.
+export interface Frames {
+  start: number;
+  end: number;
+}

 function App(): JSX.Element {
+  const [mediaSet, setMediaSet] = useState<MediaSet | null>(null);
+  const [video, _setVideo] = useState(document.createElement('video'));
+  const [position, setPosition] = useState(0);
+  const [viewport, setViewport] = useState({ start: 0, end: 0 });
+
+  // effects
+
+  // TODO: error handling
+  const videoID = new URLSearchParams(window.location.search).get('video_id');
+
+  // fetch mediaset on page load:
+  useEffect(() => {
+    (async function () {
+      console.log('fetching media...');
+      const resp = await fetch(
+        `http://localhost:8888/api/media_sets/${videoID}`
+      );
+      const respBody = await resp.json();
+      if (respBody.error) {
+        console.log('error fetching media set:', respBody.error);
+        return;
+      }
+
+      const mediaSet = {
+        id: respBody.id,
+        source: respBody.source,
+        audio: {
+          sampleRate: respBody.audio.sample_rate,
+          bytes: respBody.audio.bytes,
+          frames: respBody.audio.frames,
+          channels: respBody.audio.channels,
+        },
+        video: {
+          bytes: respBody.video.bytes,
+          thumbnailWidth: respBody.video.thumbnail_width,
+          thumbnailHeight: respBody.video.thumbnail_height,
+          durationMillis: Math.floor(respBody.video.duration / 1000 / 1000),
+        },
+      };
+
+      setMediaSet(mediaSet);
+    })();
+  }, []);
+
+  // setup player on first page load only:
+  useEffect(() => {
+    setInterval(() => {
+      setPosition(video.currentTime);
+    }, 100);
+  }, []);
+
+  // load video when MediaSet is loaded:
+  useEffect(() => {
+    if (mediaSet == null) {
+      return;
+    }
+
+    video.src = `http://localhost:8888/api/media_sets/${videoID}/video`;
+    video.muted = false;
+    video.volume = 1;
+    console.log('set video src', video.src);
+  }, [mediaSet]);
+
+  // set viewport when MediaSet is loaded:
+  useEffect(() => {
+    if (mediaSet == null) {
+      return;
+    }
+    setViewport({ start: 0, end: mediaSet.audio.frames });
+  }, [mediaSet]);
+
+  useEffect(() => {
+    console.debug('viewport updated', viewport);
+  }, [viewport]);
+
+  // handlers
+
+  const handleOverviewSelectionChange = (selection: Frames) => {
+    console.log('in handleOverviewSelectionChange', selection);
+    if (mediaSet == null) {
+      return;
+    }
+
+    if (selection.start >= selection.end) {
+      setViewport({ start: 0, end: mediaSet.audio.frames });
+      return;
+    }
+
+    setViewport({ ...selection });
+  };
+
+  // render component
+
+  const containerStyles = {
+    border: '1px solid black',
+    width: '90%',
+    margin: '1em auto',
+    minHeight: '500px',
+    height: '700px',
+    display: 'flex',
+    flexDirection: 'column',
+  } as React.CSSProperties;
+
+  let offsetPixels = 75;
+  if (mediaSet != null) {
+    offsetPixels = Math.floor(mediaSet.video.thumbnailWidth / 2);
+  }
+
+  if (mediaSet == null) {
+    // TODO: improve
+    return <></>;
+  }
+
   return (
-    <div className="App">
-      <Waveform audioContext={audioContext} />
-    </div>
+    <>
+      <div className="App">
+        <div style={containerStyles}>
+          <ControlBar
+            onPlay={() => {
+              video.play();
+            }}
+            onPause={() => {
+              video.pause();
+            }}
+          />
+          <Overview
+            mediaSet={mediaSet}
+            offsetPixels={offsetPixels}
+            height={80}
+            position={position}
+            onSelectionStart={(x1: number) => {
+              console.log('onSelectionStart', x1);
+            }}
+            onSelectionChange={handleOverviewSelectionChange}
+          />
+          <Waveform
+            mediaSet={mediaSet}
+            position={position}
+            viewport={viewport}
+            offsetPixels={offsetPixels}
+          />
+          <SeekBar
+            position={video.currentTime}
+            duration={mediaSet.audio.frames / mediaSet.audio.sampleRate}
+            offsetPixels={offsetPixels}
+            onPositionChanged={(position: number) => {
+              video.currentTime = position;
+            }}
+          />
+          <VideoPreview
+            video={video}
+            position={position}
+            duration={mediaSet.video.durationMillis}
+            height={mediaSet.video.thumbnailHeight}
+          />
+        </div>
+      </div>
+    </>
   );
 }
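
The fetch effect above maps the API's snake_case JSON to the camelCase MediaSet shape inline. As a refactoring sketch only (the toMediaSet name is hypothetical, and the nanosecond interpretation of video.duration is an assumption suggested by the /1000/1000 conversion to milliseconds), the mapping could live in a typed helper:

import { MediaSet } from './App';

// Hypothetical decoder extracted from the fetch effect in App.tsx.
// Assumes video.duration is a Go time.Duration serialized as nanoseconds.
export function toMediaSet(respBody: any): MediaSet {
  return {
    id: respBody.id,
    source: respBody.source,
    audio: {
      sampleRate: respBody.audio.sample_rate,
      bytes: respBody.audio.bytes,
      frames: respBody.audio.frames,
      channels: respBody.audio.channels,
    },
    video: {
      bytes: respBody.video.bytes,
      thumbnailWidth: respBody.video.thumbnail_width,
      thumbnailHeight: respBody.video.thumbnail_height,
      durationMillis: Math.floor(respBody.video.duration / 1000 / 1000),
    },
  };
}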


@@ -0,0 +1,30 @@
interface Props {
onPlay: () => void;
onPause: () => void;
}
export const ControlBar: React.FC<Props> = (props: Props) => {
const styles = { width: '100%', flexGrow: 0 };
const buttonStyles = {
cursor: 'pointer',
background: 'black',
outline: 'none',
border: 'none',
color: 'green',
display: 'inline-block',
margin: '0 2px',
};
return (
<>
<div style={styles}>
<button style={buttonStyles} onClick={props.onPlay}>
Play
</button>
<button style={buttonStyles} onClick={props.onPause}>
Pause
</button>
</div>
</>
);
};

@@ -1,5 +1,8 @@
-import { CanvasLogicalWidth } from './Waveform';
 import { MouseEvent } from 'react';

+import { Frames } from './App';
+// TODO: pass CanvasLogicalWidth as an argument instead.
+import { CanvasLogicalWidth } from './Waveform';
+
 interface Point {
   x: number;
@@ -33,3 +36,21 @@ export const mouseEventToCanvasPoint = (
 export const canvasXToFrame = (x: number, numFrames: number): number => {
   return Math.floor((x / CanvasLogicalWidth) * numFrames);
 };
+
+// TODO: add tests
+// secsToCanvasX returns the logical x coordinate for a given position
+// marker. It is null if the marker is outside of the current viewport.
+export const secsToCanvasX = (
+  secs: number,
+  sampleRate: number,
+  viewport: Frames
+): number | null => {
+  const frame = Math.floor(secs * sampleRate);
+  if (frame < viewport.start || frame > viewport.end) {
+    return null;
+  }
+
+  const logicalPixelsPerFrame =
+    CanvasLogicalWidth / (viewport.end - viewport.start);
+  return (frame - viewport.start) * logicalPixelsPerFrame;
+};
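
A quick worked example of secsToCanvasX with illustrative values (not part of the commit), assuming the 48 kHz sample rate the backend uses for raw audio:

import { secsToCanvasX } from './Helpers';

// 10 seconds of audio at 48 kHz: the viewport spans 480,000 frames,
// so each frame is 2000 / 480000 = 1/240 logical pixels wide.
const viewport = { start: 0, end: 480_000 };
console.log(secsToCanvasX(2.5, 48_000, viewport)); // frame 120000 => x = 500
console.log(secsToCanvasX(15, 48_000, viewport)); // frame 720000 is past viewport.end => null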

frontend/src/Overview.tsx (new file, 235 lines)

@@ -0,0 +1,235 @@
import { useState, useEffect, useRef, MouseEvent } from 'react';
import { MediaSet, Frames } from './App';
import { WaveformCanvas } from './WaveformCanvas';
import { mouseEventToCanvasX } from './Helpers';
import { secsToCanvasX } from './Helpers';
interface Props {
mediaSet: MediaSet;
height: number;
offsetPixels: number;
position: number;
onSelectionStart: (x1: number) => void;
onSelectionChange: (selection: Frames) => void;
}
enum Mode {
Normal,
Selecting,
Dragging,
}
const CanvasLogicalWidth = 2000;
const CanvasLogicalHeight = 500;
const emptySelection = { start: 0, end: 0 };
// TODO: render position marker during playback
export const Overview: React.FC<Props> = ({
mediaSet,
height,
offsetPixels,
position,
onSelectionStart,
onSelectionChange,
}: Props) => {
const hudCanvasRef = useRef<HTMLCanvasElement>(null);
const [peaks, setPeaks] = useState<number[][]>([[], []]);
const [mode, setMode] = useState(Mode.Normal);
const [selection, setSelection] = useState({ ...emptySelection });
const [newSelection, setNewSelection] = useState({ ...emptySelection });
const [dragStart, setDragStart] = useState(0);
// effects
// load peaks on mediaset change
useEffect(() => {
(async function () {
if (mediaSet == null) {
return;
}
const resp = await fetch(
`http://localhost:8888/api/media_sets/${mediaSet.id}/peaks?start=0&end=${mediaSet.audio.frames}&bins=${CanvasLogicalWidth}`
);
const peaks = await resp.json();
setPeaks(peaks);
})();
}, [mediaSet]);
// draw the overview waveform
useEffect(() => {
(async function () {
const canvas = hudCanvasRef.current;
if (canvas == null) {
console.error('no hud canvas ref available');
return;
}
const ctx = canvas.getContext('2d');
if (ctx == null) {
console.error('no hud 2d context available');
return;
}
ctx.clearRect(0, 0, canvas.width, canvas.height);
// draw selection:
let currentSelection: Frames;
if (mode == Mode.Selecting || mode == Mode.Dragging) {
currentSelection = newSelection;
} else {
currentSelection = selection;
}
if (currentSelection.start < currentSelection.end) {
const x1 =
(currentSelection.start / mediaSet.audio.frames) * CanvasLogicalWidth;
const x2 =
(currentSelection.end / mediaSet.audio.frames) * CanvasLogicalWidth;
ctx.beginPath();
ctx.strokeStyle = 'red';
ctx.lineWidth = 4;
ctx.fillStyle = 'rgba(255, 255, 255, 0.15)';
ctx.rect(x1, 2, x2 - x1, canvas.height - 10);
ctx.fill();
ctx.stroke();
}
// draw position marker:
const fullSelection = { start: 0, end: mediaSet.audio.frames }; // constantize?
const x = secsToCanvasX(
position,
mediaSet.audio.sampleRate,
fullSelection
);
// should never happen:
if (x == null) {
return;
}
ctx.strokeStyle = 'red';
ctx.beginPath();
ctx.moveTo(x, 0);
ctx.lineWidth = 4;
ctx.lineTo(x, canvas.height - 4);
ctx.stroke();
})();
});
// publish event on new selection start
useEffect(() => {
onSelectionStart(newSelection.start);
}, [newSelection]);
useEffect(() => {
onSelectionChange({ ...selection });
}, [selection]);
// handlers
const handleMouseDown = (evt: MouseEvent<HTMLCanvasElement>) => {
if (mode != Mode.Normal) {
return;
}
const frame = Math.floor(
mediaSet.audio.frames *
(mouseEventToCanvasX(evt) / evt.currentTarget.width)
);
if (frame >= selection.start && frame < selection.end) {
setMode(Mode.Dragging);
setDragStart(frame);
return;
}
setMode(Mode.Selecting);
setNewSelection({ start: frame, end: frame });
};
const handleMouseMove = (evt: MouseEvent<HTMLCanvasElement>) => {
if (mode == Mode.Normal) {
return;
}
const frame = Math.floor(
mediaSet.audio.frames *
(mouseEventToCanvasX(evt) / evt.currentTarget.width)
);
if (mode == Mode.Dragging) {
const diff = frame - dragStart;
const frameCount = selection.end - selection.start;
let start = Math.max(0, selection.start + diff);
let end = start + frameCount;
if (end > mediaSet.audio.frames) {
end = mediaSet.audio.frames;
start = end - frameCount;
}
setNewSelection({
start: start,
end: end,
});
return;
}
if (frame == newSelection.end) {
return;
}
setNewSelection({ ...newSelection, end: frame });
};
const handleMouseUp = () => {
if (mode == Mode.Normal) {
return;
}
setMode(Mode.Normal);
setSelection(newSelection);
};
// render component
const containerStyles = {
flexGrow: 0,
position: 'relative',
margin: `0 ${offsetPixels}px`,
height: `${height}px`,
} as React.CSSProperties;
const hudCanvasStyles = {
position: 'absolute',
width: '100%',
height: '100%',
display: 'block',
zIndex: 2,
} as React.CSSProperties;
return (
<>
<div style={containerStyles}>
<WaveformCanvas
peaks={peaks}
width={CanvasLogicalWidth}
height={CanvasLogicalHeight}
strokeStyle="black"
fillStyle="#003300"
zIndex={1}
alpha={1}
></WaveformCanvas>
<canvas
ref={hudCanvasRef}
width={CanvasLogicalWidth}
height={CanvasLogicalHeight}
style={hudCanvasStyles}
onMouseDown={handleMouseDown}
onMouseMove={handleMouseMove}
onMouseUp={handleMouseUp}
></canvas>
</div>
</>
);
};
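
The Mode.Dragging branch of handleMouseMove shifts the selection while clamping it to the media bounds. The same logic as a standalone pure function (hypothetical name moveSelection, for illustration only):

import { Frames } from './App';

// Shift a selection by diff frames, clamping to [0, totalFrames] while
// preserving its length. Mirrors the dragging branch above.
export function moveSelection(
  selection: Frames,
  diff: number,
  totalFrames: number
): Frames {
  const frameCount = selection.end - selection.start;
  let start = Math.max(0, selection.start + diff);
  let end = start + frameCount;
  if (end > totalFrames) {
    end = totalFrames;
    start = end - frameCount;
  }
  return { start, end };
}

// moveSelection({ start: 100, end: 200 }, -150, 1000) => { start: 0, end: 100 }
// moveSelection({ start: 800, end: 950 }, 100, 1000)  => { start: 850, end: 1000 }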

frontend/src/SeekBar.tsx (new file, 140 lines)

@@ -0,0 +1,140 @@
import { useRef, useEffect, useState, MouseEvent } from 'react';
import { mouseEventToCanvasPoint } from './Helpers';
interface Props {
position: number;
duration: number;
offsetPixels: number;
onPositionChanged: (position: number) => void;
}
enum Mode {
Normal,
Dragging,
}
const LogicalWidth = 2000;
const LogicalHeight = 100;
const InnerMargin = 40;
export const SeekBar: React.FC<Props> = ({
position,
duration,
offsetPixels,
onPositionChanged,
}: Props) => {
const [mode, setMode] = useState(Mode.Normal);
const [cursor, setCursor] = useState('auto');
const canvasRef = useRef<HTMLCanvasElement>(null);
// render canvas
useEffect(() => {
const canvas = canvasRef.current;
if (canvas == null) {
console.error('no seekbar canvas ref available');
return;
}
const ctx = canvas.getContext('2d');
if (ctx == null) {
console.error('no seekbar 2d context available');
return;
}
// Set aspect ratio.
canvas.width = canvas.height * (canvas.clientWidth / canvas.clientHeight);
// background
ctx.fillStyle = '#444444';
ctx.fillRect(0, 0, canvas.width, canvas.height);
// seek bar
const pixelRatio = canvas.width / canvas.clientWidth;
const offset = offsetPixels * pixelRatio;
const width = canvas.width - offset * 2;
ctx.fillStyle = 'black';
ctx.fillRect(offset, InnerMargin, width, canvas.height - InnerMargin * 2);
// pointer
const positionRatio = position / duration;
const x = offset + width * positionRatio;
const y = canvas.height / 2;
ctx.beginPath();
ctx.arc(x, y, 20, 0, 2 * Math.PI, false);
ctx.fillStyle = 'green';
ctx.fill();
});
// helpers
const emitPositionEvent = (evt: MouseEvent<HTMLCanvasElement>) => {
const canvas = evt.currentTarget;
const { x } = mouseEventToCanvasPoint(evt);
const pixelRatio = canvas.width / canvas.clientWidth;
const offset = offsetPixels * pixelRatio;
const ratio = (x - offset) / (canvas.width - offset * 2);
onPositionChanged(ratio * duration);
};
// handlers
const handleMouseDown = (evt: MouseEvent<HTMLCanvasElement>) => {
if (mode != Mode.Normal) return;
setMode(Mode.Dragging);
emitPositionEvent(evt);
};
const handleMouseUp = () => {
if (mode != Mode.Dragging) return;
setMode(Mode.Normal);
};
const handleMouseMove = (evt: MouseEvent<HTMLCanvasElement>) => {
const { y } = mouseEventToCanvasPoint(evt);
// TODO: improve mouse detection around knob.
if (y > InnerMargin && y < LogicalHeight - InnerMargin) {
setCursor('pointer');
} else {
setCursor('auto');
}
if (mode == Mode.Normal) return;
emitPositionEvent(evt);
};
const handleMouseEnter = () => {
if (mode != Mode.Dragging) return;
setMode(Mode.Normal);
};
// render component
const styles = {
width: '100%',
height: '30px',
margin: '0 auto',
cursor: cursor,
};
return (
<>
<canvas
style={styles}
ref={canvasRef}
width={LogicalWidth}
height={LogicalHeight}
onMouseDown={handleMouseDown}
onMouseUp={handleMouseUp}
onMouseMove={handleMouseMove}
onMouseEnter={handleMouseEnter}
></canvas>
</>
);
};
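
To make the coordinate handling in emitPositionEvent concrete, a worked example with illustrative numbers (not from the commit):

// canvas.width = 2000 (logical), canvas.clientWidth = 1000 (CSS pixels),
// offsetPixels = 75, duration = 120 seconds, click at logical x = 1000.
const pixelRatio = 2000 / 1000;            // 2
const offset = 75 * pixelRatio;            // 150 logical pixels
const ratio = (1000 - 150) / (2000 - 300); // 850 / 1700 = 0.5
const newPosition = ratio * 120;           // 60 seconds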


@@ -0,0 +1,79 @@
import { useEffect, useRef } from 'react';
interface Props {
position: number;
duration: number;
height: number;
video: HTMLVideoElement;
}
export const VideoPreview: React.FC<Props> = ({
position,
duration,
height,
video,
}: Props) => {
const videoCanvasRef = useRef<HTMLCanvasElement>(null);
// effects
// render canvas
useEffect(() => {
// TODO: not sure if requestAnimationFrame is recommended here.
requestAnimationFrame(() => {
const canvas = videoCanvasRef.current;
if (canvas == null) {
console.error('no canvas ref available');
return;
}
const ctx = canvas.getContext('2d');
if (ctx == null) {
console.error('no 2d context available');
return;
}
// Set aspect ratio.
canvas.width = canvas.height * (canvas.clientWidth / canvas.clientHeight);
const durSecs = duration / 1000;
const ratio = position / durSecs;
const x = (canvas.width - 177) * ratio;
ctx.clearRect(0, 0, x, canvas.height);
ctx.clearRect(x + 177, 0, canvas.width - 177 - x, canvas.height);
ctx.drawImage(video, x, 0, 177, 100);
});
}, [position]);
// render component
const containerStyles = {
height: height + 'px',
position: 'relative',
flexGrow: 0,
} as React.CSSProperties;
const canvasStyles = {
position: 'absolute',
width: '100%',
height: '100%',
display: 'block',
zIndex: 1,
} as React.CSSProperties;
return (
<>
<div style={containerStyles}>
<canvas
width="500"
height="100"
ref={videoCanvasRef}
style={canvasStyles}
></canvas>
<canvas style={canvasStyles}></canvas>
</div>
</>
);
};
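
A worked example of the preview placement above (values illustrative; 177 is the thumbnail width hard-coded in the drawImage call):

// With a corrected canvas.width of 1000 and a 120-second video,
// a position of 60s gives a ratio of 0.5, so the 177px-wide frame lands at:
const x = (1000 - 177) * (60 / 120); // 411.5
// Everything left of x and right of x + 177 is cleared, and the current
// video frame is drawn into the 177x100 window at x.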

frontend/src/Waveform.tsx (new file, 116 lines)

@@ -0,0 +1,116 @@
import { useEffect, useState, useRef } from 'react';
import { Frames, MediaSet } from './App';
import { WaveformCanvas } from './WaveformCanvas';
import { secsToCanvasX } from './Helpers';
interface Props {
mediaSet: MediaSet;
position: number;
viewport: Frames;
offsetPixels: number;
}
export const CanvasLogicalWidth = 2000;
export const CanvasLogicalHeight = 500;
export const Waveform: React.FC<Props> = ({
mediaSet,
position,
viewport,
offsetPixels,
}: Props) => {
const [peaks, setPeaks] = useState<number[][]>([[], []]);
const hudCanvasRef = useRef<HTMLCanvasElement>(null);
// effects
// load peaks on MediaSet change
useEffect(() => {
(async function () {
if (mediaSet == null) {
return;
}
let endFrame = viewport.end;
if (endFrame <= viewport.start) {
endFrame = mediaSet.audio.frames;
}
const resp = await fetch(
`http://localhost:8888/api/media_sets/${mediaSet.id}/peaks?start=${viewport.start}&end=${endFrame}&bins=${CanvasLogicalWidth}`
);
const newPeaks = await resp.json();
setPeaks(newPeaks);
})();
}, [mediaSet, viewport]);
// render HUD
useEffect(() => {
const canvas = hudCanvasRef.current;
if (canvas == null) {
return;
}
const ctx = canvas.getContext('2d');
if (ctx == null) {
console.error('no hud 2d context available');
return;
}
ctx.clearRect(0, 0, canvas.width, canvas.height);
if (mediaSet == null) {
return;
}
const x = secsToCanvasX(position, mediaSet.audio.sampleRate, viewport);
if (x == null) {
return;
}
ctx.strokeStyle = 'red';
ctx.beginPath();
ctx.moveTo(x, 0);
ctx.lineWidth = 4;
ctx.lineTo(x, canvas.height);
ctx.stroke();
}, [viewport, position]);
// render component
const containerStyles = {
background: 'black',
margin: '0 ' + offsetPixels + 'px',
flexGrow: 1,
position: 'relative',
} as React.CSSProperties;
const canvasStyles = {
position: 'absolute',
width: '100%',
height: '100%',
display: 'block',
} as React.CSSProperties;
return (
<>
<div style={containerStyles}>
<WaveformCanvas
peaks={peaks}
width={CanvasLogicalWidth}
height={CanvasLogicalHeight}
strokeStyle="green"
fillStyle="black"
zIndex={0}
alpha={1}
></WaveformCanvas>
<canvas
width={CanvasLogicalWidth}
height={CanvasLogicalHeight}
ref={hudCanvasRef}
style={canvasStyles}
></canvas>
</div>
</>
);
};
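
Both Overview and Waveform fetch peaks with the same URL shape. A sketch of a shared helper (the fetchPeaks name is hypothetical; the start/end/bins query parameters are taken from the fetch calls above):

// Matches the endpoint registered as /api/media_sets/:id/peaks.
export async function fetchPeaks(
  mediaSetID: string,
  start: number,
  end: number,
  bins: number
): Promise<number[][]> {
  const resp = await fetch(
    `http://localhost:8888/api/media_sets/${mediaSetID}/peaks?start=${start}&end=${end}&bins=${bins}`
  );
  return (await resp.json()) as number[][];
}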


@@ -1,156 +0,0 @@
import { useEffect, useState, useRef, MouseEvent } from 'react';
import { Canvas as WaveformCanvas } from './Canvas';
import { CanvasLogicalWidth, CanvasLogicalHeight, Selection } from './Waveform';
import { mouseEventToCanvasX } from './Helpers';
interface Props {
peaks: number[][] | null;
numFrames: number;
style: React.CSSProperties;
onSelectionStart: (x1: number) => void;
onSelectionChange: (selection: Selection) => void;
}
enum Mode {
Normal,
Selecting,
}
// TODO: render position marker during playback
export const Waveform: React.FC<Props> = (props: Props) => {
const hudCanvasRef = useRef<HTMLCanvasElement>(null);
const [mode, setMode] = useState(Mode.Normal);
const defaultSelection: Selection = { x1: 0, x2: 0 };
// selection is the current selection in canvas coordinates:
const [selection, setSelection] = useState(defaultSelection);
// newSelection is a new selection in the process of being drawn by the user.
// It is only useful if Mode.Selecting is active.
const [newSelection, setNewSelection] = useState(defaultSelection);
// effects
// draw the overview waveform
useEffect(() => {
(async function () {
const canvas = hudCanvasRef.current;
if (canvas == null) {
console.error('no hud canvas ref available');
return;
}
const ctx = canvas.getContext('2d');
if (ctx == null) {
console.error('no hud 2d context available');
return;
}
ctx.clearRect(0, 0, canvas.width, canvas.height);
let currentSelection: Selection;
if (mode == Mode.Selecting) {
currentSelection = newSelection;
} else {
currentSelection = selection;
}
if (currentSelection.x1 >= currentSelection.x2) {
return;
}
ctx.beginPath();
ctx.strokeStyle = 'red';
ctx.lineWidth = 2;
ctx.fillStyle = 'rgba(255, 255, 255, 0.3)';
ctx.rect(
currentSelection.x1,
2,
currentSelection.x2 - currentSelection.x1,
canvas.height - 8
);
ctx.fill();
ctx.stroke();
})();
});
// handlers
const handleMouseDown = (evt: MouseEvent<HTMLCanvasElement>) => {
if (mode != Mode.Normal) {
return;
}
setMode(Mode.Selecting);
const x = mouseEventToCanvasX(evt);
setNewSelection({ x1: x, x2: x });
props.onSelectionStart(x);
};
const handleMouseMove = (evt: MouseEvent<HTMLCanvasElement>) => {
if (mode != Mode.Selecting) {
return;
}
const x = mouseEventToCanvasX(evt);
if (x == newSelection.x2) {
return;
}
setNewSelection({ ...newSelection, x2: x });
};
const handleMouseUp = () => {
if (mode != Mode.Selecting) {
return;
}
setMode(Mode.Normal);
// TODO: better shallow equality check?
if (selection.x1 !== newSelection.x1 || selection.x2 !== newSelection.x2) {
setSelection(newSelection);
props.onSelectionChange(newSelection);
}
};
// render component
const canvasStyles = {
width: '100%',
height: '100%',
margin: '0 auto',
display: 'block',
} as React.CSSProperties;
const hudCanvasStyles = {
width: '100%',
height: '100%',
position: 'absolute',
top: 0,
left: 0,
right: 0,
bottom: 0,
zIndex: 1,
} as React.CSSProperties;
return (
<>
<div style={props.style}>
<WaveformCanvas
peaks={props.peaks}
fillStyle="grey"
strokeStyle="black"
style={canvasStyles}
></WaveformCanvas>
<canvas
ref={hudCanvasRef}
style={hudCanvasStyles}
width={CanvasLogicalWidth}
height={CanvasLogicalHeight}
onMouseDown={handleMouseDown}
onMouseMove={handleMouseMove}
onMouseUp={handleMouseUp}
></canvas>
</div>
</>
);
};


@@ -1,82 +0,0 @@
import { useRef, useEffect, useState, MouseEvent } from 'react';
import { mouseEventToCanvasPoint } from './Helpers';
interface Props {
duration: number;
style: React.CSSProperties;
}
const LogicalHeight = 200;
const MarginX = 0;
const MarginY = 85;
const KnobRadius = 40;
export const SeekBar: React.FC<Props> = (props: Props) => {
const canvasRef = useRef<HTMLCanvasElement>(null);
const [position, _setPosition] = useState(100);
const [cursor, setCursor] = useState('auto');
const secsToCanvasX = (secs: number, width: number): number => {
return (secs / props.duration) * width;
};
// draw the canvas
useEffect(() => {
const canvas = canvasRef.current;
if (canvas == null) {
return;
}
const ctx = canvas.getContext('2d');
if (ctx == null) {
console.error('no seekbar 2d context available');
return;
}
// Set aspect ratio.
canvas.width = canvas.height * (canvas.clientWidth / canvas.clientHeight);
ctx.fillStyle = '#333333';
ctx.fillRect(0, 0, canvas.width, canvas.height);
ctx.fillStyle = 'black';
ctx.fillRect(
MarginX,
MarginY,
canvas.width - MarginX * 2,
canvas.height - MarginY * 2
);
const x = secsToCanvasX(position, canvas.width);
const y = LogicalHeight / 2;
ctx.beginPath();
ctx.arc(x, y, KnobRadius, 0, 2 * Math.PI, false);
ctx.fillStyle = 'red';
ctx.fill();
});
const style = { ...props.style, cursor: cursor };
// handlers
const handleMouseMove = (evt: MouseEvent<HTMLCanvasElement>) => {
const { x: _x, y: y } = mouseEventToCanvasPoint(evt);
// TODO: improve mouse detection around knob.
if (y > MarginY && y < LogicalHeight - MarginY) {
setCursor('pointer');
} else {
setCursor('auto');
}
};
return (
<>
<canvas
style={style}
ref={canvasRef}
height={LogicalHeight}
onMouseMove={handleMouseMove}
></canvas>
</>
);
};


@@ -1,101 +0,0 @@
import { useState, useEffect, useRef } from 'react';
import { CanvasLogicalWidth, MediaSet } from './Waveform';
interface Props {
mediaSet: MediaSet;
style: React.CSSProperties;
}
enum State {
Loading,
Ready,
Error,
}
export const Thumbnails: React.FC<Props> = ({ mediaSet, style }: Props) => {
const [image, _setImage] = useState(new Image());
const [state, setState] = useState(State.Loading);
const canvasRef = useRef<HTMLCanvasElement>(null);
// load thumbnail image when available:
useEffect(() => {
if (mediaSet == null) return;
image.src = `http://localhost:8888/api/media_sets/${mediaSet.id}/thumbnails`;
image.onload = () => {
setState(State.Ready);
};
}, []);
// render canvas if image has been loaded successfully:
useEffect(() => {
if (state != State.Ready) return;
if (mediaSet == null) return;
const canvas = canvasRef.current;
if (canvas == null) {
console.error('no canvas available');
return;
}
const ctx = canvas.getContext('2d');
if (ctx == null) {
console.error('no thumbnail 2d context available');
return;
}
const tw = mediaSet.video.thumbnailWidth;
const th = mediaSet.video.thumbnailHeight;
const iw = image.width;
const { width: pw, height: ph } = canvas.getBoundingClientRect();
// set canvas logical width to suit the aspect ratio:
// TODO: confirm this is needed.
const ar = tw / th;
const par = pw / ph;
canvas.width = tw * (par / ar);
const durationSecs = mediaSet.video.durationMillis / 1000;
for (let dx = 0; dx < canvas.width; dx += tw) {
const secs = Math.floor((dx / canvas.width) * durationSecs);
const sx = (secs * tw) % iw;
const sy = Math.floor(secs / (iw / tw)) * th;
ctx.drawImage(image, sx, sy, tw, th, dx, 0, tw, th);
}
}, [state]);
// rendering
if (mediaSet == null || mediaSet.video == null) {
console.error('unexpected null video');
return null;
}
if (state == State.Loading) {
return (
<>
<div>Loading...</div>
</>
);
}
if (state == State.Error) {
return (
<>
<span>Something went wrong</span>
</>
);
}
return (
<>
<canvas
ref={canvasRef}
style={style}
width={CanvasLogicalWidth}
height={100}
></canvas>
</>
);
};


@@ -1,374 +0,0 @@
import { useEffect, useState, useRef, MouseEvent } from 'react';
import { Waveform as WaveformOverview } from './Overview';
import { Thumbnails } from './Thumbnails';
import { Canvas as WaveformCanvas } from './Canvas';
import { SeekBar } from './SeekBar';
import { canvasXToFrame, mouseEventToCanvasX } from './Helpers';
interface Props {
audioContext: AudioContext;
}
// Audio corresponds to media.Audio.
export interface Audio {
bytes: number;
channels: number;
frames: number;
sampleRate: number;
}
// Video corresponds to media.Video.
export interface Video {
bytes: number;
thumbnailWidth: number;
thumbnailHeight: number;
durationMillis: number;
}
// MediaSet corresponds to media.MediaSet.
export interface MediaSet {
id: string;
source: string;
audio: Audio;
video: Video;
}
export interface Selection {
x1: number;
x2: number;
}
interface ZoomSettings {
startFrame: number;
endFrame: number;
}
const defaultZoomSettings: ZoomSettings = { startFrame: 0, endFrame: 0 };
export const CanvasLogicalWidth = 2000;
export const CanvasLogicalHeight = 500;
export const Waveform: React.FC<Props> = ({ audioContext }: Props) => {
const [mediaSet, setMediaSet] = useState<MediaSet | null>(null);
const [currentTime, setCurrentTime] = useState(0);
// TODO: extract to player component.
const [audio, _setAudio] = useState(new Audio());
const [zoomSettings, setZoomSettings] = useState(defaultZoomSettings);
const [waveformPeaks, setWaveformPeaks] = useState(null);
const [overviewPeaks, setOverviewPeaks] = useState(null);
const hudCanvasRef = useRef<HTMLCanvasElement>(null);
// TODO: error handling
const videoID = new URLSearchParams(window.location.search).get('video_id');
// helpers
// secsToCanvasX returns the logical x coordinate for a given position
// marker. It is null if the marker is outside of the current viewport.
const secsToCanvasX = (secs: number): number | null => {
if (mediaSet == null) {
return null;
}
const frame = secs * mediaSet.audio.sampleRate;
if (frame < zoomSettings.startFrame || frame > zoomSettings.endFrame) {
return null;
}
const logicalPixelsPerFrame =
CanvasLogicalWidth / (zoomSettings.endFrame - zoomSettings.startFrame);
return (frame - zoomSettings.startFrame) * logicalPixelsPerFrame;
};
// effects
// setup player on page load:
useEffect(() => {
(async function () {
audio.addEventListener('timeupdate', () => {
setCurrentTime(audio.currentTime);
});
})();
}, []);
// fetch mediaset on page load:
useEffect(() => {
(async function () {
console.log('fetching media...');
const resp = await fetch(
`http://localhost:8888/api/media_sets/${videoID}`
);
const respBody = await resp.json();
if (respBody.error) {
console.log('error fetching media set:', respBody.error);
return;
}
const mediaSet: MediaSet = {
id: respBody.id,
source: respBody.source,
audio: {
sampleRate: respBody.audio.sample_rate,
bytes: respBody.audio.bytes,
frames: respBody.audio.frames,
channels: respBody.audio.channels,
},
video: {
bytes: respBody.video.bytes,
thumbnailWidth: respBody.video.thumbnail_width,
thumbnailHeight: respBody.video.thumbnail_height,
durationMillis: Math.floor(respBody.video.duration / 1000 / 1000),
},
};
setMediaSet(mediaSet);
setZoomSettings({ startFrame: 0, endFrame: mediaSet.audio.frames });
})();
}, [audioContext]);
// load video when MediaSet is loaded:
useEffect(() => {
if (mediaSet == null) {
return;
}
const url = `http://localhost:8888/api/media_sets/${videoID}/audio`;
audio.src = url;
audio.muted = false;
audio.volume = 1;
}, [mediaSet]);
// fetch new waveform peaks when zoom settings are updated:
useEffect(() => {
(async function () {
if (mediaSet == null) {
return;
}
let endFrame = zoomSettings.endFrame;
if (endFrame <= zoomSettings.startFrame) {
endFrame = mediaSet.audio.frames;
}
const resp = await fetch(
`http://localhost:8888/api/media_sets/${videoID}/peaks?start=${zoomSettings.startFrame}&end=${endFrame}&bins=${CanvasLogicalWidth}`
);
const peaks = await resp.json();
setWaveformPeaks(peaks);
if (overviewPeaks == null) {
setOverviewPeaks(peaks);
}
})();
}, [zoomSettings]);
// redraw HUD
useEffect(() => {
(async function () {
const canvas = hudCanvasRef.current;
if (canvas == null) {
return;
}
const ctx = canvas.getContext('2d');
if (ctx == null) {
console.error('no hud 2d context available');
return;
}
ctx.clearRect(0, 0, canvas.width, canvas.height);
if (mediaSet == null) {
return;
}
const x = secsToCanvasX(currentTime);
if (x == null) {
return;
}
ctx.strokeStyle = 'red';
ctx.beginPath();
ctx.moveTo(x, 0);
ctx.lineTo(x, canvas.height);
ctx.stroke();
})();
}, [currentTime]);
// end of hook configuration.
// TODO: render loading page here.
if (mediaSet == null) {
return null;
}
// callbacks
const handleMouseMove = (evt: MouseEvent<HTMLCanvasElement>) => {
if (mediaSet == null) {
return;
}
const canvasX = mouseEventToCanvasX(evt);
console.log(
'mousemove, x =',
canvasX,
'frame =',
canvasXToFrame(canvasX, mediaSet.audio.frames)
);
};
const handleMouseDown = () => {
return null;
};
const handleMouseUp = () => {
return null;
};
const handlePlay = async () => {
await audio.play();
};
const handlePause = () => {
audio.pause();
};
const handleZoomIn = () => {
if (mediaSet == null) {
return;
}
console.log('zoom in');
const diff = zoomSettings.endFrame - zoomSettings.startFrame;
const endFrame = zoomSettings.startFrame + Math.floor(diff / 2);
const settings = { ...zoomSettings, endFrame: endFrame };
setZoomSettings(settings);
};
const handleZoomOut = () => {
if (mediaSet == null) {
return;
}
console.log('zoom out');
const diff = zoomSettings.endFrame - zoomSettings.startFrame;
const newDiff = diff * 2;
const endFrame = Math.min(
zoomSettings.endFrame + newDiff,
mediaSet.audio.frames
);
const settings = { ...zoomSettings, endFrame: endFrame };
setZoomSettings(settings);
};
const handleSelectionStart = (x: number) => {
const frame = canvasXToFrame(x, mediaSet.audio.frames);
if (audio.paused) {
audio.currentTime = frame / mediaSet.audio.sampleRate;
}
};
const handleSelectionChange = (selection: Selection) => {
if (mediaSet == null) {
return;
}
const startFrame = canvasXToFrame(selection.x1, mediaSet.audio.frames);
const endFrame = canvasXToFrame(selection.x2, mediaSet.audio.frames);
const settings: ZoomSettings = {
startFrame: startFrame,
endFrame: endFrame,
};
setZoomSettings(settings);
audio.currentTime = startFrame / mediaSet.audio.sampleRate;
};
// render component:
const wrapperProps = {
width: '90%',
height: '550px',
position: 'relative',
margin: '0 auto',
} as React.CSSProperties;
const waveformCanvasProps = {
width: '100%',
height: '100%',
position: 'absolute',
top: 0,
left: 0,
right: 0,
bottom: 0,
zIndex: 0,
} as React.CSSProperties;
const hudCanvasProps = {
width: '100%',
height: '100%',
position: 'absolute',
top: 0,
left: 0,
right: 0,
bottom: 0,
zIndex: 1,
} as React.CSSProperties;
const overviewStyles = { ...wrapperProps, height: '120px' };
// TODO: why is the margin needed?
const controlPanelStyles = { margin: '1em' } as React.CSSProperties;
const clockTextAreaProps = { color: '#999', width: '400px' };
const thumbnailStyles = {
width: '90%',
height: '35px',
margin: '10px auto 0 auto',
display: 'block',
};
const seekBarStyles = {
width: '90%',
height: '50px',
margin: '0 auto',
display: 'block',
};
return (
<>
<Thumbnails mediaSet={mediaSet} style={thumbnailStyles} />
<WaveformOverview
peaks={overviewPeaks}
numFrames={mediaSet.audio.frames}
style={overviewStyles}
onSelectionStart={handleSelectionStart}
onSelectionChange={handleSelectionChange}
></WaveformOverview>
<div style={wrapperProps}>
<WaveformCanvas
peaks={waveformPeaks}
fillStyle="black"
strokeStyle="green"
style={waveformCanvasProps}
></WaveformCanvas>
<canvas
ref={hudCanvasRef}
onMouseMove={handleMouseMove}
onMouseDown={handleMouseDown}
onMouseUp={handleMouseUp}
style={hudCanvasProps}
width={CanvasLogicalWidth}
height={CanvasLogicalHeight}
></canvas>
</div>
<SeekBar
duration={mediaSet.audio.frames / mediaSet.audio.sampleRate}
style={seekBarStyles}
/>
<div style={controlPanelStyles}>
<button onClick={handlePlay}>Play</button>
<button onClick={handlePause}>Pause</button>
<button onClick={handleZoomIn}>+</button>
<button onClick={handleZoomOut}>-</button>
<input type="readonly" style={clockTextAreaProps} />
</div>
</>
);
};

@@ -1,13 +1,15 @@
 import { useEffect, useRef } from 'react';
-import { CanvasLogicalWidth, CanvasLogicalHeight } from './Waveform';

 const maxPeakValue = 32_768;

 interface Props {
+  width: number;
+  height: number;
   peaks: number[][] | null;
   strokeStyle: string;
   fillStyle: string;
-  style: React.CSSProperties;
+  zIndex: number;
+  alpha: number;
 }

 // Canvas is a generic component that renders a waveform to a canvas.
@@ -18,7 +20,7 @@ interface Props {
 // strokeStyle: waveform style
 // fillStyle: background style
 // style: React.CSSProperties applied to canvas element
-export const Canvas: React.FC<Props> = (props: Props) => {
+export const WaveformCanvas: React.FC<Props> = (props: Props) => {
   const canvasRef = useRef<HTMLCanvasElement>(null);

   useEffect(() => {
@@ -53,20 +55,30 @@ export const Canvas: React.FC<Props> = (props: Props) => {
       const y1 = (chanHeight - height) / 2 + yOffset;
       const y2 = y1 + height;
       ctx.beginPath();
+      ctx.globalAlpha = props.alpha;
       ctx.moveTo(j, y1);
       ctx.lineTo(j, y2);
       ctx.stroke();
+      ctx.globalAlpha = 1;
     }
   }
 }, [props.peaks]);

+  const canvasStyles = {
+    display: 'block',
+    position: 'absolute',
+    width: '100%',
+    height: '100%',
+    zIndex: props.zIndex,
+  } as React.CSSProperties;
+
   return (
     <>
       <canvas
         ref={canvasRef}
-        width={CanvasLogicalWidth}
-        height={CanvasLogicalHeight}
-        style={props.style}
+        width={props.width}
+        height={props.height}
+        style={canvasStyles}
       ></canvas>
     </>
   );
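
The 32_768 ceiling matches the backend's pcm_s16le raw audio: peaks are signed 16-bit magnitudes, so a full-scale sample maps to the full channel height. A sketch of the scaling this implies (the exact expression sits in a part of the loop not shown in this hunk, so the names below follow the visible code but the formula is an assumption):

const maxPeakValue = 32_768;
const chanHeight = 250;  // e.g. CanvasLogicalHeight / 2 for two channels
const peak = 16_384;     // a half-scale sample
const height = (peak / maxPeakValue) * chanHeight; // 125 logical pixels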