diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx
index a53698a..ddedcba 100644
--- a/frontend/src/App.tsx
+++ b/frontend/src/App.tsx
@@ -1,24 +1,13 @@
 import React from 'react';
-import logo from './logo.svg';
 import './App.css';
+import { Waveform } from './Waveform';
+
+const audioContext = new AudioContext();
 
 function App() {
   return (
     <div className="App">
-      <header className="App-header">
-        <img src={logo} className="App-logo" alt="logo" />
-        <p>
-          Edit <code>src/App.tsx</code> and save to reload.
-        </p>
-        <a
-          className="App-link"
-          href="https://reactjs.org"
-          target="_blank"
-          rel="noopener noreferrer"
-        >
-          Learn React
-        </a>
-      </header>
+      <Waveform audioContext={audioContext} />
     </div>
   );
 }
diff --git a/frontend/src/Waveform.tsx b/frontend/src/Waveform.tsx
new file mode 100644
index 0000000..aebac03
--- /dev/null
+++ b/frontend/src/Waveform.tsx
@@ -0,0 +1,61 @@
+import { useEffect, useState, useRef } from "react";
+
+type WaveformProps = {
+  audioContext: AudioContext;
+};
+
+export const Waveform: React.FC<WaveformProps> = ({ audioContext }: WaveformProps) => {
+  const [audioData, setAudioData] = useState<AudioBuffer | null>(null);
+  const canvasRef = useRef<HTMLCanvasElement>(null);
+
+  // load audio data on page load:
+  useEffect(() => {
+    (async function() {
+      console.log("fetching audio data...");
+
+      const videoID = "s_oJYdRlrv0";
+
+      const resp = await fetch(`http://localhost:8888/api/audio?video_id=${videoID}`)
+      console.log("resp =", resp)
+
+      const body = await resp.arrayBuffer();
+      console.log("body =", body)
+
+      const data = await audioContext.decodeAudioData(body);
+
+      console.log("decodedAudio =", data, "len =", data.length);
+      setAudioData(data);
+    })();
+  }, [audioContext]);
+
+  // render waveform to canvas when audioData is updated:
+  useEffect(() => {
+    const canvas = canvasRef.current;
+    if (canvas == null) {
+      console.error("no canvas ref available");
+      return
+    }
+
+    const ctx = canvas.getContext("2d");
+    if (ctx == null) {
+      console.error("no 2d context available");
+      return;
+    }
+
+    ctx.fillStyle = 'black';
+    ctx.fillRect(0, 0, canvas.width, canvas.height);
+
+    if (audioData == null) {
+      return;
+    }
+
+    console.log("rendering audio")
+  }, [audioData]);
+
+  // render component:
+
+  console.log("rendering, audioData =", audioData);
+
+  const canvasProps = {width: "100%", height: "500px"};
+  return <canvas ref={canvasRef} {...canvasProps} />
+}
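The second `useEffect` in `Waveform.tsx` stops at `console.log("rendering audio")` after filling in the black background, so the actual waveform drawing is still to come. For reference, here is a minimal sketch of the kind of routine that placeholder could grow into, assuming a simple min/max-per-pixel-column rendering of the first channel of the decoded `AudioBuffer`; the `drawWaveform` name, the white stroke, and the bucketing scheme are my assumptions, not part of the diff above.

```ts
// Hypothetical helper, not part of the diff: draws one vertical line per
// horizontal canvas pixel, spanning the min/max sample in that pixel's bucket.
function drawWaveform(
  ctx: CanvasRenderingContext2D,
  audioData: AudioBuffer,
  width: number,
  height: number,
) {
  const samples = audioData.getChannelData(0); // Float32Array of samples in [-1, 1]
  const samplesPerPixel = Math.max(1, Math.floor(samples.length / width));
  const midY = height / 2;

  ctx.strokeStyle = "white";
  ctx.beginPath();
  for (let x = 0; x < width; x++) {
    // find the extremes of the samples that map to this pixel column
    let min = 1.0;
    let max = -1.0;
    const start = x * samplesPerPixel;
    for (let i = start; i < start + samplesPerPixel && i < samples.length; i++) {
      const s = samples[i];
      if (s < min) min = s;
      if (s > max) max = s;
    }
    // positive samples go above the midline, negative below;
    // the +0.5 keeps 1px vertical strokes crisp
    ctx.moveTo(x + 0.5, (1 - max) * midY);
    ctx.lineTo(x + 0.5, (1 - min) * midY);
  }
  ctx.stroke();
}
```

Inside the effect it would be called right after the `audioData == null` guard, e.g. `drawWaveform(ctx, audioData, canvas.width, canvas.height);`.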