From 374137256e33d735bfe5ec8a0613aa7b9552ddbb Mon Sep 17 00:00:00 2001 From: Michael Evans Date: Sun, 2 Jan 2022 01:32:04 -0600 Subject: [PATCH] Proof of concept for input field with routing to video page --- frontend/package.json | 1 + frontend/src/App.tsx | 403 +------------------------- frontend/src/Overview.tsx | 1 - frontend/src/VideoPreview.tsx | 2 +- frontend/src/Waveform.tsx | 28 +- frontend/src/components/HomePage.tsx | 40 +++ frontend/src/components/VideoPage.tsx | 391 +++++++++++++++++++++++++ frontend/src/types.d.ts | 9 + frontend/yarn.lock | 29 ++ 9 files changed, 497 insertions(+), 407 deletions(-) create mode 100644 frontend/src/components/HomePage.tsx create mode 100644 frontend/src/components/VideoPage.tsx create mode 100644 frontend/src/types.d.ts diff --git a/frontend/package.json b/frontend/package.json index e8fec40..641667a 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -14,6 +14,7 @@ "google-protobuf": "^3.19.0", "react": "^17.0.2", "react-dom": "^17.0.2", + "react-router-dom": "^6.2.1", "react-scripts": "4.0.3", "typescript": "^4.1.2", "web-vitals": "^1.0.1" diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx index e105a0c..545bec9 100644 --- a/frontend/src/App.tsx +++ b/frontend/src/App.tsx @@ -1,403 +1,24 @@ -import { - MediaSet, - GrpcWebImpl, - MediaSetServiceClientImpl, - GetVideoProgress, - GetPeaksProgress, -} from './generated/media_set'; +import { BrowserRouter, Route, Routes } from "react-router-dom"; +import HomePage from "./components/HomePage"; +import VideoPage from "./components/VideoPage"; +import { GrpcWebImpl } from "./generated/media_set"; +import "./App.css"; -import { useState, useEffect, useRef, useCallback } from 'react'; -import { AudioFormat } from './generated/media_set'; -import { VideoPreview } from './VideoPreview'; -import { Overview, CanvasLogicalWidth } from './Overview'; -import { Waveform } from './Waveform'; -import { ControlBar } from './ControlBar'; -import { SeekBar } from './SeekBar'; -import './App.css'; -import { Duration } from './generated/google/protobuf/duration'; -import { firstValueFrom, from, Observable } from 'rxjs'; -import { first, map } from 'rxjs/operators'; - -// ported from backend, where should they live? -const thumbnailWidth = 177; -const thumbnailHeight = 100; - -const initialViewportCanvasPixels = 100; - -const apiURL = process.env.REACT_APP_API_URL || 'http://localhost:8888'; - -// Frames represents a range of audio frames. -export interface Frames { - start: number; - end: number; -} - -export interface VideoPosition { - currentTime: number; - percent: number; -} - -const video = document.createElement('video'); -const audio = document.createElement('audio'); +const apiURL = process.env.REACT_APP_API_URL || "http://localhost:8888"; function App(): JSX.Element { - const [mediaSet, setMediaSet] = useState(null); - const [viewport, setViewport] = useState({ start: 0, end: 0 }); - const [selection, setSelection] = useState({ start: 0, end: 0 }); - const [overviewPeaks, setOverviewPeaks] = useState>( - from([]) - ); - - // position stores the current playback position. positionRef makes it - // available inside a setInterval callback. 
- const [position, setPosition] = useState({ currentTime: 0, percent: 0 }); - const positionRef = useRef(position); - positionRef.current = position; - - // effects - - // TODO: error handling - const videoID = new URLSearchParams(window.location.search).get('video_id'); - if (videoID == null) { - return <>; - } - - // fetch mediaset on page load: - useEffect(() => { - (async function () { - const rpc = newRPC(); - const service = new MediaSetServiceClientImpl(rpc); - const mediaSet = await service.Get({ youtubeId: videoID }); - - console.log('got media set:', mediaSet); - setMediaSet(mediaSet); - })(); - }, []); - - const updatePlayerPositionIntevalMillis = 30; - - // setup player on first page load only: - useEffect(() => { - if (mediaSet == null) { - return; - } - - const intervalID = setInterval(() => { - const currTime = audio.currentTime; - if (currTime == positionRef.current.currentTime) { - return; - } - const duration = mediaSet.audioFrames / mediaSet.audioSampleRate; - const percent = (currTime / duration) * 100; - - // check if the end of selection has been passed, and pause if so: - if ( - currentTimeToFrame(position.currentTime) < selection.end && - currentTimeToFrame(currTime) >= selection.end - ) { - handlePause(); - } - - // update the current position - setPosition({ currentTime: audio.currentTime, percent: percent }); - }, updatePlayerPositionIntevalMillis); - - return () => clearInterval(intervalID); - }, [mediaSet, selection]); - - // bind to keypress handler. - // selection is a dependency of the handleKeyPress handler, and must be - // included here. - useEffect(() => { - document.addEventListener('keypress', handleKeyPress); - return () => document.removeEventListener('keypress', handleKeyPress); - }, [selection]); - - // load audio when MediaSet is loaded: - useEffect(() => { - (async function () { - if (mediaSet == null) { - return; - } - console.log('fetching audio...'); - const service = new MediaSetServiceClientImpl(newRPC()); - const audioProgressStream = service.GetPeaks({ - id: mediaSet.id, - numBins: CanvasLogicalWidth, - }); - const peaks = audioProgressStream.pipe(map((progress) => progress.peaks)); - setOverviewPeaks(peaks); - - const pipe = audioProgressStream.pipe( - first((progress: GetPeaksProgress) => progress.url != '') - ); - const progressWithURL = await firstValueFrom(pipe); - - audio.src = progressWithURL.url; - audio.muted = false; - audio.volume = 1; - console.log('set audio src', progressWithURL.url); - })(); - }, [mediaSet]); - - // load video when MediaSet is loaded: - useEffect(() => { - (async function () { - if (mediaSet == null) { - return; - } - - console.log('fetching video...'); - const service = new MediaSetServiceClientImpl(newRPC()); - const videoProgressStream = service.GetVideo({ id: mediaSet.id }); - const pipe = videoProgressStream.pipe( - first((progress: GetVideoProgress) => progress.url != '') - ); - const progressWithURL = await firstValueFrom(pipe); - - video.src = progressWithURL.url; - console.log('set video src', progressWithURL.url); - })(); - }, [mediaSet]); - - // set viewport when MediaSet is loaded: - useEffect(() => { - if (mediaSet == null) { - return; - } - - const numFrames = Math.min( - Math.round(mediaSet.audioFrames / CanvasLogicalWidth) * - initialViewportCanvasPixels, - mediaSet.audioFrames - ); - - setViewport({ start: 0, end: numFrames }); - }, [mediaSet]); - - useEffect(() => { - console.debug('viewport updated', viewport); - }, [viewport]); - - // handlers - - const handleKeyPress = useCallback( - 
(evt: KeyboardEvent) => { - if (evt.code != 'Space') { - return; - } - - if (audio.paused) { - handlePlay(); - } else { - handlePause(); - } - }, - [selection] - ); - - // handler called when the selection in the overview (zoom setting) is changed. - const handleOverviewSelectionChange = useCallback( - (newViewport: Frames) => { - if (mediaSet == null) { - return; - } - console.log('set new viewport', newViewport); - setViewport({ ...newViewport }); - - if (!audio.paused) { - return; - } - - setPositionFromFrame(newViewport.start); - }, - [mediaSet, audio, video, selection] - ); - - // handler called when the selection in the main waveform view is changed. - const handleWaveformSelectionChange = useCallback( - (newSelection: Frames) => { - setSelection(newSelection); - - if (mediaSet == null) { - return; - } - - // move playback position to start of selection - const ratio = newSelection.start / mediaSet.audioFrames; - const currentTime = - (mediaSet.audioFrames / mediaSet.audioSampleRate) * ratio; - audio.currentTime = currentTime; - video.currentTime = currentTime; - }, - [mediaSet, audio, video, selection] - ); - - const handlePlay = useCallback(() => { - audio.play(); - video.play(); - }, [audio, video]); - - const handlePause = useCallback(() => { - video.pause(); - audio.pause(); - - if (selection.start != selection.end) { - setPositionFromFrame(selection.start); - } - }, [audio, video, selection]); - - const handleClip = useCallback(() => { - (async function () { - console.debug('clip', selection); - - if (mediaSet == null) { - return; - } - - // TODO: support File System Access API fallback - const h = await window.showSaveFilePicker({ suggestedName: 'clip.mp3' }); - const fileStream = await h.createWritable(); - - const rpc = newRPC(); - const service = new MediaSetServiceClientImpl(rpc); - const stream = service.GetAudioSegment({ - id: mediaSet.id, - format: AudioFormat.MP3, - startFrame: selection.start, - endFrame: selection.end, - }); - - await stream.forEach((p) => fileStream.write(p.audioData)); - console.debug('finished writing stream'); - - await fileStream.close(); - console.debug('closed stream'); - })(); - }, [mediaSet, selection]); - - const setPositionFromFrame = useCallback( - (frame: number) => { - if (mediaSet == null) { - return; - } - const ratio = frame / mediaSet.audioFrames; - const currentTime = - (mediaSet.audioFrames / mediaSet.audioSampleRate) * ratio; - audio.currentTime = currentTime; - video.currentTime = currentTime; - }, - [mediaSet, audio, video] - ); - - // helpers - - const currentTimeToFrame = useCallback( - (currentTime: number): number => { - if (mediaSet == null) { - return 0; - } - const dur = mediaSet.audioFrames / mediaSet.audioSampleRate; - const ratio = currentTime / dur; - return Math.round(mediaSet.audioFrames * ratio); - }, - [mediaSet] - ); - - // render component - - const containerStyles = { - border: '1px solid black', - width: '90%', - margin: '1em auto', - minHeight: '500px', - height: '700px', - display: 'flex', - flexDirection: 'column', - } as React.CSSProperties; - - const offsetPixels = Math.floor(thumbnailWidth / 2); - - if (mediaSet == null) { - // TODO: improve - return <>; - } - return ( - <> -
-
- - - - - - - { - video.currentTime = position; - audio.currentTime = position; - }} - /> - - -
-
    -
-        <ul>
-          <li>Frames: {mediaSet.audioFrames}</li>
-          <li>
-            Viewport (frames): {viewport.start} to {viewport.end}
-          </li>
-          <li>
-            Selection (frames): {selection.start} to {selection.end}
-          </li>
-          <li>
-            Position (frames):{' '}
-            {Math.round(mediaSet.audioFrames * (position.percent / 100))}
-          </li>
-          <li>Position (seconds): {position.currentTime}</li>
-        </ul>
-
- + + + } /> + } /> + + ); } export default App; -function millisFromDuration(dur?: Duration): number { - if (dur == undefined) { - return 0; - } - return Math.floor(dur.seconds * 1000.0 + dur.nanos / 1000.0 / 1000.0); -} - export function newRPC(): GrpcWebImpl { return new GrpcWebImpl(apiURL, {}); } diff --git a/frontend/src/Overview.tsx b/frontend/src/Overview.tsx index 7dcc689..03b42a5 100644 --- a/frontend/src/Overview.tsx +++ b/frontend/src/Overview.tsx @@ -1,6 +1,5 @@ import { useState, useEffect, useCallback } from 'react'; import { MediaSet } from './generated/media_set'; -import { Frames, VideoPosition } from './App'; import { WaveformCanvas } from './WaveformCanvas'; import { HudCanvas, EmptySelectionAction } from './HudCanvas'; import { Observable } from 'rxjs'; diff --git a/frontend/src/VideoPreview.tsx b/frontend/src/VideoPreview.tsx index a2dee50..64248db 100644 --- a/frontend/src/VideoPreview.tsx +++ b/frontend/src/VideoPreview.tsx @@ -1,5 +1,5 @@ import { MediaSet, MediaSetServiceClientImpl } from './generated/media_set'; -import { newRPC, VideoPosition } from './App'; +import { newRPC } from './App'; import { useEffect, useRef } from 'react'; interface Props { diff --git a/frontend/src/Waveform.tsx b/frontend/src/Waveform.tsx index dbb60ca..d2b7291 100644 --- a/frontend/src/Waveform.tsx +++ b/frontend/src/Waveform.tsx @@ -1,10 +1,10 @@ -import { useEffect, useState, useCallback } from 'react'; -import { Frames, VideoPosition, newRPC } from './App'; -import { MediaSetServiceClientImpl, MediaSet } from './generated/media_set'; -import { WaveformCanvas } from './WaveformCanvas'; -import { Selection, HudCanvas, EmptySelectionAction } from './HudCanvas'; -import { from, Observable } from 'rxjs'; -import { bufferCount } from 'rxjs/operators'; +import { useEffect, useState, useCallback } from "react"; +import { newRPC } from "./App"; +import { MediaSetServiceClientImpl, MediaSet } from "./generated/media_set"; +import { WaveformCanvas } from "./WaveformCanvas"; +import { Selection, HudCanvas, EmptySelectionAction } from "./HudCanvas"; +import { from, Observable } from "rxjs"; +import { bufferCount } from "rxjs/operators"; interface Props { mediaSet: MediaSet; @@ -45,7 +45,7 @@ export const Waveform: React.FC = ({ return; } - console.log('fetch audio segment, frames', viewport); + console.log("fetch audio segment, frames", viewport); const service = new MediaSetServiceClientImpl(newRPC()); const segment = await service.GetPeaksForSegment({ @@ -55,7 +55,7 @@ export const Waveform: React.FC = ({ endFrame: viewport.end, }); - console.log('got segment', segment); + console.log("got segment", segment); const peaks = from(segment.peaks).pipe( bufferCount(mediaSet.audioChannels) @@ -119,17 +119,17 @@ export const Waveform: React.FC = ({ // render component const containerStyles = { - background: 'black', - margin: '0 ' + offsetPixels + 'px', + background: "black", + margin: "0 " + offsetPixels + "px", flexGrow: 1, - position: 'relative', + position: "relative", } as React.CSSProperties; const hudStyles = { borderLineWidth: 0, - borderStrokeStyle: 'transparent', + borderStrokeStyle: "transparent", positionLineWidth: 6, - positionStrokeStyle: 'red', + positionStrokeStyle: "red", }; return ( diff --git a/frontend/src/components/HomePage.tsx b/frontend/src/components/HomePage.tsx new file mode 100644 index 0000000..3375c9f --- /dev/null +++ b/frontend/src/components/HomePage.tsx @@ -0,0 +1,40 @@ +import { ChangeEventHandler, MouseEventHandler, useState } from "react"; +import { useNavigate } 
from "react-router-dom"; + +const extractVideoIDFromURL = (input: string): string | null => { + const { searchParams } = new URL(input); + return searchParams.get("v"); +}; + +function HomePage(): JSX.Element { + const [input, setInput] = useState(""); + const [error, setError] = useState(null); + const navigate = useNavigate(); + + const handleChange: ChangeEventHandler = (event) => { + setInput(event.target.value); + }; + + const handleSubmit: MouseEventHandler = () => { + try { + const videoId = extractVideoIDFromURL(input); + if (videoId === null) { + setError("URL not valid, please enter a valid YouTube URL"); + } else { + navigate(`/video/${videoId}`); + } + } catch (err) { + setError("URL not valid, please enter a valid YouTube URL"); + } + }; + + return ( +
+      <input type="text" value={input} onChange={handleChange} />
+      <button onClick={handleSubmit}>Go</button>
+      {Boolean(error) && <div>{error}</div>}
+    </div>
+ ); +} + +export default HomePage; diff --git a/frontend/src/components/VideoPage.tsx b/frontend/src/components/VideoPage.tsx new file mode 100644 index 0000000..af53112 --- /dev/null +++ b/frontend/src/components/VideoPage.tsx @@ -0,0 +1,391 @@ +import { + MediaSet, + MediaSetServiceClientImpl, + GetVideoProgress, + GetPeaksProgress, +} from '../generated/media_set'; + +import { useState, useEffect, useRef, useCallback } from 'react'; +import { AudioFormat } from '../generated/media_set'; +import { VideoPreview } from '../VideoPreview'; +import { Overview, CanvasLogicalWidth } from '../Overview'; +import { Waveform } from '../Waveform'; +import { ControlBar } from '../ControlBar'; +import { SeekBar } from '../SeekBar'; +import { Duration } from '../generated/google/protobuf/duration'; +import { firstValueFrom, from, Observable } from 'rxjs'; +import { first, map } from 'rxjs/operators'; +import { newRPC } from '../App'; +import { useParams } from 'react-router-dom'; + +// ported from backend, where should they live? +const thumbnailWidth = 177; +const thumbnailHeight = 100; + +const initialViewportCanvasPixels = 100; + +// Frames represents a range of audio frames. + +const video = document.createElement('video'); +const audio = document.createElement('audio'); + +type VideoPageParams = { + videoId: string; +} + +function VideoPage(): JSX.Element { + const [mediaSet, setMediaSet] = useState(null); + const [viewport, setViewport] = useState({ start: 0, end: 0 }); + const [selection, setSelection] = useState({ start: 0, end: 0 }); + const [overviewPeaks, setOverviewPeaks] = useState>( + from([]) + ); + const { videoId } = useParams(); + + // position stores the current playback position. positionRef makes it + // available inside a setInterval callback. + const [position, setPosition] = useState({ currentTime: 0, percent: 0 }); + const positionRef = useRef(position); + positionRef.current = position; + + // effects + + // TODO: error handling + + if (videoId == null) { + return <>; + } + + // fetch mediaset on page load: + useEffect(() => { + (async function () { + const rpc = newRPC(); + const service = new MediaSetServiceClientImpl(rpc); + const mediaSet = await service.Get({ youtubeId: videoId }); + + console.log('got media set:', mediaSet); + setMediaSet(mediaSet); + })(); + }, []); + + const updatePlayerPositionIntevalMillis = 30; + + // setup player on first page load only: + useEffect(() => { + if (mediaSet == null) { + return; + } + + const intervalID = setInterval(() => { + const currTime = audio.currentTime; + if (currTime == positionRef.current.currentTime) { + return; + } + const duration = mediaSet.audioFrames / mediaSet.audioSampleRate; + const percent = (currTime / duration) * 100; + + // check if the end of selection has been passed, and pause if so: + if ( + currentTimeToFrame(position.currentTime) < selection.end && + currentTimeToFrame(currTime) >= selection.end + ) { + handlePause(); + } + + // update the current position + setPosition({ currentTime: audio.currentTime, percent: percent }); + }, updatePlayerPositionIntevalMillis); + + return () => clearInterval(intervalID); + }, [mediaSet, selection]); + + // bind to keypress handler. + // selection is a dependency of the handleKeyPress handler, and must be + // included here. 
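+  // Re-registering the listener whenever selection changes keeps handleKeyPress
+  // (and the handlePause it calls) from closing over a stale selection value;
+  // with an empty dependency list the spacebar toggle would act on the
+  // selection captured on the first render.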
+ useEffect(() => { + document.addEventListener('keypress', handleKeyPress); + return () => document.removeEventListener('keypress', handleKeyPress); + }, [selection]); + + // load audio when MediaSet is loaded: + useEffect(() => { + (async function () { + if (mediaSet == null) { + return; + } + console.log('fetching audio...'); + const service = new MediaSetServiceClientImpl(newRPC()); + const audioProgressStream = service.GetPeaks({ + id: mediaSet.id, + numBins: CanvasLogicalWidth, + }); + const peaks = audioProgressStream.pipe(map((progress) => progress.peaks)); + setOverviewPeaks(peaks); + + const pipe = audioProgressStream.pipe( + first((progress: GetPeaksProgress) => progress.url != '') + ); + const progressWithURL = await firstValueFrom(pipe); + + audio.src = progressWithURL.url; + audio.muted = false; + audio.volume = 1; + console.log('set audio src', progressWithURL.url); + })(); + }, [mediaSet]); + + // load video when MediaSet is loaded: + useEffect(() => { + (async function () { + if (mediaSet == null) { + return; + } + + console.log('fetching video...'); + const service = new MediaSetServiceClientImpl(newRPC()); + const videoProgressStream = service.GetVideo({ id: mediaSet.id }); + const pipe = videoProgressStream.pipe( + first((progress: GetVideoProgress) => progress.url != '') + ); + const progressWithURL = await firstValueFrom(pipe); + + video.src = progressWithURL.url; + console.log('set video src', progressWithURL.url); + })(); + }, [mediaSet]); + + // set viewport when MediaSet is loaded: + useEffect(() => { + if (mediaSet == null) { + return; + } + + const numFrames = Math.min( + Math.round(mediaSet.audioFrames / CanvasLogicalWidth) * + initialViewportCanvasPixels, + mediaSet.audioFrames + ); + + setViewport({ start: 0, end: numFrames }); + }, [mediaSet]); + + useEffect(() => { + console.debug('viewport updated', viewport); + }, [viewport]); + + // handlers + + const handleKeyPress = useCallback( + (evt: KeyboardEvent) => { + if (evt.code != 'Space') { + return; + } + + if (audio.paused) { + handlePlay(); + } else { + handlePause(); + } + }, + [selection] + ); + + // handler called when the selection in the overview (zoom setting) is changed. + const handleOverviewSelectionChange = useCallback( + (newViewport: Frames) => { + if (mediaSet == null) { + return; + } + console.log('set new viewport', newViewport); + setViewport({ ...newViewport }); + + if (!audio.paused) { + return; + } + + setPositionFromFrame(newViewport.start); + }, + [mediaSet, audio, video, selection] + ); + + // handler called when the selection in the main waveform view is changed. 
+ const handleWaveformSelectionChange = useCallback( + (newSelection: Frames) => { + setSelection(newSelection); + + if (mediaSet == null) { + return; + } + + // move playback position to start of selection + const ratio = newSelection.start / mediaSet.audioFrames; + const currentTime = + (mediaSet.audioFrames / mediaSet.audioSampleRate) * ratio; + audio.currentTime = currentTime; + video.currentTime = currentTime; + }, + [mediaSet, audio, video, selection] + ); + + const handlePlay = useCallback(() => { + audio.play(); + video.play(); + }, [audio, video]); + + const handlePause = useCallback(() => { + video.pause(); + audio.pause(); + + if (selection.start != selection.end) { + setPositionFromFrame(selection.start); + } + }, [audio, video, selection]); + + const handleClip = useCallback(() => { + (async function () { + console.debug('clip', selection); + + if (mediaSet == null) { + return; + } + + // TODO: support File System Access API fallback + const h = await window.showSaveFilePicker({ suggestedName: 'clip.mp3' }); + const fileStream = await h.createWritable(); + + const rpc = newRPC(); + const service = new MediaSetServiceClientImpl(rpc); + const stream = service.GetAudioSegment({ + id: mediaSet.id, + format: AudioFormat.MP3, + startFrame: selection.start, + endFrame: selection.end, + }); + + await stream.forEach((p) => fileStream.write(p.audioData)); + console.debug('finished writing stream'); + + await fileStream.close(); + console.debug('closed stream'); + })(); + }, [mediaSet, selection]); + + const setPositionFromFrame = useCallback( + (frame: number) => { + if (mediaSet == null) { + return; + } + const ratio = frame / mediaSet.audioFrames; + const currentTime = + (mediaSet.audioFrames / mediaSet.audioSampleRate) * ratio; + audio.currentTime = currentTime; + video.currentTime = currentTime; + }, + [mediaSet, audio, video] + ); + + // helpers + + const currentTimeToFrame = useCallback( + (currentTime: number): number => { + if (mediaSet == null) { + return 0; + } + const dur = mediaSet.audioFrames / mediaSet.audioSampleRate; + const ratio = currentTime / dur; + return Math.round(mediaSet.audioFrames * ratio); + }, + [mediaSet] + ); + + // render component + + const containerStyles = { + border: '1px solid black', + width: '90%', + margin: '1em auto', + minHeight: '500px', + height: '700px', + display: 'flex', + flexDirection: 'column', + } as React.CSSProperties; + + const offsetPixels = Math.floor(thumbnailWidth / 2); + + if (mediaSet == null) { + // TODO: improve + return <>; + } + + return ( +
+
+ + + + + + + { + video.currentTime = position; + audio.currentTime = position; + }} + /> + + +
+
    +
+        <ul>
+          <li>Frames: {mediaSet.audioFrames}</li>
+          <li>
+            Viewport (frames): {viewport.start} to {viewport.end}
+          </li>
+          <li>
+            Selection (frames): {selection.start} to {selection.end}
+          </li>
+          <li>
+            Position (frames):{' '}
+            {Math.round(mediaSet.audioFrames * (position.percent / 100))}
+          </li>
+          <li>Position (seconds): {position.currentTime}</li>
+        </ul>
+
+ ); +} + +export default VideoPage; + +function millisFromDuration(dur?: Duration): number { + if (dur == undefined) { + return 0; + } + return Math.floor(dur.seconds * 1000.0 + dur.nanos / 1000.0 / 1000.0); +} diff --git a/frontend/src/types.d.ts b/frontend/src/types.d.ts new file mode 100644 index 0000000..2c65b8e --- /dev/null +++ b/frontend/src/types.d.ts @@ -0,0 +1,9 @@ +interface Frames { + start: number; + end: number; +} + +interface VideoPosition { + currentTime: number; + percent: number; +} diff --git a/frontend/yarn.lock b/frontend/yarn.lock index 672bec9..b32897e 100644 --- a/frontend/yarn.lock +++ b/frontend/yarn.lock @@ -1172,6 +1172,13 @@ dependencies: regenerator-runtime "^0.13.4" +"@babel/runtime@^7.7.6": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.16.7.tgz#03ff99f64106588c9c403c6ecb8c3bafbbdff1fa" + integrity sha512-9E9FJowqAsytyOY6LG+1KuueckRL+aQW+mKvXRXnuFGyRAyepJPmEo9vgMfXUA6O9u3IeEdv9MAkppFcaQwogQ== + dependencies: + regenerator-runtime "^0.13.4" + "@babel/template@^7.10.4", "@babel/template@^7.16.0", "@babel/template@^7.3.3": version "7.16.0" resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.16.0.tgz#d16a35ebf4cd74e202083356fab21dd89363ddd6" @@ -5648,6 +5655,13 @@ hex-color-regex@^1.1.0: resolved "https://registry.yarnpkg.com/hex-color-regex/-/hex-color-regex-1.1.0.tgz#4c06fccb4602fe2602b3c93df82d7e7dbf1a8a8e" integrity sha512-l9sfDFsuqtOqKDsQdqrMRk0U85RZc0RtOR9yPI7mRVOa4FsR/BVnZ0shmQRM96Ji99kYZP/7hn1cedc1+ApsTQ== +history@^5.2.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/history/-/history-5.2.0.tgz#7cdd31cf9bac3c5d31f09c231c9928fad0007b7c" + integrity sha512-uPSF6lAJb3nSePJ43hN3eKj1dTWpN9gMod0ZssbFTIsen+WehTmEadgL+kg78xLJFdRfrrC//SavDzmRVdE+Ig== + dependencies: + "@babel/runtime" "^7.7.6" + hmac-drbg@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/hmac-drbg/-/hmac-drbg-1.0.1.tgz#d2745701025a6c775a6c545793ed502fc0c649a1" @@ -9324,6 +9338,21 @@ react-refresh@^0.8.3: resolved "https://registry.yarnpkg.com/react-refresh/-/react-refresh-0.8.3.tgz#721d4657672d400c5e3c75d063c4a85fb2d5d68f" integrity sha512-X8jZHc7nCMjaCqoU+V2I0cOhNW+QMBwSUkeXnTi8IPe6zaRWfn60ZzvFDZqWPfmSJfjub7dDW1SP0jaHWLu/hg== +react-router-dom@^6.2.1: + version "6.2.1" + resolved "https://registry.yarnpkg.com/react-router-dom/-/react-router-dom-6.2.1.tgz#32ec81829152fbb8a7b045bf593a22eadf019bec" + integrity sha512-I6Zax+/TH/cZMDpj3/4Fl2eaNdcvoxxHoH1tYOREsQ22OKDYofGebrNm6CTPUcvLvZm63NL/vzCYdjf9CUhqmA== + dependencies: + history "^5.2.0" + react-router "6.2.1" + +react-router@6.2.1: + version "6.2.1" + resolved "https://registry.yarnpkg.com/react-router/-/react-router-6.2.1.tgz#be2a97a6006ce1d9123c28934e604faef51448a3" + integrity sha512-2fG0udBtxou9lXtK97eJeET2ki5//UWfQSl1rlJ7quwe6jrktK9FCCc8dQb5QY6jAv3jua8bBQRhhDOM/kVRsg== + dependencies: + history "^5.2.0" + react-scripts@4.0.3: version "4.0.3" resolved "https://registry.yarnpkg.com/react-scripts/-/react-scripts-4.0.3.tgz#b1cafed7c3fa603e7628ba0f187787964cb5d345" -- 2.40.1
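
Note on the route wiring: the JSX of the rewritten App component only appears above in fragments, so the sketch below is a reconstruction rather than the literal hunk content. The imports, the HomePage/VideoPage components, and the "/video/:videoId" shape are taken from the patch (HomePage calls navigate(`/video/${videoId}`), and VideoPage reads videoId from useParams()); mounting HomePage at "/" and the exact prop layout are assumptions, and the GrpcWebImpl/newRPC wiring is omitted for brevity.

import { BrowserRouter, Route, Routes } from "react-router-dom";
import HomePage from "./components/HomePage";
import VideoPage from "./components/VideoPage";

// Trimmed sketch of the new App component: HomePage renders the URL input at
// the root path, and VideoPage picks up the :videoId segment via useParams().
function App(): JSX.Element {
  return (
    <BrowserRouter>
      <Routes>
        <Route path="/" element={<HomePage />} />
        <Route path="/video/:videoId" element={<VideoPage />} />
      </Routes>
    </BrowserRouter>
  );
}

export default App;

For reference, the HomePage helper behaves as follows (illustrative values, not from the patch): extractVideoIDFromURL("https://www.youtube.com/watch?v=abc123") returns "abc123"; a URL without a v query parameter returns null, which surfaces the error message; and a string that is not a URL at all makes new URL() throw, which handleSubmit catches and reports the same way.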