Split the web player into its own repo.

https://github.com/kixelated/moq-js
Luke Curley 2023-05-24 13:29:18 -07:00
parent fc958e11ae
commit 38ea8983d4
66 changed files with 33 additions and 4406 deletions


@ -1,4 +1,4 @@
name: server
name: moq.rs
on:
pull_request:
@ -11,10 +11,6 @@ jobs:
check:
runs-on: ubuntu-latest
defaults:
run:
working-directory: ./server
steps:
- uses: actions/checkout@v3


@ -1,29 +0,0 @@
name: web
on:
pull_request:
branches: [ "main" ]
jobs:
check:
runs-on: ubuntu-latest
defaults:
run:
working-directory: ./web
steps:
- uses: actions/checkout@v3
- name: install
run: yarn install
- name: cert
working-directory: cert
run: ./generate
- name: build
run: yarn build
- name: lint
run: yarn lint

.gitignore (4 changes)

@ -1,3 +1,3 @@
*.mp4
logs/
.DS_Store
target/
logs/


@ -1,64 +1,55 @@
# Warp
Live media delivery protocol utilizing QUIC streams. See the [Warp draft](https://datatracker.ietf.org/doc/draft-lcurley-warp/).
# Media over QUIC
Warp works by delivering media over independent QUIC streams. These streams are assigned a priority such that old video will arrive last and can be dropped. This avoids buffering in many cases, offering the viewer a potentially better experience.
Media over QUIC (MoQ) is a live media delivery protocol utilizing QUIC streams.
See the [Warp draft](https://datatracker.ietf.org/doc/draft-lcurley-warp/).
This demo requires WebTransport and WebCodecs, which currently (May 2023) only work on Chrome.
# Development
## Easy Mode
Requires Docker *only*.
```
docker-compose up --build
```
Then open [https://localhost:4444/](https://localhost:4444) in a browser. You'll have to click past the TLS error, but that's the price you pay for being lazy. Follow the more in-depth instructions if you want a better development experience.
This repository is a Rust server that supports both contribution (ingest) and distribution (playback).
It requires a client, such as [moq-js](https://github.com/kixelated/moq-js).
## Requirements
* Go
* Rust
* ffmpeg
* openssl
* Chrome
- _Chrome_: currently (May 2023) the only browser to support both WebTransport and WebCodecs.
- _yarn_: required to install dependencies.
## Requirements
- _rust_: duh
- _ffmpeg_: (optional) used to generate fragmented media
- _go_: (optional) used to generate self-signed certificates
- _openssl_: (optional) ...also used to generate self-signed certificates
## Media
This demo simulates a live stream by reading a file from disk and sleeping based on media timestamps. Obviously you should hook this up to a real live stream to do anything useful.
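The pacing itself is simple. As a rough sketch (in TypeScript rather than the server's Rust, with a hypothetical `send` callback and sample shape), it amounts to sleeping until each sample's timestamp has elapsed relative to the start of playback:
```
// Sketch only: replay recorded samples as if they were live.
// Assumes `samples` is sorted by timestamp; `send` is a hypothetical callback.
const sleep = (ms: number) => new Promise<void>((resolve) => setTimeout(resolve, ms))

async function replay(samples: { timestampMs: number; data: Uint8Array }[], send: (data: Uint8Array) => void) {
	const start = performance.now()
	const first = samples.length ? samples[0].timestampMs : 0
	for (const sample of samples) {
		const wait = start + (sample.timestampMs - first) - performance.now()
		if (wait > 0) await sleep(wait)
		send(sample.data)
	}
}
```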
Download your favorite media file and convert it to fragmented MP4:
Download your favorite media file and convert it to fragmented MP4, by default `media/fragmented.mp4`:
```
wget http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/BigBuckBunny.mp4 -O media/source.mp4
./media/fragment
./media/generate
```
## Certificates
Unfortunately, QUIC mandates TLS and makes local development difficult.
If you have a valid certificate you can use it instead of self-signing.
Otherwise, we use [mkcert](https://github.com/FiloSottile/mkcert) to install a self-signed CA:
```
./generate/cert
./cert/generate
```
With no arguments, the server will generate a self-signed cert using this root CA. This certificate is only valid for *2 weeks* due to how WebTransport performs certificate fingerprinting.
With no arguments, the server will generate a self-signed cert using this root CA.
This certificate is only valid for _2 weeks_ due to how WebTransport performs certificate fingerprinting.
## Server
The Warp server supports WebTransport, pushing media over streams once a connection has been established. A more refined implementation would load content based on the WebTransport URL or some other messaging scheme.
The Warp server supports WebTransport, pushing media over streams once a connection has been established.
```
cd server
cargo run
```
This listens for WebTransport connections (not HTTP) on `https://localhost:4443` by default.
## Web
The web assets need to be hosted with an HTTPS server.
```
cd web
yarn install
yarn serve
```
These can be accessed on `https://localhost:4444` by default.
This listens for WebTransport connections on `https://localhost:4443` by default.
Use a [MoQ client](https://github.com/kixelated/moq-js) to connect to the server.
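For reference, here is a minimal sketch of what that connection looks like from the client side, mirroring the web player code removed by this commit; the `/watch` path and the way the fingerprint is loaded are illustrative:
```
// Sketch only: establish the WebTransport session against the local server.
// The hex digest is written by ./cert/generate (cert/localhost.hex); how it is
// loaded is up to the client (the removed player inlined it at build time).
declare const fingerprintHex: string

// Convert the hex digest to bytes for certificate pinning.
const fingerprint = new Uint8Array((fingerprintHex.match(/../g) ?? []).map((b) => parseInt(b, 16)))

const transport = new WebTransport("https://localhost:4443/watch", {
	// Only needed for self-signed certificates; omit with a publicly trusted cert.
	serverCertificateHashes: [{ algorithm: "sha-256", value: fingerprint }],
})
await transport.ready

// Media arrives on server-initiated unidirectional streams.
const streams = transport.incomingUnidirectionalStreams.getReader()
for (;;) {
	const { done, value } = await streams.read()
	if (done) break
	console.log("incoming media stream", value) // the player parsed a "warp" atom header here
}
```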


@ -1,3 +0,0 @@
*.crt
*.key
*.hex


@ -1,22 +0,0 @@
# Use ubuntu because it's ez
FROM ubuntu:latest
WORKDIR /build
# Use openssl and golang to generate certificates
RUN apt-get update && \
apt-get install -y ca-certificates openssl golang xxd
# Download the go modules
COPY go.mod go.sum ./
RUN go mod download
# Copy over the remaining files.
COPY . .
# Save the certificates to a volume
VOLUME /cert
# TODO support an output directory
CMD ./generate && cp localhost.* /cert


@ -17,4 +17,5 @@ go run filippo.io/mkcert -ecdsa -install
go run filippo.io/mkcert -ecdsa -days 10 -cert-file "$CRT" -key-file "$KEY" localhost 127.0.0.1 ::1
# Compute the sha256 fingerprint of the certificate for WebTransport
# TODO remove openssl as a requirement
openssl x509 -in "$CRT" -outform der | openssl dgst -sha256 -binary | xxd -p -c 256 > localhost.hex


@ -1,45 +0,0 @@
version: '3'
services:
# Generate certificates only valid for 14 days.
cert:
build: ./cert
volumes:
- cert:/cert
# Generate a fragmented MP4 file for testing.
media:
build: ./media
volumes:
- media:/media
# Serve the web code once we have certificates.
web:
build: ./web
ports:
- "4444:4444"
volumes:
- cert:/cert
depends_on:
cert:
condition: service_completed_successfully
# Run the server once we have certificates and media.
server:
build: ./server
environment:
- RUST_LOG=debug
ports:
- "4443:4443/udp"
volumes:
- cert:/cert
- media:/media
depends_on:
cert:
condition: service_completed_successfully
media:
condition: service_completed_successfully
volumes:
cert:
media:


@ -1 +0,0 @@
fragmented.mp4

media/.gitignore (2 changes)

@ -1,3 +1 @@
*.mp4
*.mpd
*.m4s


@ -1,25 +0,0 @@
# Create a build image
FROM ubuntu:latest
# Create the working directory.
WORKDIR /build
# Install necessary packages
RUN apt-get update && \
apt-get install -y \
ca-certificates \
wget \
ffmpeg
# Download a file from the internet, in this case my boy big buck bunny
RUN wget http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/BigBuckBunny.mp4 -O source.mp4
# Copy and run a script to create a fragmented mp4 (more overhead, easier to split)
COPY fragment .
# Create a media volume
VOLUME /media
# Fragment the media
# TODO support an output directory
CMD ./fragment && cp fragmented.mp4 /media


@ -1 +0,0 @@
target

server/.gitignore (1 change)

@ -1 +0,0 @@
target


@ -1,42 +0,0 @@
# Use the official Rust image as the base image
FROM rust:latest as build
# Quiche requires cmake to build
RUN apt-get update && \
apt-get install -y cmake
# Set the build directory
WORKDIR /warp
# Create an empty project
RUN cargo init --bin
# Copy the Cargo.toml and Cargo.lock files to the container
COPY Cargo.toml Cargo.lock ./
# Build the empty project so we download/cache dependencies
RUN cargo build --release
# Copy the entire project to the container
COPY . .
# Build the project
RUN cargo build --release
# Make a new image to run the binary
FROM ubuntu:latest
# Use a volume to access certificates
VOLUME /cert
# Use another volume to access the media
VOLUME /media
# Expose port 4443 for the server
EXPOSE 4443/udp
# Copy the built binary
COPY --from=build /warp/target/release/warp /bin
# Set the startup command to run the binary
CMD warp --cert /cert/localhost.crt --key /cert/localhost.key --media /media/fragmented.mp4

src/.DS_Store (binary file, not shown)


@ -1,4 +0,0 @@
dist
.parcel-cache
node_modules
fingerprint.hex


@ -1,23 +0,0 @@
/* eslint-env node */
module.exports = {
extends: ["eslint:recommended", "plugin:@typescript-eslint/recommended", "prettier"],
parser: "@typescript-eslint/parser",
plugins: ["@typescript-eslint", "prettier"],
root: true,
ignorePatterns: ["dist", "node_modules"],
rules: {
"@typescript-eslint/ban-ts-comment": "off",
"@typescript-eslint/no-non-null-assertion": "off",
"@typescript-eslint/no-explicit-any": "off",
"no-unused-vars": "off", // note you must disable the base rule as it can report incorrect errors
"@typescript-eslint/no-unused-vars": [
"warn", // or "error"
{
argsIgnorePattern: "^_",
varsIgnorePattern: "^_",
caughtErrorsIgnorePattern: "^_",
},
],
"prettier/prettier": 2, // Means error
},
}

web/.gitignore (3 changes)

@ -1,3 +0,0 @@
node_modules
.parcel-cache
dist


@ -1,2 +0,0 @@
dist
node_modules


@ -1,4 +0,0 @@
# note: root .editorconfig is used
# Don't insert semi-colons unless needed
semi: false


@ -1,7 +0,0 @@
module.exports = function (app) {
app.use((req, res, next) => {
res.setHeader("Cross-Origin-Opener-Policy", "same-origin")
res.setHeader("Cross-Origin-Embedder-Policy", "require-corp")
next()
})
}


@ -1,26 +0,0 @@
# Use the official Node.js image as the build image
FROM node:latest
# Set the build directory
WORKDIR /build
# Copy the package.json and yarn.lock files to the container
COPY package*.json yarn.lock ./
# Install dependencies
RUN yarn install
# Copy the entire project to the container
COPY . .
# Expose port 4444 for serving the project
EXPOSE 4444
# Copy the certificate hash before running
VOLUME /cert
# Make a symlink to the certificate fingerprint
RUN ln -s /cert/localhost.hex fingerprint.hex
# Copy the certificate fingerprint and start the web server
CMD yarn parcel serve --https --cert /cert/localhost.crt --key /cert/localhost.key --port 4444


@ -1 +0,0 @@
../cert/localhost.hex


@ -1,28 +0,0 @@
{
"license": "Apache-2.0",
"source": "src/index.html",
"scripts": {
"serve": "parcel serve --https --cert ../cert/localhost.crt --key ../cert/localhost.key --port 4444 --open",
"build": "parcel build",
"check": "tsc --noEmit",
"lint": "eslint .",
"fmt": "prettier --write ."
},
"devDependencies": {
"@parcel/transformer-inline-string": "2.8.3",
"@parcel/validator-typescript": "^2.6.0",
"@types/audioworklet": "^0.0.41",
"@types/dom-webcodecs": "^0.1.6",
"@typescript-eslint/eslint-plugin": "^5.59.7",
"@typescript-eslint/parser": "^5.59.7",
"eslint": "^8.41.0",
"eslint-config-prettier": "^8.8.0",
"eslint-plugin-prettier": "^4.2.1",
"parcel": "^2.8.0",
"prettier": "^2.8.8",
"typescript": "^5.0.4"
},
"dependencies": {
"mp4box": "^0.5.2"
}
}


@ -1,104 +0,0 @@
import * as MP4 from "../mp4"
export class Encoder {
container: MP4.ISOFile
audio: AudioEncoder
video: VideoEncoder
constructor() {
this.container = new MP4.ISOFile()
this.audio = new AudioEncoder({
output: this.onAudio.bind(this),
error: console.warn,
})
this.video = new VideoEncoder({
output: this.onVideo.bind(this),
error: console.warn,
})
this.container.init()
this.audio.configure({
codec: "mp4a.40.2",
numberOfChannels: 2,
sampleRate: 44100,
// TODO bitrate
})
this.video.configure({
codec: "avc1.42002A", // TODO h.264 baseline
avc: { format: "avc" }, // or annexb
width: 1280,
height: 720,
// TODO bitrate
// TODO bitrateMode
// TODO framerate
// TODO latencyMode
})
}
onAudio(frame: EncodedAudioChunk, metadata: EncodedAudioChunkMetadata) {
const config = metadata.decoderConfig!
const track_id = 1
if (!this.container.getTrackById(track_id)) {
this.container.addTrack({
id: track_id,
type: "mp4a", // TODO wrong
timescale: 1000, // TODO verify
channel_count: config.numberOfChannels,
samplerate: config.sampleRate,
description: config.description, // TODO verify
// TODO description_boxes?: Box[];
})
}
const buffer = new Uint8Array(frame.byteLength)
frame.copyTo(buffer)
// TODO cts?
const sample = this.container.addSample(track_id, buffer, {
is_sync: frame.type == "key",
duration: frame.duration!,
dts: frame.timestamp,
})
const _stream = this.container.createSingleSampleMoof(sample)
}
onVideo(frame: EncodedVideoChunk, metadata?: EncodedVideoChunkMetadata) {
const config = metadata!.decoderConfig!
const track_id = 2
if (!this.container.getTrackById(track_id)) {
this.container.addTrack({
id: 2,
type: "avc1",
width: config.codedWidth,
height: config.codedHeight,
timescale: 1000, // TODO verify
description: config.description, // TODO verify
// TODO description_boxes?: Box[];
})
}
const buffer = new Uint8Array(frame.byteLength)
frame.copyTo(buffer)
// TODO cts?
const sample = this.container.addSample(track_id, buffer, {
is_sync: frame.type == "key",
duration: frame.duration!,
dts: frame.timestamp,
})
const _stream = this.container.createSingleSampleMoof(sample)
}
}


@ -1,5 +0,0 @@
export default class Broadcaster {
constructor() {
// TODO
}
}


@ -1,75 +0,0 @@
html,
body,
#player {
width: 100%;
}
body {
background: #000000;
color: #ffffff;
padding: 0;
margin: 0;
display: flex;
justify-content: center;
font-family: sans-serif;
}
#screen {
position: relative;
}
#screen #play {
position: absolute;
width: 100%;
height: 100%;
background: rgba(0, 0, 0, 0.5);
display: flex;
justify-content: center;
align-items: center;
z-index: 1;
}
#controls {
display: flex;
flex-wrap: wrap;
padding: 8px 16px;
}
#controls > * {
margin-right: 8px;
}
#controls label {
margin-right: 8px;
}
#stats {
display: grid;
grid-template-columns: auto 1fr;
}
#stats label {
padding: 0 1rem;
}
.buffer {
position: relative;
width: 100%;
}
.buffer .fill {
position: absolute;
transition-duration: 0.1s;
transition-property: left, right, background-color;
background-color: RebeccaPurple;
height: 100%;
text-align: right;
padding-right: 0.5rem;
overflow: hidden;
}
.buffer .fill.net {
background-color: Purple;
}


@ -1,33 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8" />
<title>WARP</title>
<link rel="stylesheet" href="index.css" />
</head>
<body>
<div id="player">
<div id="screen">
<div id="play"><span>click to play</span></div>
<canvas id="video" width="1280" height="720"></canvas>
</div>
<div id="controls">
<button type="button" id="live">Go Live</button>
<button type="button" id="throttle">Throttle: None</button>
</div>
<div id="stats">
<label>Audio Buffer:</label>
<div class="audio buffer"></div>
<label>Video Buffer:</label>
<div class="video buffer"></div>
</div>
</div>
<script src="index.ts" type="module"></script>
</body>
</html>


@ -1,42 +0,0 @@
import Player from "./player"
import Transport from "./transport"
// @ts-ignore embed the certificate fingerprint using bundler
import fingerprintHex from "bundle-text:../fingerprint.hex"
// Convert the hex to binary.
const fingerprint = []
for (let c = 0; c < fingerprintHex.length - 1; c += 2) {
fingerprint.push(parseInt(fingerprintHex.substring(c, c + 2), 16))
}
const params = new URLSearchParams(window.location.search)
const url = params.get("url") || "https://localhost:4443/watch"
const canvas = document.querySelector<HTMLCanvasElement>("canvas#video")!
const transport = new Transport({
url: url,
fingerprint: {
// TODO remove when Chrome accepts the system CA
algorithm: "sha-256",
value: new Uint8Array(fingerprint),
},
})
const player = new Player({
transport,
canvas: canvas.transferControlToOffscreen(),
})
const play = document.querySelector<HTMLElement>("#screen #play")!
const playFunc = (e: Event) => {
player.play()
e.preventDefault()
play.removeEventListener("click", playFunc)
play.style.display = "none"
}
play.addEventListener("click", playFunc)


@ -1,14 +0,0 @@
// Rename some stuff so it's on brand.
export {
createFile as New,
MP4File as File,
MP4ArrayBuffer as ArrayBuffer,
MP4Info as Info,
MP4Track as Track,
MP4AudioTrack as AudioTrack,
MP4VideoTrack as VideoTrack,
DataStream as Stream,
Box,
ISOFile,
Sample,
} from "mp4box"


@ -1,231 +0,0 @@
// https://github.com/gpac/mp4box.js/issues/233
declare module "mp4box" {
export interface MP4MediaTrack {
id: number
created: Date
modified: Date
movie_duration: number
layer: number
alternate_group: number
volume: number
track_width: number
track_height: number
timescale: number
duration: number
bitrate: number
codec: string
language: string
nb_samples: number
}
export interface MP4VideoData {
width: number
height: number
}
export interface MP4VideoTrack extends MP4MediaTrack {
video: MP4VideoData
}
export interface MP4AudioData {
sample_rate: number
channel_count: number
sample_size: number
}
export interface MP4AudioTrack extends MP4MediaTrack {
audio: MP4AudioData
}
export type MP4Track = MP4VideoTrack | MP4AudioTrack
export interface MP4Info {
duration: number
timescale: number
fragment_duration: number
isFragmented: boolean
isProgressive: boolean
hasIOD: boolean
brands: string[]
created: Date
modified: Date
tracks: MP4Track[]
mime: string
audioTracks: MP4AudioTrack[]
videoTracks: MP4VideoTrack[]
}
export type MP4ArrayBuffer = ArrayBuffer & { fileStart: number }
export interface MP4File {
onMoovStart?: () => void
onReady?: (info: MP4Info) => void
onError?: (e: string) => void
onSamples?: (id: number, user: any, samples: Sample[]) => void
appendBuffer(data: MP4ArrayBuffer): number
start(): void
stop(): void
flush(): void
setExtractionOptions(id: number, user: any, options: ExtractionOptions): void
}
export function createFile(): MP4File
export interface Sample {
number: number
track_id: number
timescale: number
description_index: number
description: any
data: ArrayBuffer
size: number
alreadyRead?: number
duration: number
cts: number
dts: number
is_sync: boolean
is_leading: number
depends_on: number
is_depended_on: number
has_redundancy: number
degration_priority: number
offset: number
subsamples: any
}
export interface ExtractionOptions {
nbSamples: number
}
const BIG_ENDIAN: boolean
const LITTLE_ENDIAN: boolean
export class DataStream {
constructor(buffer?: ArrayBuffer, byteOffset?: number, littleEndian?: boolean)
getPosition(): number
get byteLength(): number
get buffer(): ArrayBuffer
set buffer(v: ArrayBuffer)
get byteOffset(): number
set byteOffset(v: number)
get dataView(): DataView
set dataView(v: DataView)
seek(pos: number): void
isEof(): boolean
mapUint8Array(length: number): Uint8Array
readInt32Array(length: number, littleEndian: boolean): Int32Array
readInt16Array(length: number, littleEndian: boolean): Int16Array
readInt8Array(length: number): Int8Array
readUint32Array(length: number, littleEndian: boolean): Uint32Array
readUint16Array(length: number, littleEndian: boolean): Uint16Array
readUint8Array(length: number): Uint8Array
readFloat64Array(length: number, littleEndian: boolean): Float64Array
readFloat32Array(length: number, littleEndian: boolean): Float32Array
readInt32(littleEndian: boolean): number
readInt16(littleEndian: boolean): number
readInt8(): number
readUint32(littleEndian: boolean): number
readUint16(littleEndian: boolean): number
readUint8(): number
readFloat32(littleEndian: boolean): number
readFloat64(littleEndian: boolean): number
endianness: boolean
memcpy(
dst: ArrayBufferLike,
dstOffset: number,
src: ArrayBufferLike,
srcOffset: number,
byteLength: number
): void
// TODO I got bored porting the remaining functions
}
export class Box {
write(stream: DataStream): void
}
export interface TrackOptions {
id?: number
type?: string
width?: number
height?: number
duration?: number
layer?: number
timescale?: number
media_duration?: number
language?: string
hdlr?: string
// video
avcDecoderConfigRecord?: any
// audio
balance?: number
channel_count?: number
samplesize?: number
samplerate?: number
//captions
namespace?: string
schema_location?: string
auxiliary_mime_types?: string
description?: any
description_boxes?: Box[]
default_sample_description_index_id?: number
default_sample_duration?: number
default_sample_size?: number
default_sample_flags?: number
}
export interface FileOptions {
brands?: string[]
timescale?: number
rate?: number
duration?: number
width?: number
}
export interface SampleOptions {
sample_description_index?: number
duration?: number
cts?: number
dts?: number
is_sync?: boolean
is_leading?: number
depends_on?: number
is_depended_on?: number
has_redundancy?: number
degradation_priority?: number
subsamples?: any
}
// TODO add the remaining functions
// TODO move to another module
export class ISOFile {
constructor(stream?: DataStream)
init(options?: FileOptions): ISOFile
addTrack(options?: TrackOptions): number
addSample(track: number, data: ArrayBuffer, options?: SampleOptions): Sample
createSingleSampleMoof(sample: Sample): Box
// helpers
getTrackById(id: number): Box | undefined
getTrexById(id: number): Box | undefined
}
export {}
}


@ -1,79 +0,0 @@
import * as Message from "./message"
import { Ring } from "./ring"
export default class Audio {
ring?: Ring
queue: Array<AudioData>
render?: number // non-zero if requestAnimationFrame has been called
last?: number // the timestamp of the last rendered frame, in microseconds
constructor(_config: Message.Config) {
this.queue = []
}
push(frame: AudioData) {
// Drop any old frames
if (this.last && frame.timestamp <= this.last) {
frame.close()
return
}
// Insert the frame into the queue sorted by timestamp.
if (this.queue.length > 0 && this.queue[this.queue.length - 1].timestamp <= frame.timestamp) {
// Fast path because we normally append to the end.
this.queue.push(frame)
} else {
// Do a full binary search
let low = 0
let high = this.queue.length
while (low < high) {
const mid = (low + high) >>> 1
if (this.queue[mid].timestamp < frame.timestamp) low = mid + 1
else high = mid
}
this.queue.splice(low, 0, frame)
}
this.emit()
}
emit() {
const ring = this.ring
if (!ring) {
return
}
while (this.queue.length) {
const frame = this.queue[0]
if (ring.size() + frame.numberOfFrames > ring.capacity) {
// Buffer is full
break
}
const size = ring.write(frame)
if (size < frame.numberOfFrames) {
throw new Error("audio buffer is full")
}
this.last = frame.timestamp
frame.close()
this.queue.shift()
}
}
play(play: Message.Play) {
this.ring = new Ring(play.buffer)
if (!this.render) {
const sampleRate = 44100 // TODO dynamic
// Refresh every half buffer
const refresh = ((play.buffer.capacity / sampleRate) * 1000) / 2
this.render = setInterval(this.emit.bind(this), refresh)
}
}
}


@ -1,214 +0,0 @@
import * as Message from "./message"
import * as MP4 from "../mp4"
import * as Stream from "../stream"
import Renderer from "./renderer"
import { Deferred } from "../util"
export default class Decoder {
decoders: Map<number, AudioDecoder | VideoDecoder>
renderer: Renderer
init: Deferred<MP4.ArrayBuffer[]>
constructor(renderer: Renderer) {
this.init = new Deferred()
this.decoders = new Map()
this.renderer = renderer
}
async receiveInit(msg: Message.Init) {
const init = new Array<MP4.ArrayBuffer>()
let offset = 0
const stream = new Stream.Reader(msg.reader, msg.buffer)
for (;;) {
const data = await stream.read()
if (!data) break
// Make a copy of the atom because mp4box only accepts an ArrayBuffer unfortunately
const box = new Uint8Array(data.byteLength)
box.set(data)
// and for some reason we need to modify the underlying ArrayBuffer with fileStart
const buffer = box.buffer as MP4.ArrayBuffer
buffer.fileStart = offset
// Add the box to our queue of chunks
init.push(buffer)
offset += data.byteLength
}
this.init.resolve(init)
}
async receiveSegment(msg: Message.Segment) {
// Wait for the init segment to be fully received and parsed
const input = MP4.New()
input.onSamples = this.onSamples.bind(this)
input.onReady = (info: MP4.Info) => {
// Extract all of the tracks, because we don't know if it's audio or video.
for (const track of info.tracks) {
input.setExtractionOptions(track.id, track, { nbSamples: 1 })
}
input.start()
}
// MP4box requires us to parse the init segment for each segment unfortunately
// TODO If this sees production usage, I would recommend caching this somehow.
let offset = 0
const init = await this.init.promise
for (const raw of init) {
offset = input.appendBuffer(raw)
}
const stream = new Stream.Reader(msg.reader, msg.buffer)
// For whatever reason, mp4box doesn't work unless you feed it one atom at a time.
while (!(await stream.done())) {
const raw = await stream.peek(4)
// TODO this doesn't support when size = 0 (until EOF) or size = 1 (extended size)
const size = new DataView(raw.buffer, raw.byteOffset, raw.byteLength).getUint32(0)
const atom = await stream.bytes(size)
// Make a copy of the atom because mp4box only accepts an ArrayBuffer unfortunately
const box = new Uint8Array(atom.byteLength)
box.set(atom)
// and for some reason we need to modify the underlying ArrayBuffer with offset
const buffer = box.buffer as MP4.ArrayBuffer
buffer.fileStart = offset
// Parse the data
offset = input.appendBuffer(buffer)
input.flush()
}
}
onSamples(_track_id: number, track: MP4.Track, samples: MP4.Sample[]) {
if (!track.track_width) {
// TODO ignoring audio to debug
return
}
let decoder
if (isVideoTrack(track)) {
// We need a sample to initialize the video decoder, because of mp4box limitations.
decoder = this.videoDecoder(track, samples[0])
} else if (isAudioTrack(track)) {
decoder = this.audioDecoder(track)
} else {
throw new Error("unknown track type")
}
for (const sample of samples) {
// Convert to microseconds
const timestamp = (1000 * 1000 * sample.dts) / sample.timescale
const duration = (1000 * 1000 * sample.duration) / sample.timescale
if (!decoder) {
throw new Error("decoder not initialized")
} else if (isAudioDecoder(decoder)) {
decoder.decode(
new EncodedAudioChunk({
type: sample.is_sync ? "key" : "delta",
data: sample.data,
duration: duration,
timestamp: timestamp,
})
)
} else if (isVideoDecoder(decoder)) {
decoder.decode(
new EncodedVideoChunk({
type: sample.is_sync ? "key" : "delta",
data: sample.data,
duration: duration,
timestamp: timestamp,
})
)
} else {
throw new Error("unknown decoder type")
}
}
}
audioDecoder(track: MP4.AudioTrack): AudioDecoder {
// Reuse the audio decoder when possible to avoid glitches.
// TODO detect when the codec changes and make a new decoder.
const decoder = this.decoders.get(track.id)
if (decoder && isAudioDecoder(decoder)) {
return decoder
}
const audioDecoder = new AudioDecoder({
output: this.renderer.push.bind(this.renderer),
error: console.error,
})
audioDecoder.configure({
codec: track.codec,
numberOfChannels: track.audio.channel_count,
sampleRate: track.audio.sample_rate,
})
this.decoders.set(track.id, audioDecoder)
return audioDecoder
}
videoDecoder(track: MP4.VideoTrack, sample: MP4.Sample): VideoDecoder {
// Make a new video decoder for each keyframe.
if (!sample.is_sync) {
const decoder = this.decoders.get(track.id)
if (decoder && isVideoDecoder(decoder)) {
return decoder
}
}
// Configure the decoder using the AVC box for H.264
// TODO it should be easy to support other codecs, just need to know the right boxes.
const avcc = sample.description.avcC
if (!avcc) throw new Error("TODO only h264 is supported")
const description = new MP4.Stream(new Uint8Array(avcc.size), 0, false)
avcc.write(description)
const videoDecoder = new VideoDecoder({
output: this.renderer.push.bind(this.renderer),
error: console.error,
})
videoDecoder.configure({
codec: track.codec,
codedHeight: track.video.height,
codedWidth: track.video.width,
description: description.buffer?.slice(8),
// optimizeForLatency: true
})
this.decoders.set(track.id, videoDecoder)
return videoDecoder
}
}
function isAudioDecoder(decoder: AudioDecoder | VideoDecoder): decoder is AudioDecoder {
return decoder instanceof AudioDecoder
}
function isVideoDecoder(decoder: AudioDecoder | VideoDecoder): decoder is VideoDecoder {
return decoder instanceof VideoDecoder
}
function isAudioTrack(track: MP4.Track): track is MP4.AudioTrack {
return (track as MP4.AudioTrack).audio !== undefined
}
function isVideoTrack(track: MP4.Track): track is MP4.VideoTrack {
return (track as MP4.VideoTrack).video !== undefined
}


@ -1,88 +0,0 @@
import * as Message from "./message"
import * as Ring from "./ring"
import Transport from "../transport"
export interface Config {
transport: Transport
canvas: OffscreenCanvas
}
// This class must be created on the main thread due to AudioContext.
export default class Player {
context: AudioContext
worker: Worker
worklet: Promise<AudioWorkletNode>
transport: Transport
constructor(config: Config) {
this.transport = config.transport
this.transport.callback = this
this.context = new AudioContext({
latencyHint: "interactive",
sampleRate: 44100,
})
this.worker = this.setupWorker(config)
this.worklet = this.setupWorklet(config)
}
private setupWorker(config: Config): Worker {
const url = new URL("worker.ts", import.meta.url)
const worker = new Worker(url, {
type: "module",
name: "media",
})
const msg = {
canvas: config.canvas,
}
worker.postMessage({ config: msg }, [msg.canvas])
return worker
}
private async setupWorklet(_config: Config): Promise<AudioWorkletNode> {
// Load the worklet source code.
const url = new URL("worklet.ts", import.meta.url)
await this.context.audioWorklet.addModule(url)
const volume = this.context.createGain()
volume.gain.value = 2.0
// Create a worklet
const worklet = new AudioWorkletNode(this.context, "renderer")
worklet.onprocessorerror = (e: Event) => {
console.error("Audio worklet error:", e)
}
// Connect the worklet to the volume node and then to the speakers
worklet.connect(volume)
volume.connect(this.context.destination)
return worklet
}
onInit(init: Message.Init) {
this.worker.postMessage({ init }, [init.buffer.buffer, init.reader])
}
onSegment(segment: Message.Segment) {
this.worker.postMessage({ segment }, [segment.buffer.buffer, segment.reader])
}
async play() {
this.context.resume()
const play = {
buffer: new Ring.Buffer(2, 44100 / 10), // 100ms of audio
}
const worklet = await this.worklet
worklet.port.postMessage({ play })
this.worker.postMessage({ play })
}
}


@ -1,21 +0,0 @@
import * as Ring from "./ring"
export interface Config {
// video stuff
canvas: OffscreenCanvas
}
export interface Init {
buffer: Uint8Array // unread buffered data
reader: ReadableStream // unread unbuffered data
}
export interface Segment {
buffer: Uint8Array // unread buffered data
reader: ReadableStream // unread unbuffered data
}
export interface Play {
timestamp?: number
buffer: Ring.Buffer
}


@ -1,36 +0,0 @@
import * as Message from "./message"
import Audio from "./audio"
import Video from "./video"
export default class Renderer {
audio: Audio
video: Video
constructor(config: Message.Config) {
this.audio = new Audio(config)
this.video = new Video(config)
}
push(frame: AudioData | VideoFrame) {
if (isAudioData(frame)) {
this.audio.push(frame)
} else if (isVideoFrame(frame)) {
this.video.push(frame)
} else {
throw new Error("unknown frame type")
}
}
play(play: Message.Play) {
this.audio.play(play)
this.video.play(play)
}
}
function isAudioData(frame: AudioData | VideoFrame): frame is AudioData {
return frame instanceof AudioData
}
function isVideoFrame(frame: AudioData | VideoFrame): frame is VideoFrame {
return frame instanceof VideoFrame
}


@ -1,155 +0,0 @@
// Ring buffer with audio samples.
enum STATE {
READ_POS = 0, // The current read position
WRITE_POS, // The current write position
LENGTH, // Clever way of saving the total number of enum values.
}
// No prototype to make this easier to send via postMessage
export class Buffer {
state: SharedArrayBuffer
channels: SharedArrayBuffer[]
capacity: number
constructor(channels: number, capacity: number) {
// Store the current state in a separate ring buffer.
this.state = new SharedArrayBuffer(STATE.LENGTH * Int32Array.BYTES_PER_ELEMENT)
// Create a buffer for each audio channel
this.channels = []
for (let i = 0; i < channels; i += 1) {
const buffer = new SharedArrayBuffer(capacity * Float32Array.BYTES_PER_ELEMENT)
this.channels.push(buffer)
}
this.capacity = capacity
}
}
export class Ring {
state: Int32Array
channels: Float32Array[]
capacity: number
constructor(buffer: Buffer) {
this.state = new Int32Array(buffer.state)
this.channels = []
for (const channel of buffer.channels) {
this.channels.push(new Float32Array(channel))
}
this.capacity = buffer.capacity
}
// Write samples for single audio frame, returning the total number written.
write(frame: AudioData): number {
const readPos = Atomics.load(this.state, STATE.READ_POS)
const writePos = Atomics.load(this.state, STATE.WRITE_POS)
const startPos = writePos
let endPos = writePos + frame.numberOfFrames
if (endPos > readPos + this.capacity) {
endPos = readPos + this.capacity
if (endPos <= startPos) {
// No space to write
return 0
}
}
const startIndex = startPos % this.capacity
const endIndex = endPos % this.capacity
// Loop over each channel
for (let i = 0; i < this.channels.length; i += 1) {
const channel = this.channels[i]
if (startIndex < endIndex) {
// One continuous range to copy.
const full = channel.subarray(startIndex, endIndex)
frame.copyTo(full, {
planeIndex: i,
frameCount: endIndex - startIndex,
})
} else {
const first = channel.subarray(startIndex)
const second = channel.subarray(0, endIndex)
frame.copyTo(first, {
planeIndex: i,
frameCount: first.length,
})
// We need this conditional when startIndex == 0 and endIndex == 0
// When capacity=4410 and frameCount=1024, this was happening 52s into the audio.
if (second.length) {
frame.copyTo(second, {
planeIndex: i,
frameOffset: first.length,
frameCount: second.length,
})
}
}
}
Atomics.store(this.state, STATE.WRITE_POS, endPos)
return endPos - startPos
}
read(dst: Float32Array[]): number {
const readPos = Atomics.load(this.state, STATE.READ_POS)
const writePos = Atomics.load(this.state, STATE.WRITE_POS)
const startPos = readPos
let endPos = startPos + dst[0].length
if (endPos > writePos) {
endPos = writePos
if (endPos <= startPos) {
// Nothing to read
return 0
}
}
const startIndex = startPos % this.capacity
const endIndex = endPos % this.capacity
// Loop over each channel
for (let i = 0; i < dst.length; i += 1) {
if (i >= this.channels.length) {
// ignore excess channels
continue
}
const input = this.channels[i]
const output = dst[i]
if (startIndex < endIndex) {
const full = input.subarray(startIndex, endIndex)
output.set(full)
} else {
const first = input.subarray(startIndex)
const second = input.subarray(0, endIndex)
output.set(first)
output.set(second, first.length)
}
}
Atomics.store(this.state, STATE.READ_POS, endPos)
return endPos - startPos
}
size() {
// TODO is this thread safe?
const readPos = Atomics.load(this.state, STATE.READ_POS)
const writePos = Atomics.load(this.state, STATE.WRITE_POS)
return writePos - readPos
}
}


@ -1,98 +0,0 @@
import * as Message from "./message"
export default class Video {
canvas: OffscreenCanvas
queue: Array<VideoFrame>
render: number // non-zero if requestAnimationFrame has been called
sync?: number // the wall clock value for timestamp 0, in microseconds
last?: number // the timestamp of the last rendered frame, in microseconds
constructor(config: Message.Config) {
this.canvas = config.canvas
this.queue = []
this.render = 0
}
push(frame: VideoFrame) {
// Drop any old frames
if (this.last && frame.timestamp <= this.last) {
frame.close()
return
}
// Insert the frame into the queue sorted by timestamp.
if (this.queue.length > 0 && this.queue[this.queue.length - 1].timestamp <= frame.timestamp) {
// Fast path because we normally append to the end.
this.queue.push(frame)
} else {
// Do a full binary search
let low = 0
let high = this.queue.length
while (low < high) {
const mid = (low + high) >>> 1
if (this.queue[mid].timestamp < frame.timestamp) low = mid + 1
else high = mid
}
this.queue.splice(low, 0, frame)
}
}
draw(now: number) {
// Draw and then queue up the next draw call.
this.drawOnce(now)
// Queue up the new draw frame.
this.render = self.requestAnimationFrame(this.draw.bind(this))
}
drawOnce(now: number) {
// Convert to microseconds
now *= 1000
if (!this.queue.length) {
return
}
let frame = this.queue[0]
if (!this.sync) {
this.sync = now - frame.timestamp
}
// Determine the target timestamp.
const target = now - this.sync
if (frame.timestamp >= target) {
// nothing to render yet, wait for the next animation frame
return
}
this.queue.shift()
// Check if we should skip some frames
while (this.queue.length) {
const next = this.queue[0]
if (next.timestamp > target) break
frame.close()
frame = this.queue.shift()!
}
const ctx = this.canvas.getContext("2d")
ctx!.drawImage(frame, 0, 0, this.canvas.width, this.canvas.height) // TODO aspect ratio
this.last = frame.timestamp
frame.close()
}
play(_play: Message.Play) {
// Queue up to render the next frame.
if (!this.render) {
this.render = self.requestAnimationFrame(this.draw.bind(this))
}
}
}


@ -1,24 +0,0 @@
import Renderer from "./renderer"
import Decoder from "./decoder"
import * as Message from "./message"
let decoder: Decoder
let renderer: Renderer
self.addEventListener("message", async (e: MessageEvent) => {
if (e.data.config) {
const config = e.data.config as Message.Config
renderer = new Renderer(config)
decoder = new Decoder(renderer)
} else if (e.data.init) {
const init = e.data.init as Message.Init
await decoder.receiveInit(init)
} else if (e.data.segment) {
const segment = e.data.segment as Message.Segment
await decoder.receiveSegment(segment)
} else if (e.data.play) {
const play = e.data.play as Message.Play
await renderer.play(play)
}
})


@ -1,53 +0,0 @@
// This is an AudioWorklet that acts as a media source.
// The renderer copies audio samples to a ring buffer read by this worklet.
// The worklet then outputs those samples to emit audio.
import * as Message from "./message"
import { Ring } from "./ring"
class Renderer extends AudioWorkletProcessor {
ring?: Ring
base: number
constructor(_params: AudioWorkletNodeOptions) {
// The super constructor call is required.
super()
this.base = 0
this.port.onmessage = this.onMessage.bind(this)
}
onMessage(e: MessageEvent) {
if (e.data.play) {
this.onPlay(e.data.play)
}
}
onPlay(play: Message.Play) {
this.ring = new Ring(play.buffer)
}
// Inputs and outputs in groups of 128 samples.
process(inputs: Float32Array[][], outputs: Float32Array[][], _parameters: Record<string, Float32Array>): boolean {
if (!this.ring) {
// Paused
return true
}
if (inputs.length != 1 && outputs.length != 1) {
throw new Error("only a single track is supported")
}
const output = outputs[0]
const size = this.ring.read(output)
if (size < output.length) {
// TODO trigger rebuffering event
}
return true
}
}
registerProcessor("renderer", Renderer)


@ -1,2 +0,0 @@
export { default as Reader } from "./reader"
export { default as Writer } from "./writer"


@ -1,195 +0,0 @@
// Reader wraps a stream and provides convenience methods for reading pieces from a stream
export default class Reader {
reader: ReadableStream
buffer: Uint8Array
constructor(reader: ReadableStream, buffer: Uint8Array = new Uint8Array(0)) {
this.reader = reader
this.buffer = buffer
}
// Returns any number of bytes
async read(): Promise<Uint8Array | undefined> {
if (this.buffer.byteLength) {
const buffer = this.buffer
this.buffer = new Uint8Array()
return buffer
}
const r = this.reader.getReader()
const result = await r.read()
r.releaseLock()
return result.value
}
async readAll(): Promise<Uint8Array> {
const r = this.reader.getReader()
for (;;) {
const result = await r.read()
if (result.done) {
break
}
const buffer = new Uint8Array(result.value)
if (this.buffer.byteLength == 0) {
this.buffer = buffer
} else {
const temp = new Uint8Array(this.buffer.byteLength + buffer.byteLength)
temp.set(this.buffer)
temp.set(buffer, this.buffer.byteLength)
this.buffer = temp
}
}
const result = this.buffer
this.buffer = new Uint8Array()
r.releaseLock()
return result
}
async bytes(size: number): Promise<Uint8Array> {
const r = this.reader.getReader()
while (this.buffer.byteLength < size) {
const result = await r.read()
if (result.done) {
throw "short buffer"
}
const buffer = new Uint8Array(result.value)
if (this.buffer.byteLength == 0) {
this.buffer = buffer
} else {
const temp = new Uint8Array(this.buffer.byteLength + buffer.byteLength)
temp.set(this.buffer)
temp.set(buffer, this.buffer.byteLength)
this.buffer = temp
}
}
const result = new Uint8Array(this.buffer.buffer, this.buffer.byteOffset, size)
this.buffer = new Uint8Array(this.buffer.buffer, this.buffer.byteOffset + size)
r.releaseLock()
return result
}
async peek(size: number): Promise<Uint8Array> {
const r = this.reader.getReader()
while (this.buffer.byteLength < size) {
const result = await r.read()
if (result.done) {
throw "short buffer"
}
const buffer = new Uint8Array(result.value)
if (this.buffer.byteLength == 0) {
this.buffer = buffer
} else {
const temp = new Uint8Array(this.buffer.byteLength + buffer.byteLength)
temp.set(this.buffer)
temp.set(buffer, this.buffer.byteLength)
this.buffer = temp
}
}
const result = new Uint8Array(this.buffer.buffer, this.buffer.byteOffset, size)
r.releaseLock()
return result
}
async view(size: number): Promise<DataView> {
const buf = await this.bytes(size)
return new DataView(buf.buffer, buf.byteOffset, buf.byteLength)
}
async uint8(): Promise<number> {
const view = await this.view(1)
return view.getUint8(0)
}
async uint16(): Promise<number> {
const view = await this.view(2)
return view.getUint16(0)
}
async uint32(): Promise<number> {
const view = await this.view(4)
return view.getUint32(0)
}
// Returns a Number using 52-bits, the max Javascript can use for integer math
async uint52(): Promise<number> {
const v = await this.uint64()
if (v > Number.MAX_SAFE_INTEGER) {
throw "overflow"
}
return Number(v)
}
// Returns a Number using 52-bits, the max Javascript can use for integer math
async vint52(): Promise<number> {
const v = await this.vint64()
if (v > Number.MAX_SAFE_INTEGER) {
throw "overflow"
}
return Number(v)
}
// NOTE: Returns a BigInt instead of a Number
async uint64(): Promise<bigint> {
const view = await this.view(8)
return view.getBigUint64(0)
}
// NOTE: Returns a BigInt instead of a Number
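// QUIC-style variable-length integer: the top two bits of the first byte encode
// the total size (00=1, 01=2, 10=4, 11=8 bytes); the remaining bits hold the value.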
async vint64(): Promise<bigint> {
const peek = await this.peek(1)
const first = new DataView(peek.buffer, peek.byteOffset, peek.byteLength).getUint8(0)
const size = (first & 0xc0) >> 6
switch (size) {
case 0: {
const v = await this.uint8()
return BigInt(v) & 0x3fn
}
case 1: {
const v = await this.uint16()
return BigInt(v) & 0x3fffn
}
case 2: {
const v = await this.uint32()
return BigInt(v) & 0x3fffffffn
}
case 3: {
const v = await this.uint64()
return v & 0x3fffffffffffffffn
}
default:
throw "impossible"
}
}
async done(): Promise<boolean> {
try {
await this.peek(1)
return false
} catch (err) {
return true // Assume EOF
}
}
}


@ -1,100 +0,0 @@
// Writer wraps a stream and writes chunks of data
export default class Writer {
buffer: ArrayBuffer
writer: WritableStreamDefaultWriter
constructor(stream: WritableStream) {
this.buffer = new ArrayBuffer(8)
this.writer = stream.getWriter()
}
release() {
this.writer.releaseLock()
}
async close() {
return this.writer.close()
}
async uint8(v: number) {
const view = new DataView(this.buffer, 0, 1)
view.setUint8(0, v)
return this.writer.write(view)
}
async uint16(v: number) {
const view = new DataView(this.buffer, 0, 2)
view.setUint16(0, v)
return this.writer.write(view)
}
async uint24(v: number) {
const v1 = (v >> 16) & 0xff
const v2 = (v >> 8) & 0xff
const v3 = v & 0xff
const view = new DataView(this.buffer, 0, 3)
view.setUint8(0, v1)
view.setUint8(1, v2)
view.setUint8(2, v3)
return this.writer.write(view)
}
async uint32(v: number) {
const view = new DataView(this.buffer, 0, 4)
view.setUint32(0, v)
return this.writer.write(view)
}
async uint52(v: number) {
if (v > Number.MAX_SAFE_INTEGER) {
throw "value too large"
}
this.uint64(BigInt(v))
}
async vint52(v: number) {
if (v > Number.MAX_SAFE_INTEGER) {
throw "value too large"
}
if (v < 1 << 6) {
return this.uint8(v)
} else if (v < 1 << 14) {
return this.uint16(v | 0x4000)
} else if (v < 1 << 30) {
return this.uint32(v | 0x80000000)
} else {
return this.uint64(BigInt(v) | 0xc000000000000000n)
}
}
async uint64(v: bigint) {
const view = new DataView(this.buffer, 0, 8)
view.setBigUint64(0, v)
return this.writer.write(view)
}
async vint64(v: bigint) {
if (v < 1 << 6) {
return this.uint8(Number(v))
} else if (v < 1 << 14) {
return this.uint16(Number(v) | 0x4000)
} else if (v < 1 << 30) {
return this.uint32(Number(v) | 0x80000000)
} else {
return this.uint64(v | 0xc000000000000000n)
}
}
async bytes(buffer: ArrayBuffer) {
return this.writer.write(buffer)
}
async string(str: string) {
const data = new TextEncoder().encode(str)
return this.writer.write(data)
}
}


@ -1,96 +0,0 @@
import * as Stream from "../stream"
import * as Interface from "./interface"
export interface Config {
url: string
fingerprint?: WebTransportHash // the certificate fingerprint, temporarily needed for local development
}
export default class Transport {
quic: Promise<WebTransport>
api: Promise<WritableStream>
callback?: Interface.Callback
constructor(config: Config) {
this.quic = this.connect(config)
// Create a unidirectional stream for all of our messages
this.api = this.quic.then((q) => {
return q.createUnidirectionalStream()
})
// async functions
this.receiveStreams()
}
async close() {
;(await this.quic).close()
}
// Helper function to make creating a promise easier
private async connect(config: Config): Promise<WebTransport> {
const options: WebTransportOptions = {}
if (config.fingerprint) {
options.serverCertificateHashes = [config.fingerprint]
}
const quic = new WebTransport(config.url, options)
await quic.ready
return quic
}
async sendMessage(msg: any) {
const payload = JSON.stringify(msg)
const size = payload.length + 8
const stream = await this.api
const writer = new Stream.Writer(stream)
await writer.uint32(size)
await writer.string("warp")
await writer.string(payload)
writer.release()
}
async receiveStreams() {
const q = await this.quic
const streams = q.incomingUnidirectionalStreams.getReader()
for (;;) {
const result = await streams.read()
if (result.done) break
const stream = result.value
this.handleStream(stream) // don't await
}
}
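// Each incoming stream starts with an MP4-style atom: a 4-byte big-endian size
// (which includes the 8 header bytes), the 4-byte type "warp", and a JSON message.
// The rest of the stream is handed to the player as the media payload.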
async handleStream(stream: ReadableStream) {
const r = new Stream.Reader(stream)
while (!(await r.done())) {
const size = await r.uint32()
const typ = new TextDecoder("utf-8").decode(await r.bytes(4))
if (typ != "warp") throw "expected warp atom"
if (size < 8) throw "atom too small"
const payload = new TextDecoder("utf-8").decode(await r.bytes(size - 8))
const msg = JSON.parse(payload)
if (msg.init) {
return this.callback?.onInit({
buffer: r.buffer,
reader: r.reader,
})
} else if (msg.segment) {
return this.callback?.onSegment({
buffer: r.buffer,
reader: r.reader,
})
} else {
console.warn("unknown message", msg)
}
}
}
}


@ -1,14 +0,0 @@
export interface Callback {
onInit(init: Init): any
onSegment(segment: Segment): any
}
export interface Init {
buffer: Uint8Array // unread buffered data
reader: ReadableStream // unread unbuffered data
}
export interface Segment {
buffer: Uint8Array // unread buffered data
reader: ReadableStream // unread unbuffered data
}


@ -1,7 +0,0 @@
// TODO fill in required fields
export type Init = any
export type Segment = any
export interface Debug {
max_bitrate: number
}


@ -1,84 +0,0 @@
declare module "webtransport"
/*
There's no WebTransport support in TypeScript yet. Use this script to update definitions:
npx webidl2ts -i https://www.w3.org/TR/webtransport/ -o webtransport.d.ts
You'll have to fix the constructors by hand.
*/
interface WebTransportDatagramDuplexStream {
readonly readable: ReadableStream
readonly writable: WritableStream
readonly maxDatagramSize: number
incomingMaxAge: number
outgoingMaxAge: number
incomingHighWaterMark: number
outgoingHighWaterMark: number
}
interface WebTransport {
getStats(): Promise<WebTransportStats>
readonly ready: Promise<undefined>
readonly closed: Promise<WebTransportCloseInfo>
close(closeInfo?: WebTransportCloseInfo): undefined
readonly datagrams: WebTransportDatagramDuplexStream
createBidirectionalStream(): Promise<WebTransportBidirectionalStream>
readonly incomingBidirectionalStreams: ReadableStream
createUnidirectionalStream(): Promise<WritableStream>
readonly incomingUnidirectionalStreams: ReadableStream
}
declare const WebTransport: {
prototype: WebTransport
new (url: string, options?: WebTransportOptions): WebTransport
}
interface WebTransportHash {
algorithm?: string
value?: BufferSource
}
interface WebTransportOptions {
allowPooling?: boolean
serverCertificateHashes?: Array<WebTransportHash>
}
interface WebTransportCloseInfo {
closeCode?: number
reason?: string
}
interface WebTransportStats {
timestamp?: DOMHighResTimeStamp
bytesSent?: number
packetsSent?: number
numOutgoingStreamsCreated?: number
numIncomingStreamsCreated?: number
bytesReceived?: number
packetsReceived?: number
minRtt?: DOMHighResTimeStamp
numReceivedDatagramsDropped?: number
}
interface WebTransportBidirectionalStream {
readonly readable: ReadableStream
readonly writable: WritableStream
}
interface WebTransportError extends DOMException {
readonly source: WebTransportErrorSource
readonly streamErrorCode: number
}
declare const WebTransportError: {
prototype: WebTransportError
new (init?: WebTransportErrorInit): WebTransportError
}
interface WebTransportErrorInit {
streamErrorCode?: number
message?: string
}
type WebTransportErrorSource = "stream" | "session"


@ -1,20 +0,0 @@
export default class Deferred<T> {
promise: Promise<T>
resolve: (value: T | PromiseLike<T>) => void
reject: (value: T | PromiseLike<T>) => void
constructor() {
// Set initial values so TS stops being annoying.
this.resolve = (_value: T | PromiseLike<T>) => {
/* noop */
}
this.reject = (_value: T | PromiseLike<T>) => {
/* noop */
}
this.promise = new Promise((resolve, reject) => {
this.resolve = resolve
this.reject = reject
})
}
}


@ -1 +0,0 @@
export { default as Deferred } from "./deferred"


@ -1,9 +0,0 @@
{
"include": ["src/**/*"],
"compilerOptions": {
"target": "es2022",
"module": "es2022",
"moduleResolution": "node",
"strict": true
}
}

File diff suppressed because it is too large.