Refactor the TS code a bit.

Luke Curley 2023-05-08 10:30:32 -06:00
parent e6791b872d
commit 9f0c24b552
16 changed files with 238 additions and 179 deletions

View File

@@ -6,8 +6,7 @@ cd "$(dirname "$0")"
 # separate_moof: Splits audio and video into separate moof flags.
 # omit_tfhd_offset: Removes absolute byte offsets so we can fragment.
-ffmpeg -i source.mp4 \
-	-c copy \
+ffmpeg -i source.mp4 -y \
 	-movflags empty_moov+frag_every_frame+separate_moof+omit_tfhd_offset \
-	-c:v copy \
-	-an \
-	fragmented.mp4
+	fragmented.mp4 2>&1

View File

@@ -1,4 +1,5 @@
 import Player from "./player"
+import Transport from "./transport"
 
 // @ts-ignore embed the certificate fingerprint using bundler
 import fingerprintHex from 'bundle-text:../fingerprint.hex';
@@ -14,19 +15,23 @@ const params = new URLSearchParams(window.location.search)
 const url = params.get("url") || "https://127.0.0.1:4443/watch"
 const canvas = document.querySelector<HTMLCanvasElement>("canvas#video")!
 
-const player = new Player({
+const transport = new Transport({
 	url: url,
 	fingerprint: { // TODO remove when Chrome accepts the system CA
 		"algorithm": "sha-256",
 		"value": new Uint8Array(fingerprint),
 	},
-	canvas: canvas,
+})
+
+const player = new Player({
+	transport,
+	canvas: canvas.transferControlToOffscreen(),
 })
 
 const play = document.querySelector<HTMLElement>("#screen #play")!
 let playFunc = (e: Event) => {
-	player.play()
+	player.play({})
 	e.preventDefault()
 
 	play.removeEventListener('click', playFunc)
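
The main thread now builds the Transport first and hands the Player an OffscreenCanvas. As context for the canvas change: transferControlToOffscreen() hands rendering of the on-page <canvas> to whoever ends up holding the OffscreenCanvas (here the media worker, via Player), and the element itself just displays what gets drawn there. A minimal sketch of the transfer semantics, using generic browser APIs rather than code from this commit:

const canvas = document.querySelector<HTMLCanvasElement>("canvas#video")!
const offscreen = canvas.transferControlToOffscreen()

// OffscreenCanvas is a transferable: listing it in the transfer array moves it to the
// worker instead of copying it, and the main thread can no longer draw on the element.
const worker = new Worker(new URL("worker.ts", import.meta.url), { type: "module" })
worker.postMessage({ canvas: offscreen }, [offscreen])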

View File

@@ -1,81 +0,0 @@
-import * as Message from "./message"
-import { RingInit } from "./ring"
-
-// Abstracts the Worker and Worklet into a simpler API
-// This class must be created on the main thread due to AudioContext.
-export default class Media {
-	context: AudioContext;
-	worker: Worker;
-	worklet: Promise<AudioWorkletNode>;
-
-	constructor(videoConfig: Message.VideoConfig) {
-		// Assume 44.1kHz and two audio channels
-		const audioConfig = {
-			sampleRate: 44100,
-			ring: new RingInit(2, 4410), // 100ms at 44.1khz
-		}
-
-		const config = {
-			audio: audioConfig,
-			video: videoConfig,
-		}
-
-		this.context = new AudioContext({
-			latencyHint: "interactive",
-			sampleRate: config.audio.sampleRate,
-		})
-
-		this.worker = this.setupWorker(config)
-		this.worklet = this.setupWorklet(config)
-	}
-
-	private setupWorker(config: Message.Config): Worker {
-		const url = new URL('worker.ts', import.meta.url)
-		const worker = new Worker(url, {
-			type: "module",
-			name: "media",
-		})
-
-		worker.postMessage({ config }, [ config.video.canvas ])
-
-		return worker
-	}
-
-	private async setupWorklet(config: Message.Config): Promise<AudioWorkletNode> {
-		// Load the worklet source code.
-		const url = new URL('worklet.ts', import.meta.url)
-		await this.context.audioWorklet.addModule(url)
-
-		const volume = this.context.createGain()
-		volume.gain.value = 2.0;
-
-		// Create a worklet
-		const worklet = new AudioWorkletNode(this.context, 'renderer');
-		worklet.onprocessorerror = (e: Event) => {
-			console.error("Audio worklet error:", e)
-		};
-
-		worklet.port.postMessage({ config })
-
-		// Connect the worklet to the volume node and then to the speakers
-		worklet.connect(volume)
-		volume.connect(this.context.destination)
-
-		return worklet
-	}
-
-	init(init: Message.Init) {
-		this.worker.postMessage({ init }, [ init.buffer.buffer, init.reader ])
-	}
-
-	segment(segment: Message.Segment) {
-		this.worker.postMessage({ segment }, [ segment.buffer.buffer, segment.reader ])
-	}
-
-	play(play: Message.Play) {
-		this.context.resume()
-		//this.worker.postMessage({ play })
-	}
-}

View File

@@ -1,12 +1,16 @@
-import * as MP4 from "./rename"
-export * from "./rename"
+// Rename some stuff so it's on brand.
+export {
+	createFile as New,
+	MP4File as File,
+	MP4ArrayBuffer as ArrayBuffer,
+	MP4Info as Info,
+	MP4Track as Track,
+	MP4AudioTrack as AudioTrack,
+	MP4VideoTrack as VideoTrack,
+	DataStream as Stream,
+	Box,
+	ISOFile,
+	Sample,
+} from "mp4box"
 
 export { Init, InitParser } from "./init"
-
-export function isAudioTrack(track: MP4.Track): track is MP4.AudioTrack {
-	return (track as MP4.AudioTrack).audio !== undefined;
-}
-
-export function isVideoTrack(track: MP4.Track): track is MP4.VideoTrack {
-	return (track as MP4.VideoTrack).video !== undefined;
-}
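
The track type guards move out of this module (they reappear as locals in the decoder further down), leaving the index as the branded re-export only. A minimal sketch of how the renamed surface reads at a call site, assuming the usual mp4box onReady/MP4Info/MP4Track fields (hypothetical usage, not code from this commit):

import * as MP4 from "../mp4"

const mp4 = MP4.New() // mp4box's createFile(), renamed above
mp4.onReady = (info: MP4.Info) => {
	for (const track of info.tracks) {
		console.log("track", track.id, track.codec)
	}
}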

View File

@@ -82,7 +82,7 @@ declare module "mp4box" {
 		description: any;
 		data: ArrayBuffer;
 		size: number;
-		alreadyRead: number;
+		alreadyRead?: number;
 		duration: number;
 		cts: number;
 		dts: number;
@@ -104,7 +104,7 @@ declare module "mp4box" {
 	const LITTLE_ENDIAN: boolean;
 
 	export class DataStream {
-		constructor(buffer: ArrayBuffer, byteOffset?: number, littleEndian?: boolean);
+		constructor(buffer?: ArrayBuffer, byteOffset?: number, littleEndian?: boolean);
 
 		getPosition(): number;
 		get byteLength(): number;
@@ -144,5 +144,82 @@ declare module "mp4box" {
 		// TODO I got bored porting the remaining functions
 	}
 
+	export class Box {
+		write(stream: DataStream): void;
+	}
+
+	export interface TrackOptions {
+		id?: number;
+		type?: string;
+		width?: number;
+		height?: number;
+		duration?: number;
+		layer?: number;
+		timescale?: number;
+		media_duration?: number;
+		language?: string;
+		hdlr?: string;
+
+		// video
+		avcDecoderConfigRecord?: any;
+
+		// audio
+		balance?: number;
+		channel_count?: number;
+		samplesize?: number;
+		samplerate?: number;
+
+		//captions
+		namespace?: string;
+		schema_location?: string;
+		auxiliary_mime_types?: string;
+
+		description?: any;
+		description_boxes?: Box[];
+
+		default_sample_description_index_id?: number;
+		default_sample_duration?: number;
+		default_sample_size?: number;
+		default_sample_flags?: number;
+	}
+
+	export interface FileOptions {
+		brands?: string[];
+		timescale?: number;
+		rate?: number;
+		duration?: number;
+		width?: number;
+	}
+
+	export interface SampleOptions {
+		sample_description_index?: number;
+		duration?: number;
+		cts?: number;
+		dts?: number;
+		is_sync?: boolean;
+		is_leading?: number;
+		depends_on?: number;
+		is_depended_on?: number;
+		has_redundancy?: number;
+		degradation_priority?: number;
+		subsamples?: any;
+	}
+
+	// TODO add the remaining functions
+	// TODO move to another module
+	export class ISOFile {
+		constructor(stream?: DataStream);
+
+		init(options?: FileOptions): ISOFile;
+		addTrack(options?: TrackOptions): number;
+		addSample(track: number, data: ArrayBuffer, options?: SampleOptions): Sample;
+
+		createSingleSampleMoof(sample: Sample): Box;
+
+		// helpers
+		getTrackById(id: number): Box | undefined;
+		getTrexById(id: number): Box | undefined;
+	}
+
 	export { };
 }
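
These declarations only type the file-writing API that mp4box.js exposes. A rough sketch of the kind of muxing they describe, with hypothetical values throughout (track sizes, brands, and the placeholder payload are not from this diff):

import { ISOFile, DataStream } from "mp4box"

// Hypothetical: build one fragmented video track and serialize a single-sample moof.
const file = new ISOFile()
file.init({ timescale: 1000, brands: ["isom", "iso2"] })

const payload = new ArrayBuffer(0) // placeholder sample data
const track = file.addTrack({ type: "avc1", timescale: 1000, width: 1280, height: 720 })
const sample = file.addSample(track, payload, { duration: 33, is_sync: true })

const moof = file.createSingleSampleMoof(sample)
const stream = new DataStream(undefined, 0, false) // big-endian; buffer is optional per the constructor change above
moof.write(stream)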

View File

@@ -1,12 +0,0 @@
-// Rename some stuff so it's on brand.
-export {
-	createFile as New,
-	MP4File as File,
-	MP4ArrayBuffer as ArrayBuffer,
-	MP4Info as Info,
-	MP4Track as Track,
-	MP4AudioTrack as AudioTrack,
-	MP4VideoTrack as VideoTrack,
-	DataStream as Stream,
-	Sample,
-} from "mp4box"

View File

@@ -1,7 +1,6 @@
 import * as Message from "./message";
 import * as MP4 from "../mp4"
 import * as Stream from "../stream"
-import * as Util from "../util"
 
 import Renderer from "./renderer"
@@ -82,7 +81,7 @@ export default class Decoder {
 		// We need a sample to initalize the video decoder, because of mp4box limitations.
 		let sample = samples[0];
 
-		if (MP4.isVideoTrack(track)) {
+		if (isVideoTrack(track)) {
 			// Configure the decoder using the AVC box for H.264
 			// TODO it should be easy to support other codecs, just need to know the right boxes.
 			const avcc = sample.description.avcC;
@@ -105,7 +104,7 @@ export default class Decoder {
 			})
 
 			decoder = videoDecoder
-		} else if (MP4.isAudioTrack(track)) {
+		} else if (isAudioTrack(track)) {
 			const audioDecoder = new AudioDecoder({
 				output: this.renderer.push.bind(this.renderer),
 				error: console.warn,
@@ -158,3 +157,11 @@ function isAudioDecoder(decoder: AudioDecoder | VideoDecoder): decoder is AudioD
 function isVideoDecoder(decoder: AudioDecoder | VideoDecoder): decoder is VideoDecoder {
 	return decoder instanceof VideoDecoder
 }
+
+function isAudioTrack(track: MP4.Track): track is MP4.AudioTrack {
+	return (track as MP4.AudioTrack).audio !== undefined;
+}
+
+function isVideoTrack(track: MP4.Track): track is MP4.VideoTrack {
+	return (track as MP4.VideoTrack).video !== undefined;
+}

View File

@@ -1,33 +1,89 @@
+import * as Message from "./message"
+import * as Ring from "./ring"
+
 import Transport from "../transport"
-import Media from "../media"
 
-export interface PlayerInit {
-	url: string;
-	fingerprint?: WebTransportHash; // the certificate fingerprint, temporarily needed for local development
-	canvas: HTMLCanvasElement;
-}
+export interface Config {
+	transport: Transport
+	canvas: OffscreenCanvas;
+}
 
+// This class must be created on the main thread due to AudioContext.
 export default class Player {
-	media: Media;
-	transport: Transport;
-
-	constructor(props: PlayerInit) {
-		this.media = new Media({
-			canvas: props.canvas.transferControlToOffscreen(),
-		})
-
-		this.transport = new Transport({
-			url: props.url,
-			fingerprint: props.fingerprint,
-			media: this.media,
-		})
-	}
-
-	async close() {
-		this.transport.close()
-	}
-
-	play() {
-		this.media.play({})
+	context: AudioContext;
+	worker: Worker;
+	worklet: Promise<AudioWorkletNode>;
+
+	transport: Transport
+
+	constructor(config: Config) {
+		this.transport = config.transport
+		this.transport.callback = this;
+
+		const video = {
+			canvas: config.canvas,
+		};
+
+		// Assume 44.1kHz and two audio channels
+		const audio = {
+			sampleRate: 44100,
+			ring: new Ring.Buffer(2, 4410), // 100ms at 44.1khz
+		}
+
+		this.context = new AudioContext({
+			latencyHint: "interactive",
+			sampleRate: audio.sampleRate,
+		})
+
+		this.worker = this.setupWorker({ audio, video })
+		this.worklet = this.setupWorklet(audio)
+	}
+
+	private setupWorker(config: Message.Config): Worker {
+		const url = new URL('worker.ts', import.meta.url)
+
+		const worker = new Worker(url, {
+			type: "module",
+			name: "media",
+		})
+
+		worker.postMessage({ config }, [ config.video.canvas ])
+
+		return worker
+	}
+
+	private async setupWorklet(config: Message.AudioConfig): Promise<AudioWorkletNode> {
+		// Load the worklet source code.
+		const url = new URL('worklet.ts', import.meta.url)
+		await this.context.audioWorklet.addModule(url)
+
+		const volume = this.context.createGain()
+		volume.gain.value = 2.0;
+
+		// Create a worklet
+		const worklet = new AudioWorkletNode(this.context, 'renderer');
+		worklet.onprocessorerror = (e: Event) => {
+			console.error("Audio worklet error:", e)
+		};
+
+		worklet.port.postMessage({ config })
+
+		// Connect the worklet to the volume node and then to the speakers
+		worklet.connect(volume)
+		volume.connect(this.context.destination)
+
+		return worklet
+	}
+
+	onInit(init: Message.Init) {
+		this.worker.postMessage({ init }, [ init.buffer.buffer, init.reader ])
+	}
+
+	onSegment(segment: Message.Segment) {
+		this.worker.postMessage({ segment }, [ segment.buffer.buffer, segment.reader ])
+	}
+
+	play(play: Message.Play) {
+		this.context.resume()
+		//this.worker.postMessage({ play })
 	}
 }
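
Everything posted to the worker rides on transferables: the OffscreenCanvas in setupWorker, and the segment's underlying ArrayBuffer plus its ReadableStream in onInit/onSegment, so payloads move without copying. The worker.ts counterpart is not part of this diff; a hypothetical sketch of the receiving side, under that assumption:

// worker.ts (sketch only; the real file is not shown in this diff)
self.addEventListener("message", (e: MessageEvent) => {
	if (e.data.config) {
		// config.video.canvas arrives as the transferred OffscreenCanvas.
	} else if (e.data.init) {
		// init.buffer's ArrayBuffer was transferred (the sender's copy is detached);
		// init.reader is a transferred ReadableStream that can be read directly here.
	} else if (e.data.segment) {
		// same shape as init: buffered bytes plus the rest of the stream.
	}
})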

View File

@@ -1,5 +1,4 @@
-import * as MP4 from "../mp4"
-import { RingInit } from "../media/ring"
+import * as Ring from "./ring"
 
 export interface Config {
 	audio: AudioConfig;
@@ -13,7 +12,7 @@ export interface VideoConfig {
 export interface AudioConfig {
 	// audio stuff
 	sampleRate: number;
-	ring: RingInit;
+	ring: Ring.Buffer;
 }
 
 export interface Init {

View File

@@ -68,8 +68,11 @@ export default class Renderer {
 	}
 
 	draw(now: DOMHighResTimeStamp) {
+		// Convert to microseconds
+		now *= 1000;
+
 		// Determine the target timestamp.
-		const target = 1000 * now - this.sync!
+		const target = now - this.sync!
 
 		this.drawAudio(now, target)
 		this.drawVideo(now, target)
@@ -85,9 +88,11 @@ export default class Renderer {
 		// Check if we should skip some frames
 		while (this.audioQueue.length) {
 			const next = this.audioQueue[0]
-			if (next.timestamp >= target) {
+			if (next.timestamp > target) {
 				let ok = this.audioRing.write(next)
 				if (!ok) {
+					console.warn("ring buffer is full")
+
 					// No more space in the ring
 					break
 				}
@@ -101,7 +106,7 @@ export default class Renderer {
 	}
 
 	drawVideo(now: DOMHighResTimeStamp, target: DOMHighResTimeStamp) {
-		if (this.videoQueue.length == 0) return;
+		if (!this.videoQueue.length) return;
 
 		let frame = this.videoQueue[0];
 		if (frame.timestamp >= target) {
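
The unit change is the point of the first hunk: draw() receives a DOMHighResTimeStamp in milliseconds (e.g. from requestAnimationFrame), while WebCodecs timestamps (VideoFrame.timestamp, AudioData.timestamp) are in microseconds, so the clock is now converted once up front instead of scaling it inside the target calculation. A small sketch of the arithmetic, assuming this.sync is also stored in microseconds:

// rAF gives milliseconds; WebCodecs frames carry microseconds.
const nowMs: DOMHighResTimeStamp = performance.now()
const nowUs = nowMs * 1000  // same conversion as `now *= 1000` above
const sync = 0              // hypothetical sync offset, in microseconds
const target = nowUs - sync // now directly comparable to frame.timestamp / next.timestamp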

View File

@@ -11,15 +11,15 @@ export class Ring {
 	channels: Float32Array[];
 	capacity: number;
 
-	constructor(init: RingInit) {
-		this.state = new Int32Array(init.state)
+	constructor(buf: Buffer) {
+		this.state = new Int32Array(buf.state)
 
 		this.channels = []
-		for (let channel of init.channels) {
+		for (let channel of buf.channels) {
 			this.channels.push(new Float32Array(channel))
 		}
 
-		this.capacity = init.capacity
+		this.capacity = buf.capacity
 	}
 
 	// Add the samples for single audio frame
@@ -121,7 +121,7 @@ export class Ring {
 }
 
 // No prototype to make this easier to send via postMessage
-export class RingInit {
+export class Buffer {
 	state: SharedArrayBuffer;
 	channels: SharedArrayBuffer[];
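
Buffer keeps the "no prototype" idea from RingInit: it carries no methods, only SharedArrayBuffers, so nothing is lost when postMessage structured-clones it, and the underlying storage stays shared rather than copied. A minimal sketch of the round trip, assuming the constructor still takes (channels, capacity) as used in the player above (the worklet variable is a stand-in for the node created in Player.setupWorklet):

import * as Ring from "./ring"

declare const worklet: AudioWorkletNode // assumed: the node from Player.setupWorklet

// Main thread: 2 channels, 4410 samples ≈ 100ms at 44.1kHz.
// SharedArrayBuffer requires the page to be cross-origin isolated.
const buf = new Ring.Buffer(2, 4410)
worklet.port.postMessage({ config: { sampleRate: 44100, ring: buf } })

// Worklet side (as in worklet.ts above): the clone holds the same SharedArrayBuffers,
// so wrapping it again shares the samples instead of copying them:
// this.ring = new Ring(config.ring)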

View File

@@ -20,12 +20,12 @@ class Renderer extends AudioWorkletProcessor {
 	onMessage(e: MessageEvent) {
 		if (e.data.config) {
-			this.config(e.data.config)
+			this.onConfig(e.data.config)
 		}
 	}
 
-	config(config: Message.Config) {
-		this.ring = new Ring(config.audio.ring)
+	onConfig(config: Message.AudioConfig) {
+		this.ring = new Ring(config.ring)
 	}
 
 	// Inputs and outputs in groups of 128 samples.

View File

@@ -1,25 +1,18 @@
-import * as Message from "./message"
 import * as Stream from "../stream"
-import * as MP4 from "../mp4"
-
-import Media from "../media"
+import * as Interface from "./interface"
 
-export interface TransportInit {
+export interface Config {
 	url: string;
 	fingerprint?: WebTransportHash; // the certificate fingerprint, temporarily needed for local development
-	media: Media;
 }
 
 export default class Transport {
 	quic: Promise<WebTransport>;
 	api: Promise<WritableStream>;
+	callback?: Interface.Callback;
 
-	media: Media;
-
-	constructor(props: TransportInit) {
-		this.media = props.media;
-		this.quic = this.connect(props)
+	constructor(config: Config) {
+		this.quic = this.connect(config)
 
 		// Create a unidirectional stream for all of our messages
 		this.api = this.quic.then((q) => {
@@ -35,13 +28,13 @@ export default class Transport {
 	}
 
 	// Helper function to make creating a promise easier
-	private async connect(props: TransportInit): Promise<WebTransport> {
+	private async connect(config: Config): Promise<WebTransport> {
 		let options: WebTransportOptions = {};
-		if (props.fingerprint) {
-			options.serverCertificateHashes = [ props.fingerprint ]
+		if (config.fingerprint) {
+			options.serverCertificateHashes = [ config.fingerprint ]
 		}
 
-		const quic = new WebTransport(props.url, options)
+		const quic = new WebTransport(config.url, options)
 		await quic.ready
 		return quic
 	}
@@ -86,12 +79,12 @@ export default class Transport {
 		const msg = JSON.parse(payload)
 
 		if (msg.init) {
-			return this.media.init({
+			return this.callback?.onInit({
 				buffer: r.buffer,
 				reader: r.reader,
 			})
 		} else if (msg.segment) {
-			return this.media.segment({
+			return this.callback?.onSegment({
 				buffer: r.buffer,
 				reader: r.reader,
 			})

View File

@@ -0,0 +1,14 @@
+export interface Callback {
+	onInit(init: Init): any
+	onSegment(segment: Segment): any
+}
+
+export interface Init {
+	buffer: Uint8Array; // unread buffered data
+	reader: ReadableStream; // unread unbuffered data
+}
+
+export interface Segment {
+	buffer: Uint8Array; // unread buffered data
+	reader: ReadableStream; // unread unbuffered data
+}
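
This interface is the seam that replaces the old Transport → Media coupling: Transport only knows about a Callback, and Player registers itself as one. A stripped-down sketch of an alternative consumer to show the shape; the class and the import paths are hypothetical, only the Config/Callback types come from this diff:

import Transport from "../transport"
import * as Interface from "../transport/interface"

// Hypothetical callback that just logs arrivals instead of feeding the media worker.
class LoggingCallback implements Interface.Callback {
	onInit(init: Interface.Init) {
		console.log("init:", init.buffer.byteLength, "bytes buffered, more on", init.reader)
	}

	onSegment(segment: Interface.Segment) {
		console.log("segment:", segment.buffer.byteLength, "bytes buffered")
	}
}

const transport = new Transport({ url: "https://127.0.0.1:4443/watch" })
transport.callback = new LoggingCallback()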

View File

@@ -1,12 +1,5 @@
-export interface Init {
-	id: string
-}
-
-export interface Segment {
-	init: string // id of the init segment
-	timestamp: number // presentation timestamp in milliseconds of the first sample
-	// TODO track would be nice
-}
+export interface Init {}
+export interface Segment {}
 
 export interface Debug {
 	max_bitrate: number