Tabs not spaces u nerds. (#18)

kixelated 2023-05-23 12:04:27 -07:00 committed by GitHub
parent dfe5cc1771
commit f4c8c6cf89
44 changed files with 2027 additions and 2094 deletions

.editorconfig (new file)

@@ -0,0 +1,10 @@
root = true
[*]
charset = utf-8
end_of_line = lf
trim_trailing_whitespace = true
insert_final_newline = true
indent_style = tab
indent_size = 4
max_line_length = 120

@@ -26,8 +26,8 @@ jobs:
       - name: test
         run: cargo test --verbose
-      - name: fmt
-        run: cargo fmt --check
       - name: clippy
         run: cargo clippy
+      - name: fmt
+        run: cargo fmt --check

@@ -25,8 +25,5 @@ jobs:
       - name: build
         run: yarn build
-      - name: fmt
-        run: yarn prettier --check .
       - name: lint
         run: yarn lint

server/.rustfmt.toml (new file)

@@ -0,0 +1,4 @@
# i die on this hill
hard_tabs = true
max_width = 120

@@ -1,3 +0,0 @@
{
"rust-analyzer.showUnlinkedFileNotification": false
}

@@ -5,34 +5,34 @@ use clap::Parser;

/// Search for a pattern in a file and display the lines that contain it.
#[derive(Parser)]
struct Cli {
	/// Listen on this address
	#[arg(short, long, default_value = "[::]:4443")]
	addr: String,

	/// Use the certificate file at this path
	#[arg(short, long, default_value = "../cert/localhost.crt")]
	cert: String,

	/// Use the private key at this path
	#[arg(short, long, default_value = "../cert/localhost.key")]
	key: String,

	/// Use the media file at this path
	#[arg(short, long, default_value = "../media/fragmented.mp4")]
	media: String,
}

fn main() -> anyhow::Result<()> {
	env_logger::init();
	let args = Cli::parse();

	let server_config = transport::Config {
		addr: args.addr,
		cert: args.cert,
		key: args.key,
	};

	let mut server = transport::Server::<session::Session>::new(server_config).unwrap();
	server.run()
}

@@ -8,223 +8,219 @@ use mp4;
use mp4::ReadBox;

pub struct Source {
	// We read the file once, in order, and don't seek backwards.
	reader: io::BufReader<fs::File>,

	// The timestamp when the broadcast "started", so we can sleep to simulate a live stream.
	start: time::Instant,

	// The initialization payload; ftyp + moov boxes.
	pub init: Vec<u8>,

	// The timescale used for each track.
	timescales: HashMap<u32, u32>,

	// Any fragments parsed and ready to be returned by next().
	fragments: VecDeque<Fragment>,
}

pub struct Fragment {
	// The track ID for the fragment.
	pub track_id: u32,

	// The data of the fragment.
	pub data: Vec<u8>,

	// Whether this fragment is a keyframe.
	pub keyframe: bool,

	// The timestamp of the fragment, in milliseconds, to simulate a live stream.
	pub timestamp: u64,
}

impl Source {
	pub fn new(path: &str) -> anyhow::Result<Self> {
		let f = fs::File::open(path)?;
		let mut reader = io::BufReader::new(f);
		let start = time::Instant::now();

		let ftyp = read_atom(&mut reader)?;
		anyhow::ensure!(&ftyp[4..8] == b"ftyp", "expected ftyp atom");

		let moov = read_atom(&mut reader)?;
		anyhow::ensure!(&moov[4..8] == b"moov", "expected moov atom");

		let mut init = ftyp;
		init.extend(&moov);

		// We're going to parse the moov box.
		// We have to read the moov box header to correctly advance the cursor for the mp4 crate.
		let mut moov_reader = io::Cursor::new(&moov);
		let moov_header = mp4::BoxHeader::read(&mut moov_reader)?;

		// Parse the moov box so we can detect the timescales for each track.
		let moov = mp4::MoovBox::read_box(&mut moov_reader, moov_header.size)?;

		Ok(Self {
			reader,
			start,
			init,
			timescales: timescales(&moov),
			fragments: VecDeque::new(),
		})
	}

	pub fn fragment(&mut self) -> anyhow::Result<Option<Fragment>> {
		if self.fragments.is_empty() {
			self.parse()?;
		};

		if self.timeout().is_some() {
			return Ok(None);
		}

		Ok(self.fragments.pop_front())
	}

	fn parse(&mut self) -> anyhow::Result<()> {
		loop {
			let atom = read_atom(&mut self.reader)?;

			let mut reader = io::Cursor::new(&atom);
			let header = mp4::BoxHeader::read(&mut reader)?;

			match header.name {
				mp4::BoxType::FtypBox | mp4::BoxType::MoovBox => {
					anyhow::bail!("must call init first")
				}
				mp4::BoxType::MoofBox => {
					let moof = mp4::MoofBox::read_box(&mut reader, header.size)?;

					if moof.trafs.len() != 1 {
						// We can't split the mdat atom, so this is impossible to support
						anyhow::bail!("multiple tracks per moof atom")
					}

					self.fragments.push_back(Fragment {
						track_id: moof.trafs[0].tfhd.track_id,
						data: atom,
						keyframe: has_keyframe(&moof),
						timestamp: first_timestamp(&moof).expect("couldn't find timestamp"),
					})
				}
				mp4::BoxType::MdatBox => {
					let moof = self.fragments.back().expect("no atom before mdat");

					self.fragments.push_back(Fragment {
						track_id: moof.track_id,
						data: atom,
						keyframe: false,
						timestamp: moof.timestamp,
					});

					// We have some media data, return so we can start sending it.
					return Ok(());
				}
				_ => {
					// Skip unknown atoms
				}
			}
		}
	}

	// Simulate a live stream by sleeping until the next timestamp in the media.
	pub fn timeout(&self) -> Option<time::Duration> {
		let next = self.fragments.front()?;
		let timestamp = next.timestamp;

		// Find the timescale for the track.
		let timescale = self.timescales.get(&next.track_id).unwrap();
		let delay = time::Duration::from_millis(1000 * timestamp / *timescale as u64);
		let elapsed = self.start.elapsed();

		delay.checked_sub(elapsed)
	}
}

// Read a full MP4 atom into a vector.
pub fn read_atom<R: Read>(reader: &mut R) -> anyhow::Result<Vec<u8>> {
	// Read the 8 bytes for the size + type
	let mut buf = [0u8; 8];
	reader.read_exact(&mut buf)?;

	// Convert the first 4 bytes into the size.
	let size = u32::from_be_bytes(buf[0..4].try_into()?) as u64;
	//let typ = &buf[4..8].try_into().ok().unwrap();

	let mut raw = buf.to_vec();

	let mut limit = match size {
		// Runs until the end of the file.
		0 => reader.take(u64::MAX),

		// The next 8 bytes are the extended size to be used instead.
		1 => {
			reader.read_exact(&mut buf)?;
			let size_large = u64::from_be_bytes(buf);
			anyhow::ensure!(size_large >= 16, "impossible extended box size: {}", size_large);

			reader.take(size_large - 16)
		}

		2..=7 => {
			anyhow::bail!("impossible box size: {}", size)
		}

		// Otherwise read based on the size.
		size => reader.take(size - 8),
	};

	// Append to the vector and return it.
	limit.read_to_end(&mut raw)?;

	Ok(raw)
}

fn has_keyframe(moof: &mp4::MoofBox) -> bool {
	for traf in &moof.trafs {
		// TODO trak default flags if this is None
		let default_flags = traf.tfhd.default_sample_flags.unwrap_or_default();
		let trun = match &traf.trun {
			Some(t) => t,
			None => return false,
		};

		for i in 0..trun.sample_count {
			let mut flags = match trun.sample_flags.get(i as usize) {
				Some(f) => *f,
				None => default_flags,
			};

			if i == 0 && trun.first_sample_flags.is_some() {
				flags = trun.first_sample_flags.unwrap();
			}

			// https://chromium.googlesource.com/chromium/src/media/+/master/formats/mp4/track_run_iterator.cc#177
			let keyframe = (flags >> 24) & 0x3 == 0x2; // kSampleDependsOnNoOther
			let non_sync = (flags >> 16) & 0x1 == 0x1; // kSampleIsNonSyncSample

			if keyframe && !non_sync {
				return true;
			}
		}
	}

	false
}

fn first_timestamp(moof: &mp4::MoofBox) -> Option<u64> {
	Some(moof.trafs.first()?.tfdt.as_ref()?.base_media_decode_time)
}

fn timescales(moov: &mp4::MoovBox) -> HashMap<u32, u32> {
	moov.traks
		.iter()
		.map(|trak| (trak.tkhd.track_id, trak.mdia.mdhd.timescale))
		.collect()
}

@@ -2,8 +2,8 @@ use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize)]
pub struct Message {
	pub init: Option<Init>,
	pub segment: Option<Segment>,
}

#[derive(Serialize, Deserialize)]
@@ -11,27 +11,27 @@ pub struct Init {}

#[derive(Serialize, Deserialize)]
pub struct Segment {
	pub track_id: u32,
}

impl Message {
	pub fn new() -> Self {
		Message {
			init: None,
			segment: None,
		}
	}

	pub fn serialize(&self) -> anyhow::Result<Vec<u8>> {
		let str = serde_json::to_string(self)?;
		let bytes = str.as_bytes();
		let size = bytes.len() + 8;
		let mut out = Vec::with_capacity(size);

		out.extend_from_slice(&(size as u32).to_be_bytes());
		out.extend_from_slice(b"warp");
		out.extend_from_slice(bytes);

		Ok(out)
	}
}

@@ -10,145 +10,141 @@ use crate::{media, transport};

#[derive(Default)]
pub struct Session {
	media: Option<media::Source>,
	streams: transport::Streams, // An easy way of buffering stream data.
	tracks: hmap::HashMap<u32, u64>, // map from track_id to current stream_id
}

impl transport::App for Session {
	// Process any updates to a session.
	fn poll(&mut self, conn: &mut quiche::Connection, session: &mut webtransport::ServerSession) -> anyhow::Result<()> {
		loop {
			let event = match session.poll(conn) {
				Err(webtransport::Error::Done) => break,
				Err(e) => return Err(e.into()),
				Ok(e) => e,
			};

			log::debug!("webtransport event {:?}", event);

			match event {
				webtransport::ServerEvent::ConnectRequest(_req) => {
					// you can handle request with
					// req.authority()
					// req.path()
					// and you can validate this request with req.origin()
					session.accept_connect_request(conn, None)?;

					// TODO
					let media = media::Source::new("../media/fragmented.mp4")?;
					let init = &media.init;

					// Create a JSON header.
					let mut message = message::Message::new();
					message.init = Some(message::Init {});
					let data = message.serialize()?;

					// Create a new stream and write the header.
					let stream_id = session.open_stream(conn, false)?;
					self.streams.send(conn, stream_id, data.as_slice(), false)?;
					self.streams.send(conn, stream_id, init.as_slice(), true)?;

					self.media = Some(media);
				}
				webtransport::ServerEvent::StreamData(stream_id) => {
					let mut buf = vec![0; 10000];
					while let Ok(len) = session.recv_stream_data(conn, stream_id, &mut buf) {
						let _stream_data = &buf[0..len];
					}
				}

				_ => {}
			}
		}

		// Send any pending stream data.
		// NOTE: This doesn't return an error because it's async, and would be confusing.
		self.streams.poll(conn);

		// Fetch the next media fragment, possibly queuing up stream data.
		self.poll_source(conn, session)?;

		Ok(())
	}

	fn timeout(&self) -> Option<time::Duration> {
		self.media.as_ref().and_then(|m| m.timeout())
	}
}

impl Session {
	fn poll_source(
		&mut self,
		conn: &mut quiche::Connection,
		session: &mut webtransport::ServerSession,
	) -> anyhow::Result<()> {
		// Get the media source once the connection is established.
		let media = match &mut self.media {
			Some(m) => m,
			None => return Ok(()),
		};

		// Get the next media fragment.
		let fragment = match media.fragment()? {
			Some(f) => f,
			None => return Ok(()),
		};

		let stream_id = match self.tracks.get(&fragment.track_id) {
			// Close the old stream.
			Some(stream_id) if fragment.keyframe => {
				self.streams.send(conn, *stream_id, &[], true)?;
				None
			}

			// Use the existing stream
			Some(stream_id) => Some(*stream_id),

			// No existing stream.
			_ => None,
		};

		let stream_id = match stream_id {
			// Use the existing stream,
			Some(stream_id) => stream_id,

			// Open a new stream.
			None => {
				// Create a new unidirectional stream.
				let stream_id = session.open_stream(conn, false)?;

				// Set the stream priority to be equal to the timestamp.
				// We subtract from u64::MAX so newer media is sent important.
				// TODO prioritize audio
				let order = u64::MAX - fragment.timestamp;
				self.streams.send_order(conn, stream_id, order);

				// Encode a JSON header indicating this is a new track.
				let mut message: message::Message = message::Message::new();
				message.segment = Some(message::Segment {
					track_id: fragment.track_id,
				});

				// Write the header.
				let data = message.serialize()?;
				self.streams.send(conn, stream_id, &data, false)?;

				// Keep a mapping from the track id to the current stream id.
				self.tracks.insert(fragment.track_id, stream_id);

				stream_id
			}
		};

		// Write the current fragment.
		let data = fragment.data.as_slice();
		self.streams.send(conn, stream_id, data, false)?;

		Ok(())
	}
}

@@ -3,10 +3,6 @@ use std::time;

use quiche::h3::webtransport;

pub trait App: Default {
	fn poll(&mut self, conn: &mut quiche::Connection, session: &mut webtransport::ServerSession) -> anyhow::Result<()>;
	fn timeout(&self) -> Option<time::Duration>;
}

@@ -9,7 +9,7 @@ use super::app;

pub type Map<T> = hmap::HashMap<Id, Connection<T>>;

pub struct Connection<T: app::App> {
	pub quiche: quiche::Connection,
	pub session: Option<webtransport::ServerSession>,
	pub app: T,
}

@@ -8,336 +8,319 @@ use super::connection;

const MAX_DATAGRAM_SIZE: usize = 1350;

pub struct Server<T: app::App> {
	// IO stuff
	socket: mio::net::UdpSocket,
	poll: mio::Poll,
	events: mio::Events,

	// QUIC stuff
	quic: quiche::Config,
	seed: ring::hmac::Key, // connection ID seed
	conns: connection::Map<T>,
}

pub struct Config {
	pub addr: String,
	pub cert: String,
	pub key: String,
}

impl<T: app::App> Server<T> {
	pub fn new(config: Config) -> io::Result<Self> {
		// Listen on the provided socket address
		let addr = config.addr.parse().unwrap();
		let mut socket = mio::net::UdpSocket::bind(addr).unwrap();

		// Setup the event loop.
		let poll = mio::Poll::new().unwrap();
		let events = mio::Events::with_capacity(1024);

		poll.registry()
			.register(&mut socket, mio::Token(0), mio::Interest::READABLE)
			.unwrap();

		// Generate random values for connection IDs.
		let rng = ring::rand::SystemRandom::new();
		let seed = ring::hmac::Key::generate(ring::hmac::HMAC_SHA256, &rng).unwrap();

		// Create the configuration for the QUIC conns.
		let mut quic = quiche::Config::new(quiche::PROTOCOL_VERSION).unwrap();
		quic.load_cert_chain_from_pem_file(&config.cert).unwrap();
		quic.load_priv_key_from_pem_file(&config.key).unwrap();
		quic.set_application_protos(quiche::h3::APPLICATION_PROTOCOL).unwrap();
		quic.set_max_idle_timeout(5000);
		quic.set_max_recv_udp_payload_size(MAX_DATAGRAM_SIZE);
		quic.set_max_send_udp_payload_size(MAX_DATAGRAM_SIZE);
		quic.set_initial_max_data(10_000_000);
		quic.set_initial_max_stream_data_bidi_local(1_000_000);
		quic.set_initial_max_stream_data_bidi_remote(1_000_000);
		quic.set_initial_max_stream_data_uni(1_000_000);
		quic.set_initial_max_streams_bidi(100);
		quic.set_initial_max_streams_uni(100);
		quic.set_disable_active_migration(true);
		quic.enable_early_data();
		quic.enable_dgram(true, 65536, 65536);

		let conns = Default::default();

		Ok(Server {
			socket,
			poll,
			events,
			quic,
			seed,
			conns,
		})
	}

	pub fn run(&mut self) -> anyhow::Result<()> {
		log::info!("listening on {}", self.socket.local_addr()?);

		loop {
			self.wait()?;
			self.receive()?;
			self.app()?;
			self.send()?;
			self.cleanup();
		}
	}

	pub fn wait(&mut self) -> anyhow::Result<()> {
		// Find the shorter timeout from all the active connections.
		//
		// TODO: use event loop that properly supports timers
		let timeout = self
			.conns
			.values()
			.filter_map(|c| {
				let timeout = c.quiche.timeout();
				let expires = c.app.timeout();

				match (timeout, expires) {
					(Some(a), Some(b)) => Some(a.min(b)),
					(Some(a), None) => Some(a),
					(None, Some(b)) => Some(b),
					(None, None) => None,
				}
			})
			.min();

		self.poll.poll(&mut self.events, timeout).unwrap();

		// If the event loop reported no events, it means that the timeout
		// has expired, so handle it without attempting to read packets. We
		// will then proceed with the send loop.
		if self.events.is_empty() {
			for conn in self.conns.values_mut() {
				conn.quiche.on_timeout();
			}
		}

		Ok(())
	}

	// Reads packets from the socket, updating any internal connection state.
	fn receive(&mut self) -> anyhow::Result<()> {
		let mut src = [0; MAX_DATAGRAM_SIZE];

		// Try reading any data currently available on the socket.
		loop {
			let (len, from) = match self.socket.recv_from(&mut src) {
				Ok(v) => v,
				Err(e) if e.kind() == std::io::ErrorKind::WouldBlock => return Ok(()),
				Err(e) => return Err(e.into()),
			};

			let src = &mut src[..len];

			let info = quiche::RecvInfo {
				to: self.socket.local_addr().unwrap(),
				from,
			};

			// Parse the QUIC packet's header.
			let hdr = quiche::Header::from_slice(src, quiche::MAX_CONN_ID_LEN).unwrap();

			let conn_id = ring::hmac::sign(&self.seed, &hdr.dcid);
			let conn_id = &conn_id.as_ref()[..quiche::MAX_CONN_ID_LEN];
			let conn_id = conn_id.to_vec().into();

			// Check if it's an existing connection.
			if let Some(conn) = self.conns.get_mut(&hdr.dcid) {
				conn.quiche.recv(src, info)?;

				if conn.session.is_none() && conn.quiche.is_established() {
					conn.session = Some(webtransport::ServerSession::with_transport(&mut conn.quiche)?)
				}

				continue;
			} else if let Some(conn) = self.conns.get_mut(&conn_id) {
				conn.quiche.recv(src, info)?;

				// TODO is this needed here?
				if conn.session.is_none() && conn.quiche.is_established() {
					conn.session = Some(webtransport::ServerSession::with_transport(&mut conn.quiche)?)
				}

				continue;
			}

			if hdr.ty != quiche::Type::Initial {
				log::warn!("unknown connection ID");
				continue;
			}

			let mut dst = [0; MAX_DATAGRAM_SIZE];

			if !quiche::version_is_supported(hdr.version) {
				let len = quiche::negotiate_version(&hdr.scid, &hdr.dcid, &mut dst).unwrap();
				let dst = &dst[..len];

				self.socket.send_to(dst, from).unwrap();
				continue;
			}

			let mut scid = [0; quiche::MAX_CONN_ID_LEN];
			scid.copy_from_slice(&conn_id);

			let scid = quiche::ConnectionId::from_ref(&scid);

			// Token is always present in Initial packets.
			let token = hdr.token.as_ref().unwrap();

			// Do stateless retry if the client didn't send a token.
			if token.is_empty() {
				let new_token = mint_token(&hdr, &from);

				let len = quiche::retry(&hdr.scid, &hdr.dcid, &scid, &new_token, hdr.version, &mut dst).unwrap();

				let dst = &dst[..len];

				self.socket.send_to(dst, from).unwrap();
				continue;
			}

			let odcid = validate_token(&from, token);

			// The token was not valid, meaning the retry failed, so
			// drop the packet.
			if odcid.is_none() {
				log::warn!("invalid token");
				continue;
			}

			if scid.len() != hdr.dcid.len() {
				log::warn!("invalid connection ID");
				continue;
			}

			// Reuse the source connection ID we sent in the Retry packet,
			// instead of changing it again.
			let conn_id = hdr.dcid.clone();
			let local_addr = self.socket.local_addr().unwrap();

			log::debug!("new connection: dcid={:?} scid={:?}", hdr.dcid, scid);

			let mut conn = quiche::accept(&conn_id, odcid.as_ref(), local_addr, from, &mut self.quic)?;

			// Log each session with QLOG if the ENV var is set.
			if let Some(dir) = std::env::var_os("QLOGDIR") {
				let id = format!("{:?}", &scid);

				let mut path = std::path::PathBuf::from(dir);
				let filename = format!("server-{id}.sqlog");
				path.push(filename);

				let writer = match std::fs::File::create(&path) {
					Ok(f) => std::io::BufWriter::new(f),
					Err(e) => panic!("Error creating qlog file attempted path was {:?}: {}", path, e),
				};

				conn.set_qlog(
					std::boxed::Box::new(writer),
					"warp-server qlog".to_string(),
					format!("{} id={}", "warp-server qlog", id),
				);
			}

			// Process potentially coalesced packets.
			conn.recv(src, info)?;

			let user = connection::Connection {
				quiche: conn,
				session: None,
				app: T::default(),
			};

			self.conns.insert(conn_id, user);
		}
	}

	pub fn app(&mut self) -> anyhow::Result<()> {
		for conn in self.conns.values_mut() {
			if conn.quiche.is_closed() {
				continue;
			}

			if let Some(session) = &mut conn.session {
				if let Err(e) = conn.app.poll(&mut conn.quiche, session) {
					log::debug!("app error: {:?}", e);

					// Close the connection on any application error
					let reason = format!("app error: {:?}", e);
					conn.quiche.close(true, 0xff, reason.as_bytes()).ok();
				}
			}
		}

		Ok(())
	}

	// Generate outgoing QUIC packets for all active connections and send
	// them on the UDP socket, until quiche reports that there are no more
	// packets to be sent.
	pub fn send(&mut self) -> anyhow::Result<()> {
		for conn in self.conns.values_mut() {
			let conn = &mut conn.quiche;

			if let Err(e) = send_conn(&self.socket, conn) {
				log::error!("{} send failed: {:?}", conn.trace_id(), e);
				conn.close(false, 0x1, b"fail").ok();
			}
		}

		Ok(())
	}

	pub fn cleanup(&mut self) {
		// Garbage collect closed connections.
		self.conns.retain(|_, ref mut c| !c.quiche.is_closed());
	}
}

// Send any pending packets for the connection over the socket.
fn send_conn(socket: &mio::net::UdpSocket, conn: &mut quiche::Connection) -> anyhow::Result<()> {
	let mut pkt = [0; MAX_DATAGRAM_SIZE];

	loop {
		let (size, info) = match conn.send(&mut pkt) {
			Ok(v) => v,
			Err(quiche::Error::Done) => return Ok(()),
			Err(e) => return Err(e.into()),
		};

		let pkt = &pkt[..size];

		match socket.send_to(pkt, info.to) {
			Err(e) if e.kind() == io::ErrorKind::WouldBlock => return Ok(()),
			Err(e) => return Err(e.into()),
			Ok(_) => (),
		}
	}
}

/// Generate a stateless retry token.
@@ -349,19 +332,19 @@ fn send_conn(socket: &mio::net::UdpSocket, conn: &mut quiche::Connection) -> anyhow::Result<()> {
/// Note that this function is only an example and doesn't do any cryptographic
/// authenticate of the token. *It should not be used in production system*.
fn mint_token(hdr: &quiche::Header, src: &std::net::SocketAddr) -> Vec<u8> {
	let mut token = Vec::new();

	token.extend_from_slice(b"quiche");

	let addr = match src.ip() {
		std::net::IpAddr::V4(a) => a.octets().to_vec(),
		std::net::IpAddr::V6(a) => a.octets().to_vec(),
	};

	token.extend_from_slice(&addr);
	token.extend_from_slice(&hdr.dcid);

	token
}

/// Validates a stateless retry token.
@@ -371,28 +354,25 @@ fn mint_token(hdr: &quiche::Header, src: &std::net::SocketAddr) -> Vec<u8> {
///
/// Note that this function is only an example and doesn't do any cryptographic
/// authenticate of the token. *It should not be used in production system*.
fn validate_token<'a>(src: &std::net::SocketAddr, token: &'a [u8]) -> Option<quiche::ConnectionId<'a>> {
	if token.len() < 6 {
		return None;
	}

	if &token[..6] != b"quiche" {
		return None;
	}

	let token = &token[6..];

	let addr = match src.ip() {
		std::net::IpAddr::V4(a) => a.octets().to_vec(),
		std::net::IpAddr::V6(a) => a.octets().to_vec(),
	};

	if token.len() < addr.len() || &token[..addr.len()] != addr.as_slice() {
		return None;
	}

	Some(quiche::ConnectionId::from_ref(&token[addr.len()..]))
}

@@ -5,145 +5,132 @@ use quiche;

#[derive(Default)]
pub struct Streams {
	ordered: Vec<Stream>,
}

struct Stream {
	id: u64,
	order: u64,
	buffer: VecDeque<u8>,
	fin: bool,
}

impl Streams {
	// Write the data to the given stream, buffering it if needed.
	pub fn send(&mut self, conn: &mut quiche::Connection, id: u64, buf: &[u8], fin: bool) -> anyhow::Result<()> {
		if buf.is_empty() && !fin {
			return Ok(());
		}

		// Get the index of the stream, or add it to the list of streams.
		let pos = self.ordered.iter().position(|s| s.id == id).unwrap_or_else(|| {
			// Create a new stream
			let stream = Stream {
				id,
				buffer: VecDeque::new(),
				fin: false,
				order: 0, // Default to highest priority until send_order is called.
			};

			self.insert(conn, stream)
		});

		let stream = &mut self.ordered[pos];

		// Check if we've already closed the stream, just in case.
		if stream.fin && !buf.is_empty() {
			anyhow::bail!("stream is already finished");
		}

		// If there's no data buffered, try to write it immediately.
		let size = if stream.buffer.is_empty() {
			match conn.stream_send(id, buf, fin) {
				Ok(size) => size,
				Err(quiche::Error::Done) => 0,
				Err(e) => anyhow::bail!(e),
			}
		} else {
			0
		};

		if size < buf.len() {
			// Short write, save the rest for later.
			stream.buffer.extend(&buf[size..]);
		}

		stream.fin |= fin;

		Ok(())
	}

	// Flush any pending stream data.
	pub fn poll(&mut self, conn: &mut quiche::Connection) {
		self.ordered.retain_mut(|s| s.poll(conn).is_ok());
	}

	// Set the send order of the stream.
	pub fn send_order(&mut self, conn: &mut quiche::Connection, id: u64, order: u64) {
		let mut stream = match self.ordered.iter().position(|s| s.id == id) {
			// Remove the stream from the existing list.
			Some(pos) => self.ordered.remove(pos),

			// This is a new stream, insert it into the list.
			None => Stream {
				id,
				buffer: VecDeque::new(),
				fin: false,
				order,
			},
		};

		stream.order = order;
		self.insert(conn, stream);
	}

	fn insert(&mut self, conn: &mut quiche::Connection, stream: Stream) -> usize {
		// Look for the position to insert the stream.
		let pos = match self.ordered.binary_search_by_key(&stream.order, |s| s.order) {
			Ok(pos) | Err(pos) => pos,
		};

		self.ordered.insert(pos, stream);

		// Reprioritize all later streams.
		// TODO we can avoid this if stream_priorty takes a u64
		for (i, stream) in self.ordered[pos..].iter().enumerate() {
			_ = conn.stream_priority(stream.id, (pos + i) as u8, true);
		}

		pos
	}
}

impl Stream {
	fn poll(&mut self, conn: &mut quiche::Connection) -> quiche::Result<()> {
		// Keep reading from the buffer until it's empty.
		while !self.buffer.is_empty() {
			// VecDeque is a ring buffer, so we can't write the whole thing at once.
			let parts = self.buffer.as_slices();

			let size = conn.stream_send(self.id, parts.0, false)?;
			if size == 0 {
				// No more space available for this stream.
				return Ok(());
			}

			// Remove the bytes that were written.
			self.buffer.drain(..size);
		}

		if self.fin {
			// Write the stream done signal.
			conn.stream_send(self.id, &[], true)?;
			Err(quiche::Error::Done)
		} else {
			Ok(())
		}
	}
}

@@ -1,26 +1,23 @@
/* eslint-env node */
module.exports = {
	extends: ["eslint:recommended", "plugin:@typescript-eslint/recommended", "prettier"],
	parser: "@typescript-eslint/parser",
	plugins: ["@typescript-eslint", "prettier"],
	root: true,
	ignorePatterns: ["dist", "node_modules"],
	rules: {
		"@typescript-eslint/ban-ts-comment": "off",
		"@typescript-eslint/no-non-null-assertion": "off",
		"@typescript-eslint/no-explicit-any": "off",
		"no-unused-vars": "off", // note you must disable the base rule as it can report incorrect errors
		"@typescript-eslint/no-unused-vars": [
			"warn", // or "error"
			{
				argsIgnorePattern: "^_",
				varsIgnorePattern: "^_",
				caughtErrorsIgnorePattern: "^_",
			},
		],
		"prettier/prettier": 2, // Means error
	},
}

@@ -1,3 +0,0 @@
{
"semi": false
}

web/.prettierrc.yaml (new file)

@@ -0,0 +1,4 @@
# note: root .editorconfig is used
# Don't insert semi-colons unless needed
semi: false

@@ -1,7 +1,7 @@
module.exports = function (app) {
	app.use((req, res, next) => {
		res.setHeader("Cross-Origin-Opener-Policy", "same-origin")
		res.setHeader("Cross-Origin-Embedder-Policy", "require-corp")
		next()
	})
}

@@ -1,27 +1,28 @@
{
	"license": "Apache-2.0",
	"source": "src/index.html",
	"scripts": {
		"serve": "parcel serve --https --cert ../cert/localhost.crt --key ../cert/localhost.key --port 4444 --open",
		"build": "parcel build",
		"check": "tsc --noEmit",
		"lint": "eslint .",
		"fmt": "prettier --write ."
	},
	"devDependencies": {
		"@parcel/transformer-inline-string": "2.8.3",
		"@parcel/validator-typescript": "^2.6.0",
		"@types/audioworklet": "^0.0.41",
		"@types/dom-webcodecs": "^0.1.6",
		"@typescript-eslint/eslint-plugin": "^5.59.7",
		"@typescript-eslint/parser": "^5.59.7",
		"eslint": "^8.41.0",
		"eslint-config-prettier": "^8.8.0",
		"eslint-plugin-prettier": "^4.2.1",
		"parcel": "^2.8.0",
		"prettier": "^2.8.8",
		"typescript": "^5.0.4"
	},
	"dependencies": {
		"mp4box": "^0.5.2"
	}
}

@@ -1,104 +1,104 @@
import * as MP4 from "../mp4"

export class Encoder {
	container: MP4.ISOFile
	audio: AudioEncoder
	video: VideoEncoder

	constructor() {
		this.container = new MP4.ISOFile()

		this.audio = new AudioEncoder({
			output: this.onAudio.bind(this),
			error: console.warn,
		})

		this.video = new VideoEncoder({
			output: this.onVideo.bind(this),
			error: console.warn,
		})

		this.container.init()

		this.audio.configure({
			codec: "mp4a.40.2",
			numberOfChannels: 2,
			sampleRate: 44100,
			// TODO bitrate
		})

		this.video.configure({
			codec: "avc1.42002A", // TODO h.264 baseline
			avc: { format: "avc" }, // or annexb
			width: 1280,
			height: 720,
			// TODO bitrate
			// TODO bitrateMode
			// TODO framerate
			// TODO latencyMode
		})
	}

	onAudio(frame: EncodedAudioChunk, metadata: EncodedAudioChunkMetadata) {
		const config = metadata.decoderConfig!
		const track_id = 1

		if (!this.container.getTrackById(track_id)) {
			this.container.addTrack({
				id: track_id,
				type: "mp4a", // TODO wrong
				timescale: 1000, // TODO verify
				channel_count: config.numberOfChannels,
				samplerate: config.sampleRate,
				description: config.description, // TODO verify
				// TODO description_boxes?: Box[];
			})
		}

		const buffer = new Uint8Array(frame.byteLength)
		frame.copyTo(buffer)

		// TODO cts?
		const sample = this.container.addSample(track_id, buffer, {
			is_sync: frame.type == "key",
			duration: frame.duration!,
			dts: frame.timestamp,
		})

		const _stream = this.container.createSingleSampleMoof(sample)
	}

	onVideo(frame: EncodedVideoChunk, metadata?: EncodedVideoChunkMetadata) {
		const config = metadata!.decoderConfig!
		const track_id = 2

		if (!this.container.getTrackById(track_id)) {
			this.container.addTrack({
				id: 2,
				type: "avc1",
				width: config.codedWidth,
				height: config.codedHeight,
				timescale: 1000, // TODO verify
				description: config.description, // TODO verify
				// TODO description_boxes?: Box[];
			})
		}

		const buffer = new Uint8Array(frame.byteLength)
		frame.copyTo(buffer)

		// TODO cts?
		const sample = this.container.addSample(track_id, buffer, {
			is_sync: frame.type == "key",
			duration: frame.duration!,
			dts: frame.timestamp,
		})

		const _stream = this.container.createSingleSampleMoof(sample)
	}
}

@@ -1,5 +1,5 @@
export default class Broadcaster {
	constructor() {
		// TODO
	}
}

@@ -1,75 +1,75 @@
html,
body,
#player {
	width: 100%;
}

body {
	background: #000000;
	color: #ffffff;
	padding: 0;
	margin: 0;
	display: flex;
	justify-content: center;
	font-family: sans-serif;
}

#screen {
	position: relative;
}

#screen #play {
	position: absolute;
	width: 100%;
	height: 100%;
	background: rgba(0, 0, 0, 0.5);
	display: flex;
	justify-content: center;
	align-items: center;
	z-index: 1;
}

#controls {
	display: flex;
	flex-wrap: wrap;
	padding: 8px 16px;
}

#controls > * {
	margin-right: 8px;
}

#controls label {
	margin-right: 8px;
}

#stats {
	display: grid;
	grid-template-columns: auto 1fr;
}

#stats label {
	padding: 0 1rem;
}

.buffer {
	position: relative;
	width: 100%;
}

.buffer .fill {
	position: absolute;
	transition-duration: 0.1s;
	transition-property: left, right, background-color;
	background-color: RebeccaPurple;
	height: 100%;
	text-align: right;
	padding-right: 0.5rem;
	overflow: hidden;
}

.buffer .fill.net {
	background-color: Purple;
}

@@ -1,33 +1,33 @@
<!DOCTYPE html>
<html>
	<head>
		<meta charset="UTF-8" />
		<title>WARP</title>
		<link rel="stylesheet" href="index.css" />
	</head>
	<body>
		<div id="player">
			<div id="screen">
				<div id="play"><span>click to play</span></div>
				<canvas id="video" width="1280" height="720"></canvas>
			</div>
			<div id="controls">
				<button type="button" id="live">Go Live</button>
				<button type="button" id="throttle">Throttle: None</button>
			</div>
			<div id="stats">
				<label>Audio Buffer:</label>
				<div class="audio buffer"></div>
				<label>Video Buffer:</label>
				<div class="video buffer"></div>
			</div>
		</div>
		<script src="index.ts" type="module"></script>
	</body>
</html>


@@ -7,7 +7,7 @@ import fingerprintHex from "bundle-text:../fingerprint.hex"
// Convert the hex to binary. // Convert the hex to binary.
const fingerprint = [] const fingerprint = []
for (let c = 0; c < fingerprintHex.length - 1; c += 2) { for (let c = 0; c < fingerprintHex.length - 1; c += 2) {
fingerprint.push(parseInt(fingerprintHex.substring(c, c + 2), 16)) fingerprint.push(parseInt(fingerprintHex.substring(c, c + 2), 16))
} }
const params = new URLSearchParams(window.location.search) const params = new URLSearchParams(window.location.search)
@@ -16,27 +16,27 @@ const url = params.get("url") || "https://localhost:4443/watch"
const canvas = document.querySelector<HTMLCanvasElement>("canvas#video")! const canvas = document.querySelector<HTMLCanvasElement>("canvas#video")!
const transport = new Transport({ const transport = new Transport({
url: url, url: url,
fingerprint: { fingerprint: {
// TODO remove when Chrome accepts the system CA // TODO remove when Chrome accepts the system CA
algorithm: "sha-256", algorithm: "sha-256",
value: new Uint8Array(fingerprint), value: new Uint8Array(fingerprint),
}, },
}) })
const player = new Player({ const player = new Player({
transport, transport,
canvas: canvas.transferControlToOffscreen(), canvas: canvas.transferControlToOffscreen(),
}) })
const play = document.querySelector<HTMLElement>("#screen #play")! const play = document.querySelector<HTMLElement>("#screen #play")!
const playFunc = (e: Event) => { const playFunc = (e: Event) => {
player.play() player.play()
e.preventDefault() e.preventDefault()
play.removeEventListener("click", playFunc) play.removeEventListener("click", playFunc)
play.style.display = "none" play.style.display = "none"
} }
play.addEventListener("click", playFunc) play.addEventListener("click", playFunc)


@@ -1,16 +1,16 @@
// Rename some stuff so it's on brand. // Rename some stuff so it's on brand.
export { export {
createFile as New, createFile as New,
MP4File as File, MP4File as File,
MP4ArrayBuffer as ArrayBuffer, MP4ArrayBuffer as ArrayBuffer,
MP4Info as Info, MP4Info as Info,
MP4Track as Track, MP4Track as Track,
MP4AudioTrack as AudioTrack, MP4AudioTrack as AudioTrack,
MP4VideoTrack as VideoTrack, MP4VideoTrack as VideoTrack,
DataStream as Stream, DataStream as Stream,
Box, Box,
ISOFile, ISOFile,
Sample, Sample,
} from "mp4box" } from "mp4box"
export { Init, InitParser } from "./init" export { Init, InitParser } from "./init"


@@ -1,43 +1,43 @@
import * as MP4 from "./index" import * as MP4 from "./index"
export interface Init { export interface Init {
raw: MP4.ArrayBuffer raw: MP4.ArrayBuffer
info: MP4.Info info: MP4.Info
} }
export class InitParser { export class InitParser {
mp4box: MP4.File mp4box: MP4.File
offset: number offset: number
raw: MP4.ArrayBuffer[] raw: MP4.ArrayBuffer[]
info: Promise<MP4.Info> info: Promise<MP4.Info>
constructor() { constructor() {
this.mp4box = MP4.New() this.mp4box = MP4.New()
this.raw = [] this.raw = []
this.offset = 0 this.offset = 0
// Create a promise that gets resolved once the init segment has been parsed. // Create a promise that gets resolved once the init segment has been parsed.
this.info = new Promise((resolve, reject) => { this.info = new Promise((resolve, reject) => {
this.mp4box.onError = reject this.mp4box.onError = reject
this.mp4box.onReady = resolve this.mp4box.onReady = resolve
}) })
} }
push(data: Uint8Array) { push(data: Uint8Array) {
// Make a copy of the atom because mp4box only accepts an ArrayBuffer unfortunately // Make a copy of the atom because mp4box only accepts an ArrayBuffer unfortunately
const box = new Uint8Array(data.byteLength) const box = new Uint8Array(data.byteLength)
box.set(data) box.set(data)
// and for some reason we need to modify the underlying ArrayBuffer with fileStart // and for some reason we need to modify the underlying ArrayBuffer with fileStart
const buffer = box.buffer as MP4.ArrayBuffer const buffer = box.buffer as MP4.ArrayBuffer
buffer.fileStart = this.offset buffer.fileStart = this.offset
// Parse the data // Parse the data
this.offset = this.mp4box.appendBuffer(buffer) this.offset = this.mp4box.appendBuffer(buffer)
this.mp4box.flush() this.mp4box.flush()
// Add the box to our queue of chunks // Add the box to our queue of chunks
this.raw.push(buffer) this.raw.push(buffer)
} }
} }
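A minimal usage sketch of InitParser, assuming `ftypAtom` and `moovAtom` already hold the two init atoms (names and the import path are illustrative):

import * as MP4 from "../mp4"

declare const ftypAtom: Uint8Array
declare const moovAtom: Uint8Array

const parser = new MP4.InitParser()
parser.push(ftypAtom) // push() copies each atom and stamps fileStart before handing it to mp4box
parser.push(moovAtom)

// info resolves once mp4box fires onReady for the parsed init segment.
const info = await parser.info
console.log(info.tracks.map((t) => t.codec))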


@@ -1,239 +1,231 @@
// https://github.com/gpac/mp4box.js/issues/233 // https://github.com/gpac/mp4box.js/issues/233
declare module "mp4box" { declare module "mp4box" {
export interface MP4MediaTrack { export interface MP4MediaTrack {
id: number id: number
created: Date created: Date
modified: Date modified: Date
movie_duration: number movie_duration: number
layer: number layer: number
alternate_group: number alternate_group: number
volume: number volume: number
track_width: number track_width: number
track_height: number track_height: number
timescale: number timescale: number
duration: number duration: number
bitrate: number bitrate: number
codec: string codec: string
language: string language: string
nb_samples: number nb_samples: number
} }
export interface MP4VideoData { export interface MP4VideoData {
width: number width: number
height: number height: number
} }
export interface MP4VideoTrack extends MP4MediaTrack { export interface MP4VideoTrack extends MP4MediaTrack {
video: MP4VideoData video: MP4VideoData
} }
export interface MP4AudioData { export interface MP4AudioData {
sample_rate: number sample_rate: number
channel_count: number channel_count: number
sample_size: number sample_size: number
} }
export interface MP4AudioTrack extends MP4MediaTrack { export interface MP4AudioTrack extends MP4MediaTrack {
audio: MP4AudioData audio: MP4AudioData
} }
export type MP4Track = MP4VideoTrack | MP4AudioTrack export type MP4Track = MP4VideoTrack | MP4AudioTrack
export interface MP4Info { export interface MP4Info {
duration: number duration: number
timescale: number timescale: number
fragment_duration: number fragment_duration: number
isFragmented: boolean isFragmented: boolean
isProgressive: boolean isProgressive: boolean
hasIOD: boolean hasIOD: boolean
brands: string[] brands: string[]
created: Date created: Date
modified: Date modified: Date
tracks: MP4Track[] tracks: MP4Track[]
mime: string mime: string
audioTracks: MP4AudioTrack[] audioTracks: MP4AudioTrack[]
videoTracks: MP4VideoTrack[] videoTracks: MP4VideoTrack[]
} }
export type MP4ArrayBuffer = ArrayBuffer & { fileStart: number } export type MP4ArrayBuffer = ArrayBuffer & { fileStart: number }
export interface MP4File { export interface MP4File {
onMoovStart?: () => void onMoovStart?: () => void
onReady?: (info: MP4Info) => void onReady?: (info: MP4Info) => void
onError?: (e: string) => void onError?: (e: string) => void
onSamples?: (id: number, user: any, samples: Sample[]) => void onSamples?: (id: number, user: any, samples: Sample[]) => void
appendBuffer(data: MP4ArrayBuffer): number appendBuffer(data: MP4ArrayBuffer): number
start(): void start(): void
stop(): void stop(): void
flush(): void flush(): void
setExtractionOptions( setExtractionOptions(id: number, user: any, options: ExtractionOptions): void
id: number, }
user: any,
options: ExtractionOptions
): void
}
export function createFile(): MP4File export function createFile(): MP4File
export interface Sample { export interface Sample {
number: number number: number
track_id: number track_id: number
timescale: number timescale: number
description_index: number description_index: number
description: any description: any
data: ArrayBuffer data: ArrayBuffer
size: number size: number
alreadyRead?: number alreadyRead?: number
duration: number duration: number
cts: number cts: number
dts: number dts: number
is_sync: boolean is_sync: boolean
is_leading: number is_leading: number
depends_on: number depends_on: number
is_depended_on: number is_depended_on: number
has_redundancy: number has_redundancy: number
degration_priority: number degration_priority: number
offset: number offset: number
subsamples: any subsamples: any
} }
export interface ExtractionOptions { export interface ExtractionOptions {
nbSamples: number nbSamples: number
} }
const BIG_ENDIAN: boolean const BIG_ENDIAN: boolean
const LITTLE_ENDIAN: boolean const LITTLE_ENDIAN: boolean
export class DataStream { export class DataStream {
constructor( constructor(buffer?: ArrayBuffer, byteOffset?: number, littleEndian?: boolean)
buffer?: ArrayBuffer, getPosition(): number
byteOffset?: number,
littleEndian?: boolean
)
getPosition(): number
get byteLength(): number get byteLength(): number
get buffer(): ArrayBuffer get buffer(): ArrayBuffer
set buffer(v: ArrayBuffer) set buffer(v: ArrayBuffer)
get byteOffset(): number get byteOffset(): number
set byteOffset(v: number) set byteOffset(v: number)
get dataView(): DataView get dataView(): DataView
set dataView(v: DataView) set dataView(v: DataView)
seek(pos: number): void seek(pos: number): void
isEof(): boolean isEof(): boolean
mapUint8Array(length: number): Uint8Array mapUint8Array(length: number): Uint8Array
readInt32Array(length: number, littleEndian: boolean): Int32Array readInt32Array(length: number, littleEndian: boolean): Int32Array
readInt16Array(length: number, littleEndian: boolean): Int16Array readInt16Array(length: number, littleEndian: boolean): Int16Array
readInt8Array(length: number): Int8Array readInt8Array(length: number): Int8Array
readUint32Array(length: number, littleEndian: boolean): Uint32Array readUint32Array(length: number, littleEndian: boolean): Uint32Array
readUint16Array(length: number, littleEndian: boolean): Uint16Array readUint16Array(length: number, littleEndian: boolean): Uint16Array
readUint8Array(length: number): Uint8Array readUint8Array(length: number): Uint8Array
readFloat64Array(length: number, littleEndian: boolean): Float64Array readFloat64Array(length: number, littleEndian: boolean): Float64Array
readFloat32Array(length: number, littleEndian: boolean): Float32Array readFloat32Array(length: number, littleEndian: boolean): Float32Array
readInt32(littleEndian: boolean): number readInt32(littleEndian: boolean): number
readInt16(littleEndian: boolean): number readInt16(littleEndian: boolean): number
readInt8(): number readInt8(): number
readUint32(littleEndian: boolean): number readUint32(littleEndian: boolean): number
readUint16(littleEndian: boolean): number readUint16(littleEndian: boolean): number
readUint8(): number readUint8(): number
readFloat32(littleEndian: boolean): number readFloat32(littleEndian: boolean): number
readFloat64(littleEndian: boolean): number readFloat64(littleEndian: boolean): number
endianness: boolean endianness: boolean
memcpy( memcpy(
dst: ArrayBufferLike, dst: ArrayBufferLike,
dstOffset: number, dstOffset: number,
src: ArrayBufferLike, src: ArrayBufferLike,
srcOffset: number, srcOffset: number,
byteLength: number byteLength: number
): void ): void
// TODO I got bored porting the remaining functions // TODO I got bored porting the remaining functions
} }
export class Box { export class Box {
write(stream: DataStream): void write(stream: DataStream): void
} }
export interface TrackOptions { export interface TrackOptions {
id?: number id?: number
type?: string type?: string
width?: number width?: number
height?: number height?: number
duration?: number duration?: number
layer?: number layer?: number
timescale?: number timescale?: number
media_duration?: number media_duration?: number
language?: string language?: string
hdlr?: string hdlr?: string
// video // video
avcDecoderConfigRecord?: any avcDecoderConfigRecord?: any
// audio // audio
balance?: number balance?: number
channel_count?: number channel_count?: number
samplesize?: number samplesize?: number
samplerate?: number samplerate?: number
//captions //captions
namespace?: string namespace?: string
schema_location?: string schema_location?: string
auxiliary_mime_types?: string auxiliary_mime_types?: string
description?: any description?: any
description_boxes?: Box[] description_boxes?: Box[]
default_sample_description_index_id?: number default_sample_description_index_id?: number
default_sample_duration?: number default_sample_duration?: number
default_sample_size?: number default_sample_size?: number
default_sample_flags?: number default_sample_flags?: number
} }
export interface FileOptions { export interface FileOptions {
brands?: string[] brands?: string[]
timescale?: number timescale?: number
rate?: number rate?: number
duration?: number duration?: number
width?: number width?: number
} }
export interface SampleOptions { export interface SampleOptions {
sample_description_index?: number sample_description_index?: number
duration?: number duration?: number
cts?: number cts?: number
dts?: number dts?: number
is_sync?: boolean is_sync?: boolean
is_leading?: number is_leading?: number
depends_on?: number depends_on?: number
is_depended_on?: number is_depended_on?: number
has_redundancy?: number has_redundancy?: number
degradation_priority?: number degradation_priority?: number
subsamples?: any subsamples?: any
} }
// TODO add the remaining functions // TODO add the remaining functions
// TODO move to another module // TODO move to another module
export class ISOFile { export class ISOFile {
constructor(stream?: DataStream) constructor(stream?: DataStream)
init(options?: FileOptions): ISOFile init(options?: FileOptions): ISOFile
addTrack(options?: TrackOptions): number addTrack(options?: TrackOptions): number
addSample(track: number, data: ArrayBuffer, options?: SampleOptions): Sample addSample(track: number, data: ArrayBuffer, options?: SampleOptions): Sample
createSingleSampleMoof(sample: Sample): Box createSingleSampleMoof(sample: Sample): Box
// helpers // helpers
getTrackById(id: number): Box | undefined getTrackById(id: number): Box | undefined
getTrexById(id: number): Box | undefined getTrexById(id: number): Box | undefined
} }
export {} export {}
} }
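The declarations above cover just the slice of mp4box that this project touches; a rough sketch of exercising them directly (the `segment` bytes are an assumption):

import * as MP4 from "mp4box"

declare const segment: Uint8Array // raw ftyp/moov/moof/mdat bytes from somewhere else

const file = MP4.createFile()
file.onError = console.error
file.onReady = (info) => {
	// Mirror the decoder: extract one sample at a time per track.
	for (const track of info.tracks) {
		file.setExtractionOptions(track.id, track, { nbSamples: 1 })
	}
	file.start()
}
file.onSamples = (id, _user, samples) => console.log("track", id, samples.length)

// mp4box requires fileStart to be stamped on every ArrayBuffer it is fed.
const buffer = segment.buffer as MP4.MP4ArrayBuffer
buffer.fileStart = 0
file.appendBuffer(buffer)
file.flush()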


@@ -2,81 +2,78 @@ import * as Message from "./message"
import { Ring } from "./ring" import { Ring } from "./ring"
export default class Audio { export default class Audio {
ring?: Ring ring?: Ring
queue: Array<AudioData> queue: Array<AudioData>
render?: number // non-zero if requestAnimationFrame has been called render?: number // non-zero if requestAnimationFrame has been called
last?: number // the timestamp of the last rendered frame, in microseconds last?: number // the timestamp of the last rendered frame, in microseconds
constructor(_config: Message.Config) { constructor(_config: Message.Config) {
this.queue = [] this.queue = []
} }
push(frame: AudioData) { push(frame: AudioData) {
// Drop any old frames // Drop any old frames
if (this.last && frame.timestamp <= this.last) { if (this.last && frame.timestamp <= this.last) {
frame.close() frame.close()
return return
} }
// Insert the frame into the queue sorted by timestamp. // Insert the frame into the queue sorted by timestamp.
if ( if (this.queue.length > 0 && this.queue[this.queue.length - 1].timestamp <= frame.timestamp) {
this.queue.length > 0 && // Fast path because we normally append to the end.
this.queue[this.queue.length - 1].timestamp <= frame.timestamp this.queue.push(frame)
) { } else {
// Fast path because we normally append to the end. // Do a full binary search
this.queue.push(frame) let low = 0
} else { let high = this.queue.length
// Do a full binary search
let low = 0
let high = this.queue.length
while (low < high) { while (low < high) {
const mid = (low + high) >>> 1 const mid = (low + high) >>> 1
if (this.queue[mid].timestamp < frame.timestamp) low = mid + 1 if (this.queue[mid].timestamp < frame.timestamp) low = mid + 1
else high = mid else high = mid
} }
this.queue.splice(low, 0, frame) this.queue.splice(low, 0, frame)
} }
this.emit() this.emit()
} }
emit() { emit() {
const ring = this.ring const ring = this.ring
if (!ring) { if (!ring) {
return return
} }
while (this.queue.length) { while (this.queue.length) {
const frame = this.queue[0] const frame = this.queue[0]
if (ring.size() + frame.numberOfFrames > ring.capacity) { if (ring.size() + frame.numberOfFrames > ring.capacity) {
// Buffer is full // Buffer is full
break break
} }
const size = ring.write(frame) const size = ring.write(frame)
if (size < frame.numberOfFrames) { if (size < frame.numberOfFrames) {
throw new Error("audio buffer is full") throw new Error("audio buffer is full")
} }
this.last = frame.timestamp this.last = frame.timestamp
frame.close() frame.close()
this.queue.shift() this.queue.shift()
} }
} }
play(play: Message.Play) { play(play: Message.Play) {
this.ring = new Ring(play.buffer) this.ring = new Ring(play.buffer)
if (!this.render) { if (!this.render) {
const sampleRate = 44100 // TODO dynamic const sampleRate = 44100 // TODO dynamic
// Refresh every half buffer // Refresh every half buffer
const refresh = ((play.buffer.capacity / sampleRate) * 1000) / 2 const refresh = ((play.buffer.capacity / sampleRate) * 1000) / 2
this.render = setInterval(this.emit.bind(this), refresh) this.render = setInterval(this.emit.bind(this), refresh)
} }
} }
} }
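push() above keeps the queue ordered by timestamp with a fast append path plus a binary-search fallback; the same logic on plain numbers, as a standalone illustration:

// Insert `value` into an ascending array, mirroring push() above.
function insertSorted(queue: number[], value: number) {
	if (queue.length > 0 && queue[queue.length - 1] <= value) {
		queue.push(value) // fast path: frames normally arrive in order
		return
	}
	let low = 0
	let high = queue.length
	while (low < high) {
		const mid = (low + high) >>> 1
		if (queue[mid] < value) low = mid + 1
		else high = mid
	}
	queue.splice(low, 0, value)
}

// insertSorted([10, 20, 40], 30) leaves the array as [10, 20, 30, 40]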


@@ -5,175 +5,167 @@ import * as Stream from "../stream"
import Renderer from "./renderer" import Renderer from "./renderer"
export default class Decoder { export default class Decoder {
init: MP4.InitParser init: MP4.InitParser
decoders: Map<number, AudioDecoder | VideoDecoder> decoders: Map<number, AudioDecoder | VideoDecoder>
renderer: Renderer renderer: Renderer
constructor(renderer: Renderer) { constructor(renderer: Renderer) {
this.init = new MP4.InitParser() this.init = new MP4.InitParser()
this.decoders = new Map() this.decoders = new Map()
this.renderer = renderer this.renderer = renderer
} }
async receiveInit(msg: Message.Init) { async receiveInit(msg: Message.Init) {
const stream = new Stream.Reader(msg.reader, msg.buffer) const stream = new Stream.Reader(msg.reader, msg.buffer)
for (;;) { for (;;) {
const data = await stream.read() const data = await stream.read()
if (!data) break if (!data) break
this.init.push(data) this.init.push(data)
} }
// TODO make sure the init segment is fully received // TODO make sure the init segment is fully received
} }
async receiveSegment(msg: Message.Segment) { async receiveSegment(msg: Message.Segment) {
// Wait for the init segment to be fully received and parsed // Wait for the init segment to be fully received and parsed
const init = await this.init.info const init = await this.init.info
const input = MP4.New() const input = MP4.New()
input.onSamples = this.onSamples.bind(this) input.onSamples = this.onSamples.bind(this)
input.onReady = (track: any) => { input.onReady = (track: any) => {
// Extract all of the tracks, because we don't know if it's audio or video. // Extract all of the tracks, because we don't know if it's audio or video.
for (const i of init.tracks) { for (const i of init.tracks) {
input.setExtractionOptions(track.id, i, { nbSamples: 1 }) input.setExtractionOptions(track.id, i, { nbSamples: 1 })
} }
input.start() input.start()
} }
// MP4box requires us to reparse the init segment unfortunately // MP4box requires us to reparse the init segment unfortunately
let offset = 0 let offset = 0
for (const raw of this.init.raw) { for (const raw of this.init.raw) {
raw.fileStart = offset raw.fileStart = offset
offset = input.appendBuffer(raw) offset = input.appendBuffer(raw)
} }
const stream = new Stream.Reader(msg.reader, msg.buffer) const stream = new Stream.Reader(msg.reader, msg.buffer)
// For whatever reason, mp4box doesn't work unless you feed it one atom at a time. // For whatever reason, mp4box doesn't work unless you feed it one atom at a time.
while (!(await stream.done())) { while (!(await stream.done())) {
const raw = await stream.peek(4) const raw = await stream.peek(4)
// TODO this doesn't support when size = 0 (until EOF) or size = 1 (extended size) // TODO this doesn't support when size = 0 (until EOF) or size = 1 (extended size)
const size = new DataView( const size = new DataView(raw.buffer, raw.byteOffset, raw.byteLength).getUint32(0)
raw.buffer, const atom = await stream.bytes(size)
raw.byteOffset,
raw.byteLength
).getUint32(0)
const atom = await stream.bytes(size)
// Make a copy of the atom because mp4box only accepts an ArrayBuffer unfortunately // Make a copy of the atom because mp4box only accepts an ArrayBuffer unfortunately
const box = new Uint8Array(atom.byteLength) const box = new Uint8Array(atom.byteLength)
box.set(atom) box.set(atom)
// and for some reason we need to modify the underlying ArrayBuffer with offset // and for some reason we need to modify the underlying ArrayBuffer with offset
const buffer = box.buffer as MP4.ArrayBuffer const buffer = box.buffer as MP4.ArrayBuffer
buffer.fileStart = offset buffer.fileStart = offset
// Parse the data // Parse the data
offset = input.appendBuffer(buffer) offset = input.appendBuffer(buffer)
input.flush() input.flush()
} }
} }
onSamples(track_id: number, track: MP4.Track, samples: MP4.Sample[]) { onSamples(track_id: number, track: MP4.Track, samples: MP4.Sample[]) {
let decoder = this.decoders.get(track_id) let decoder = this.decoders.get(track_id)
if (!decoder) { if (!decoder) {
// We need a sample to initialize the video decoder, because of mp4box limitations. // We need a sample to initialize the video decoder, because of mp4box limitations.
const sample = samples[0] const sample = samples[0]
if (isVideoTrack(track)) { if (isVideoTrack(track)) {
// Configure the decoder using the AVC box for H.264 // Configure the decoder using the AVC box for H.264
// TODO it should be easy to support other codecs, just need to know the right boxes. // TODO it should be easy to support other codecs, just need to know the right boxes.
const avcc = sample.description.avcC const avcc = sample.description.avcC
if (!avcc) throw new Error("TODO only h264 is supported") if (!avcc) throw new Error("TODO only h264 is supported")
const description = new MP4.Stream(new Uint8Array(avcc.size), 0, false) const description = new MP4.Stream(new Uint8Array(avcc.size), 0, false)
avcc.write(description) avcc.write(description)
const videoDecoder = new VideoDecoder({ const videoDecoder = new VideoDecoder({
output: this.renderer.push.bind(this.renderer), output: this.renderer.push.bind(this.renderer),
error: console.warn, error: console.warn,
}) })
videoDecoder.configure({ videoDecoder.configure({
codec: track.codec, codec: track.codec,
codedHeight: track.video.height, codedHeight: track.video.height,
codedWidth: track.video.width, codedWidth: track.video.width,
description: description.buffer?.slice(8), description: description.buffer?.slice(8),
// optimizeForLatency: true // optimizeForLatency: true
}) })
decoder = videoDecoder decoder = videoDecoder
} else if (isAudioTrack(track)) { } else if (isAudioTrack(track)) {
const audioDecoder = new AudioDecoder({ const audioDecoder = new AudioDecoder({
output: this.renderer.push.bind(this.renderer), output: this.renderer.push.bind(this.renderer),
error: console.warn, error: console.warn,
}) })
audioDecoder.configure({ audioDecoder.configure({
codec: track.codec, codec: track.codec,
numberOfChannels: track.audio.channel_count, numberOfChannels: track.audio.channel_count,
sampleRate: track.audio.sample_rate, sampleRate: track.audio.sample_rate,
}) })
decoder = audioDecoder decoder = audioDecoder
} else { } else {
throw new Error("unknown track type") throw new Error("unknown track type")
} }
this.decoders.set(track_id, decoder) this.decoders.set(track_id, decoder)
} }
for (const sample of samples) { for (const sample of samples) {
// Convert to microseconds // Convert to microseconds
const timestamp = (1000 * 1000 * sample.dts) / sample.timescale const timestamp = (1000 * 1000 * sample.dts) / sample.timescale
const duration = (1000 * 1000 * sample.duration) / sample.timescale const duration = (1000 * 1000 * sample.duration) / sample.timescale
if (isAudioDecoder(decoder)) { if (isAudioDecoder(decoder)) {
decoder.decode( decoder.decode(
new EncodedAudioChunk({ new EncodedAudioChunk({
type: sample.is_sync ? "key" : "delta", type: sample.is_sync ? "key" : "delta",
data: sample.data, data: sample.data,
duration: duration, duration: duration,
timestamp: timestamp, timestamp: timestamp,
}) })
) )
} else if (isVideoDecoder(decoder)) { } else if (isVideoDecoder(decoder)) {
decoder.decode( decoder.decode(
new EncodedVideoChunk({ new EncodedVideoChunk({
type: sample.is_sync ? "key" : "delta", type: sample.is_sync ? "key" : "delta",
data: sample.data, data: sample.data,
duration: duration, duration: duration,
timestamp: timestamp, timestamp: timestamp,
}) })
) )
} else { } else {
throw new Error("unknown decoder type") throw new Error("unknown decoder type")
} }
} }
} }
} }
function isAudioDecoder( function isAudioDecoder(decoder: AudioDecoder | VideoDecoder): decoder is AudioDecoder {
decoder: AudioDecoder | VideoDecoder return decoder instanceof AudioDecoder
): decoder is AudioDecoder {
return decoder instanceof AudioDecoder
} }
function isVideoDecoder( function isVideoDecoder(decoder: AudioDecoder | VideoDecoder): decoder is VideoDecoder {
decoder: AudioDecoder | VideoDecoder return decoder instanceof VideoDecoder
): decoder is VideoDecoder {
return decoder instanceof VideoDecoder
} }
function isAudioTrack(track: MP4.Track): track is MP4.AudioTrack { function isAudioTrack(track: MP4.Track): track is MP4.AudioTrack {
return (track as MP4.AudioTrack).audio !== undefined return (track as MP4.AudioTrack).audio !== undefined
} }
function isVideoTrack(track: MP4.Track): track is MP4.VideoTrack { function isVideoTrack(track: MP4.Track): track is MP4.VideoTrack {
return (track as MP4.VideoTrack).video !== undefined return (track as MP4.VideoTrack).video !== undefined
} }
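One non-obvious step above is description.buffer?.slice(8): mp4box writes the avcC as a full box (4-byte size plus 4-byte type), while VideoDecoder wants only the AVCDecoderConfigurationRecord payload, so the 8-byte header is dropped. The same configuration in isolation, with `sample` and `track` standing in for the values handed to onSamples (import path is an assumption):

import * as MP4 from "../mp4"

declare const sample: MP4.Sample
declare const track: MP4.VideoTrack

const avcc = sample.description.avcC
const stream = new MP4.Stream(new Uint8Array(avcc.size), 0, false)
avcc.write(stream) // serializes [size:4][type "avcC":4][AVCDecoderConfigurationRecord]

const decoder = new VideoDecoder({ output: console.log, error: console.warn })
decoder.configure({
	codec: track.codec,
	codedWidth: track.video.width,
	codedHeight: track.video.height,
	description: stream.buffer?.slice(8), // drop the 8-byte box header
})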


@@ -3,89 +3,86 @@ import * as Ring from "./ring"
import Transport from "../transport" import Transport from "../transport"
export interface Config { export interface Config {
transport: Transport transport: Transport
canvas: OffscreenCanvas canvas: OffscreenCanvas
} }
// This class must be created on the main thread due to AudioContext. // This class must be created on the main thread due to AudioContext.
export default class Player { export default class Player {
context: AudioContext context: AudioContext
worker: Worker worker: Worker
worklet: Promise<AudioWorkletNode> worklet: Promise<AudioWorkletNode>
transport: Transport transport: Transport
constructor(config: Config) { constructor(config: Config) {
this.transport = config.transport this.transport = config.transport
this.transport.callback = this this.transport.callback = this
this.context = new AudioContext({ this.context = new AudioContext({
latencyHint: "interactive", latencyHint: "interactive",
sampleRate: 44100, sampleRate: 44100,
}) })
this.worker = this.setupWorker(config) this.worker = this.setupWorker(config)
this.worklet = this.setupWorklet(config) this.worklet = this.setupWorklet(config)
} }
private setupWorker(config: Config): Worker { private setupWorker(config: Config): Worker {
const url = new URL("worker.ts", import.meta.url) const url = new URL("worker.ts", import.meta.url)
const worker = new Worker(url, { const worker = new Worker(url, {
type: "module", type: "module",
name: "media", name: "media",
}) })
const msg = { const msg = {
canvas: config.canvas, canvas: config.canvas,
} }
worker.postMessage({ config: msg }, [msg.canvas]) worker.postMessage({ config: msg }, [msg.canvas])
return worker return worker
} }
private async setupWorklet(_config: Config): Promise<AudioWorkletNode> { private async setupWorklet(_config: Config): Promise<AudioWorkletNode> {
// Load the worklet source code. // Load the worklet source code.
const url = new URL("worklet.ts", import.meta.url) const url = new URL("worklet.ts", import.meta.url)
await this.context.audioWorklet.addModule(url) await this.context.audioWorklet.addModule(url)
const volume = this.context.createGain() const volume = this.context.createGain()
volume.gain.value = 2.0 volume.gain.value = 2.0
// Create a worklet // Create a worklet
const worklet = new AudioWorkletNode(this.context, "renderer") const worklet = new AudioWorkletNode(this.context, "renderer")
worklet.onprocessorerror = (e: Event) => { worklet.onprocessorerror = (e: Event) => {
console.error("Audio worklet error:", e) console.error("Audio worklet error:", e)
} }
// Connect the worklet to the volume node and then to the speakers // Connect the worklet to the volume node and then to the speakers
worklet.connect(volume) worklet.connect(volume)
volume.connect(this.context.destination) volume.connect(this.context.destination)
return worklet return worklet
} }
onInit(init: Message.Init) { onInit(init: Message.Init) {
this.worker.postMessage({ init }, [init.buffer.buffer, init.reader]) this.worker.postMessage({ init }, [init.buffer.buffer, init.reader])
} }
onSegment(segment: Message.Segment) { onSegment(segment: Message.Segment) {
this.worker.postMessage({ segment }, [ this.worker.postMessage({ segment }, [segment.buffer.buffer, segment.reader])
segment.buffer.buffer, }
segment.reader,
])
}
async play() { async play() {
this.context.resume() this.context.resume()
const play = { const play = {
buffer: new Ring.Buffer(2, 44100 / 10), // 100ms of audio buffer: new Ring.Buffer(2, 44100 / 10), // 100ms of audio
} }
const worklet = await this.worklet const worklet = await this.worklet
worklet.port.postMessage({ play }) worklet.port.postMessage({ play })
this.worker.postMessage({ play }) this.worker.postMessage({ play })
} }
} }


@@ -1,21 +1,21 @@
import * as Ring from "./ring" import * as Ring from "./ring"
export interface Config { export interface Config {
// video stuff // video stuff
canvas: OffscreenCanvas canvas: OffscreenCanvas
} }
export interface Init { export interface Init {
buffer: Uint8Array // unread buffered data buffer: Uint8Array // unread buffered data
reader: ReadableStream // unread unbuffered data reader: ReadableStream // unread unbuffered data
} }
export interface Segment { export interface Segment {
buffer: Uint8Array // unread buffered data buffer: Uint8Array // unread buffered data
reader: ReadableStream // unread unbuffered data reader: ReadableStream // unread unbuffered data
} }
export interface Play { export interface Play {
timestamp?: number timestamp?: number
buffer: Ring.Buffer buffer: Ring.Buffer
} }


@@ -3,34 +3,34 @@ import Audio from "./audio"
import Video from "./video" import Video from "./video"
export default class Renderer { export default class Renderer {
audio: Audio audio: Audio
video: Video video: Video
constructor(config: Message.Config) { constructor(config: Message.Config) {
this.audio = new Audio(config) this.audio = new Audio(config)
this.video = new Video(config) this.video = new Video(config)
} }
push(frame: AudioData | VideoFrame) { push(frame: AudioData | VideoFrame) {
if (isAudioData(frame)) { if (isAudioData(frame)) {
this.audio.push(frame) this.audio.push(frame)
} else if (isVideoFrame(frame)) { } else if (isVideoFrame(frame)) {
this.video.push(frame) this.video.push(frame)
} else { } else {
throw new Error("unknown frame type") throw new Error("unknown frame type")
} }
} }
play(play: Message.Play) { play(play: Message.Play) {
this.audio.play(play) this.audio.play(play)
this.video.play(play) this.video.play(play)
} }
} }
function isAudioData(frame: AudioData | VideoFrame): frame is AudioData { function isAudioData(frame: AudioData | VideoFrame): frame is AudioData {
return frame instanceof AudioData return frame instanceof AudioData
} }
function isVideoFrame(frame: AudioData | VideoFrame): frame is VideoFrame { function isVideoFrame(frame: AudioData | VideoFrame): frame is VideoFrame {
return frame instanceof VideoFrame return frame instanceof VideoFrame
} }


@@ -1,159 +1,155 @@
// Ring buffer with audio samples. // Ring buffer with audio samples.
enum STATE { enum STATE {
READ_POS = 0, // The current read position READ_POS = 0, // The current read position
WRITE_POS, // The current write position WRITE_POS, // The current write position
LENGTH, // Clever way of saving the total number of enum values. LENGTH, // Clever way of saving the total number of enum values.
} }
// No prototype to make this easier to send via postMessage // No prototype to make this easier to send via postMessage
export class Buffer { export class Buffer {
state: SharedArrayBuffer state: SharedArrayBuffer
channels: SharedArrayBuffer[] channels: SharedArrayBuffer[]
capacity: number capacity: number
constructor(channels: number, capacity: number) { constructor(channels: number, capacity: number) {
// Store the current state in a separate ring buffer. // Store the current state in a separate ring buffer.
this.state = new SharedArrayBuffer( this.state = new SharedArrayBuffer(STATE.LENGTH * Int32Array.BYTES_PER_ELEMENT)
STATE.LENGTH * Int32Array.BYTES_PER_ELEMENT
)
// Create a buffer for each audio channel // Create a buffer for each audio channel
this.channels = [] this.channels = []
for (let i = 0; i < channels; i += 1) { for (let i = 0; i < channels; i += 1) {
const buffer = new SharedArrayBuffer( const buffer = new SharedArrayBuffer(capacity * Float32Array.BYTES_PER_ELEMENT)
capacity * Float32Array.BYTES_PER_ELEMENT this.channels.push(buffer)
) }
this.channels.push(buffer)
}
this.capacity = capacity this.capacity = capacity
} }
} }
export class Ring { export class Ring {
state: Int32Array state: Int32Array
channels: Float32Array[] channels: Float32Array[]
capacity: number capacity: number
constructor(buffer: Buffer) { constructor(buffer: Buffer) {
this.state = new Int32Array(buffer.state) this.state = new Int32Array(buffer.state)
this.channels = [] this.channels = []
for (const channel of buffer.channels) { for (const channel of buffer.channels) {
this.channels.push(new Float32Array(channel)) this.channels.push(new Float32Array(channel))
} }
this.capacity = buffer.capacity this.capacity = buffer.capacity
} }
// Write samples for a single audio frame, returning the total number written. // Write samples for a single audio frame, returning the total number written.
write(frame: AudioData): number { write(frame: AudioData): number {
const readPos = Atomics.load(this.state, STATE.READ_POS) const readPos = Atomics.load(this.state, STATE.READ_POS)
const writePos = Atomics.load(this.state, STATE.WRITE_POS) const writePos = Atomics.load(this.state, STATE.WRITE_POS)
const startPos = writePos const startPos = writePos
let endPos = writePos + frame.numberOfFrames let endPos = writePos + frame.numberOfFrames
if (endPos > readPos + this.capacity) { if (endPos > readPos + this.capacity) {
endPos = readPos + this.capacity endPos = readPos + this.capacity
if (endPos <= startPos) { if (endPos <= startPos) {
// No space to write // No space to write
return 0 return 0
} }
} }
const startIndex = startPos % this.capacity const startIndex = startPos % this.capacity
const endIndex = endPos % this.capacity const endIndex = endPos % this.capacity
// Loop over each channel // Loop over each channel
for (let i = 0; i < this.channels.length; i += 1) { for (let i = 0; i < this.channels.length; i += 1) {
const channel = this.channels[i] const channel = this.channels[i]
if (startIndex < endIndex) { if (startIndex < endIndex) {
// One continuous range to copy. // One continuous range to copy.
const full = channel.subarray(startIndex, endIndex) const full = channel.subarray(startIndex, endIndex)
frame.copyTo(full, { frame.copyTo(full, {
planeIndex: i, planeIndex: i,
frameCount: endIndex - startIndex, frameCount: endIndex - startIndex,
}) })
} else { } else {
const first = channel.subarray(startIndex) const first = channel.subarray(startIndex)
const second = channel.subarray(0, endIndex) const second = channel.subarray(0, endIndex)
frame.copyTo(first, { frame.copyTo(first, {
planeIndex: i, planeIndex: i,
frameCount: first.length, frameCount: first.length,
}) })
// We need this conditional when startIndex == 0 and endIndex == 0 // We need this conditional when startIndex == 0 and endIndex == 0
// When capacity=4410 and frameCount=1024, this was happening 52s into the audio. // When capacity=4410 and frameCount=1024, this was happening 52s into the audio.
if (second.length) { if (second.length) {
frame.copyTo(second, { frame.copyTo(second, {
planeIndex: i, planeIndex: i,
frameOffset: first.length, frameOffset: first.length,
frameCount: second.length, frameCount: second.length,
}) })
} }
} }
} }
Atomics.store(this.state, STATE.WRITE_POS, endPos) Atomics.store(this.state, STATE.WRITE_POS, endPos)
return endPos - startPos return endPos - startPos
} }
read(dst: Float32Array[]): number { read(dst: Float32Array[]): number {
const readPos = Atomics.load(this.state, STATE.READ_POS) const readPos = Atomics.load(this.state, STATE.READ_POS)
const writePos = Atomics.load(this.state, STATE.WRITE_POS) const writePos = Atomics.load(this.state, STATE.WRITE_POS)
const startPos = readPos const startPos = readPos
let endPos = startPos + dst[0].length let endPos = startPos + dst[0].length
if (endPos > writePos) { if (endPos > writePos) {
endPos = writePos endPos = writePos
if (endPos <= startPos) { if (endPos <= startPos) {
// Nothing to read // Nothing to read
return 0 return 0
} }
} }
const startIndex = startPos % this.capacity const startIndex = startPos % this.capacity
const endIndex = endPos % this.capacity const endIndex = endPos % this.capacity
// Loop over each channel // Loop over each channel
for (let i = 0; i < dst.length; i += 1) { for (let i = 0; i < dst.length; i += 1) {
if (i >= this.channels.length) { if (i >= this.channels.length) {
// ignore excess channels; skip so we don't index past this.channels below // ignore excess channels; skip so we don't index past this.channels below
continue continue
} }
const input = this.channels[i] const input = this.channels[i]
const output = dst[i] const output = dst[i]
if (startIndex < endIndex) { if (startIndex < endIndex) {
const full = input.subarray(startIndex, endIndex) const full = input.subarray(startIndex, endIndex)
output.set(full) output.set(full)
} else { } else {
const first = input.subarray(startIndex) const first = input.subarray(startIndex)
const second = input.subarray(0, endIndex) const second = input.subarray(0, endIndex)
output.set(first) output.set(first)
output.set(second, first.length) output.set(second, first.length)
} }
} }
Atomics.store(this.state, STATE.READ_POS, endPos) Atomics.store(this.state, STATE.READ_POS, endPos)
return endPos - startPos return endPos - startPos
} }
size() { size() {
// TODO is this thread safe? // TODO is this thread safe?
const readPos = Atomics.load(this.state, STATE.READ_POS) const readPos = Atomics.load(this.state, STATE.READ_POS)
const writePos = Atomics.load(this.state, STATE.WRITE_POS) const writePos = Atomics.load(this.state, STATE.WRITE_POS)
return writePos - readPos return writePos - readPos
} }
} }
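A sketch of how the two sides share this ring: the plain Buffer (SharedArrayBuffers only) is what crosses postMessage, and each thread wraps its own Ring view over it (import path and sizes are assumptions; SharedArrayBuffer needs a cross-origin-isolated page):

import { Buffer, Ring } from "./ring"

// 2 channels of 4410 samples, roughly 100 ms at 44.1 kHz.
const shared = new Buffer(2, 4410)

// worker.postMessage({ play: { buffer: shared } })           // decoder side
// workletNode.port.postMessage({ play: { buffer: shared } }) // renderer side

const producer = new Ring(shared) // decoder thread calls producer.write(audioData)
const consumer = new Ring(shared) // audio worklet calls consumer.read(dst)
console.log(consumer.size()) // 0 until the producer writes samples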


@@ -1,101 +1,98 @@
import * as Message from "./message" import * as Message from "./message"
export default class Video { export default class Video {
canvas: OffscreenCanvas canvas: OffscreenCanvas
queue: Array<VideoFrame> queue: Array<VideoFrame>
render: number // non-zero if requestAnimationFrame has been called render: number // non-zero if requestAnimationFrame has been called
sync?: number // the wall clock value for timestamp 0, in microseconds sync?: number // the wall clock value for timestamp 0, in microseconds
last?: number // the timestamp of the last rendered frame, in microseconds last?: number // the timestamp of the last rendered frame, in microseconds
constructor(config: Message.Config) { constructor(config: Message.Config) {
this.canvas = config.canvas this.canvas = config.canvas
this.queue = [] this.queue = []
this.render = 0 this.render = 0
} }
push(frame: VideoFrame) { push(frame: VideoFrame) {
// Drop any old frames // Drop any old frames
if (this.last && frame.timestamp <= this.last) { if (this.last && frame.timestamp <= this.last) {
frame.close() frame.close()
return return
} }
// Insert the frame into the queue sorted by timestamp. // Insert the frame into the queue sorted by timestamp.
if ( if (this.queue.length > 0 && this.queue[this.queue.length - 1].timestamp <= frame.timestamp) {
this.queue.length > 0 && // Fast path because we normally append to the end.
this.queue[this.queue.length - 1].timestamp <= frame.timestamp this.queue.push(frame)
) { } else {
// Fast path because we normally append to the end. // Do a full binary search
this.queue.push(frame) let low = 0
} else { let high = this.queue.length
// Do a full binary search
let low = 0
let high = this.queue.length
while (low < high) { while (low < high) {
const mid = (low + high) >>> 1 const mid = (low + high) >>> 1
if (this.queue[mid].timestamp < frame.timestamp) low = mid + 1 if (this.queue[mid].timestamp < frame.timestamp) low = mid + 1
else high = mid else high = mid
} }
this.queue.splice(low, 0, frame) this.queue.splice(low, 0, frame)
} }
} }
draw(now: number) { draw(now: number) {
// Draw and then queue up the next draw call. // Draw and then queue up the next draw call.
this.drawOnce(now) this.drawOnce(now)
// Queue up the new draw frame. // Queue up the new draw frame.
this.render = self.requestAnimationFrame(this.draw.bind(this)) this.render = self.requestAnimationFrame(this.draw.bind(this))
} }
drawOnce(now: number) { drawOnce(now: number) {
// Convert to microseconds // Convert to microseconds
now *= 1000 now *= 1000
if (!this.queue.length) { if (!this.queue.length) {
return return
} }
let frame = this.queue[0] let frame = this.queue[0]
if (!this.sync) { if (!this.sync) {
this.sync = now - frame.timestamp this.sync = now - frame.timestamp
} }
// Determine the target timestamp. // Determine the target timestamp.
const target = now - this.sync const target = now - this.sync
if (frame.timestamp >= target) { if (frame.timestamp >= target) {
// nothing to render yet, wait for the next animation frame // nothing to render yet, wait for the next animation frame
return return
} }
this.queue.shift() this.queue.shift()
// Check if we should skip some frames // Check if we should skip some frames
while (this.queue.length) { while (this.queue.length) {
const next = this.queue[0] const next = this.queue[0]
if (next.timestamp > target) break if (next.timestamp > target) break
frame.close() frame.close()
frame = this.queue.shift()! frame = this.queue.shift()!
} }
const ctx = this.canvas.getContext("2d") const ctx = this.canvas.getContext("2d")
ctx!.drawImage(frame, 0, 0, this.canvas.width, this.canvas.height) // TODO aspect ratio ctx!.drawImage(frame, 0, 0, this.canvas.width, this.canvas.height) // TODO aspect ratio
this.last = frame.timestamp this.last = frame.timestamp
frame.close() frame.close()
} }
play(_play: Message.Play) { play(_play: Message.Play) {
// Queue up to render the next frame. // Queue up to render the next frame.
if (!this.render) { if (!this.render) {
this.render = self.requestAnimationFrame(this.draw.bind(this)) this.render = self.requestAnimationFrame(this.draw.bind(this))
} }
} }
} }
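For a concrete sense of the sync arithmetic above (made-up numbers): if the first frame has timestamp 0 µs and arrives when now = 5,000,000 µs, then sync = 5,000,000; one second later now = 6,000,000 µs, so target = 1,000,000 µs, and any queued frame at or behind 1,000,000 µs is due, with all but the newest of those closed and skipped before the survivor is drawn.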


@@ -6,19 +6,19 @@ let decoder: Decoder
let renderer: Renderer let renderer: Renderer
self.addEventListener("message", async (e: MessageEvent) => { self.addEventListener("message", async (e: MessageEvent) => {
if (e.data.config) { if (e.data.config) {
const config = e.data.config as Message.Config const config = e.data.config as Message.Config
renderer = new Renderer(config) renderer = new Renderer(config)
decoder = new Decoder(renderer) decoder = new Decoder(renderer)
} else if (e.data.init) { } else if (e.data.init) {
const init = e.data.init as Message.Init const init = e.data.init as Message.Init
await decoder.receiveInit(init) await decoder.receiveInit(init)
} else if (e.data.segment) { } else if (e.data.segment) {
const segment = e.data.segment as Message.Segment const segment = e.data.segment as Message.Segment
await decoder.receiveSegment(segment) await decoder.receiveSegment(segment)
} else if (e.data.play) { } else if (e.data.play) {
const play = e.data.play as Message.Play const play = e.data.play as Message.Play
await renderer.play(play) await renderer.play(play)
} }
}) })


@@ -7,51 +7,47 @@ import * as Message from "./message"
import { Ring } from "./ring" import { Ring } from "./ring"
class Renderer extends AudioWorkletProcessor { class Renderer extends AudioWorkletProcessor {
ring?: Ring ring?: Ring
base: number base: number
constructor(_params: AudioWorkletNodeOptions) { constructor(_params: AudioWorkletNodeOptions) {
// The super constructor call is required. // The super constructor call is required.
super() super()
this.base = 0 this.base = 0
this.port.onmessage = this.onMessage.bind(this) this.port.onmessage = this.onMessage.bind(this)
} }
onMessage(e: MessageEvent) { onMessage(e: MessageEvent) {
if (e.data.play) { if (e.data.play) {
this.onPlay(e.data.play) this.onPlay(e.data.play)
} }
} }
onPlay(play: Message.Play) { onPlay(play: Message.Play) {
this.ring = new Ring(play.buffer) this.ring = new Ring(play.buffer)
} }
// Inputs and outputs in groups of 128 samples. // Inputs and outputs in groups of 128 samples.
process( process(inputs: Float32Array[][], outputs: Float32Array[][], _parameters: Record<string, Float32Array>): boolean {
inputs: Float32Array[][], if (!this.ring) {
outputs: Float32Array[][], // Paused
_parameters: Record<string, Float32Array> return true
): boolean { }
if (!this.ring) {
// Paused
return true
}
if (inputs.length != 1 && outputs.length != 1) { if (inputs.length != 1 && outputs.length != 1) {
throw new Error("only a single track is supported") throw new Error("only a single track is supported")
} }
const output = outputs[0] const output = outputs[0]
const size = this.ring.read(output) const size = this.ring.read(output)
if (size < output[0].length) { if (size < output[0].length) {
// TODO trigger rebuffering event // TODO trigger rebuffering event
} }
return true return true
} }
} }
registerProcessor("renderer", Renderer) registerProcessor("renderer", Renderer)
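process() above receives outputs in the Web Audio render quantum of 128 samples per channel, which is exactly the shape Ring.read() expects; a standalone sketch of one read (import path and sizes are assumptions):

import { Buffer, Ring } from "./ring"

const ring = new Ring(new Buffer(2, 4410))
const output = [new Float32Array(128), new Float32Array(128)] // stereo quantum
const filled = ring.read(output)
if (filled < output[0].length) {
	// Underrun: the unfilled tail stays at 0.0 and plays back as silence.
	console.warn("audio underrun", filled)
}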


@@ -1,210 +1,195 @@
// Reader wraps a stream and provides convenience methods for reading pieces from a stream // Reader wraps a stream and provides convenience methods for reading pieces from a stream
export default class Reader { export default class Reader {
reader: ReadableStream reader: ReadableStream
buffer: Uint8Array buffer: Uint8Array
constructor(reader: ReadableStream, buffer: Uint8Array = new Uint8Array(0)) { constructor(reader: ReadableStream, buffer: Uint8Array = new Uint8Array(0)) {
this.reader = reader this.reader = reader
this.buffer = buffer this.buffer = buffer
} }
// Returns any number of bytes // Returns any number of bytes
async read(): Promise<Uint8Array | undefined> { async read(): Promise<Uint8Array | undefined> {
if (this.buffer.byteLength) { if (this.buffer.byteLength) {
const buffer = this.buffer const buffer = this.buffer
this.buffer = new Uint8Array() this.buffer = new Uint8Array()
return buffer return buffer
} }
const r = this.reader.getReader() const r = this.reader.getReader()
const result = await r.read() const result = await r.read()
r.releaseLock() r.releaseLock()
return result.value return result.value
} }
async readAll(): Promise<Uint8Array> { async readAll(): Promise<Uint8Array> {
const r = this.reader.getReader() const r = this.reader.getReader()
for (;;) { for (;;) {
const result = await r.read() const result = await r.read()
if (result.done) { if (result.done) {
break break
} }
const buffer = new Uint8Array(result.value) const buffer = new Uint8Array(result.value)
if (this.buffer.byteLength == 0) { if (this.buffer.byteLength == 0) {
this.buffer = buffer this.buffer = buffer
} else { } else {
const temp = new Uint8Array(this.buffer.byteLength + buffer.byteLength) const temp = new Uint8Array(this.buffer.byteLength + buffer.byteLength)
temp.set(this.buffer) temp.set(this.buffer)
temp.set(buffer, this.buffer.byteLength) temp.set(buffer, this.buffer.byteLength)
this.buffer = temp this.buffer = temp
} }
} }
const result = this.buffer const result = this.buffer
this.buffer = new Uint8Array() this.buffer = new Uint8Array()
r.releaseLock() r.releaseLock()
return result return result
} }
async bytes(size: number): Promise<Uint8Array> { async bytes(size: number): Promise<Uint8Array> {
const r = this.reader.getReader() const r = this.reader.getReader()
while (this.buffer.byteLength < size) { while (this.buffer.byteLength < size) {
const result = await r.read() const result = await r.read()
if (result.done) { if (result.done) {
throw "short buffer" throw "short buffer"
} }
const buffer = new Uint8Array(result.value) const buffer = new Uint8Array(result.value)
if (this.buffer.byteLength == 0) { if (this.buffer.byteLength == 0) {
this.buffer = buffer this.buffer = buffer
} else { } else {
const temp = new Uint8Array(this.buffer.byteLength + buffer.byteLength) const temp = new Uint8Array(this.buffer.byteLength + buffer.byteLength)
temp.set(this.buffer) temp.set(this.buffer)
temp.set(buffer, this.buffer.byteLength) temp.set(buffer, this.buffer.byteLength)
this.buffer = temp this.buffer = temp
} }
} }
const result = new Uint8Array( const result = new Uint8Array(this.buffer.buffer, this.buffer.byteOffset, size)
this.buffer.buffer, this.buffer = new Uint8Array(this.buffer.buffer, this.buffer.byteOffset + size)
this.buffer.byteOffset,
size
)
this.buffer = new Uint8Array(
this.buffer.buffer,
this.buffer.byteOffset + size
)
r.releaseLock() r.releaseLock()
return result return result
} }
async peek(size: number): Promise<Uint8Array> { async peek(size: number): Promise<Uint8Array> {
const r = this.reader.getReader() const r = this.reader.getReader()
while (this.buffer.byteLength < size) { while (this.buffer.byteLength < size) {
const result = await r.read() const result = await r.read()
if (result.done) { if (result.done) {
throw "short buffer" throw "short buffer"
} }
const buffer = new Uint8Array(result.value) const buffer = new Uint8Array(result.value)
if (this.buffer.byteLength == 0) { if (this.buffer.byteLength == 0) {
this.buffer = buffer this.buffer = buffer
} else { } else {
const temp = new Uint8Array(this.buffer.byteLength + buffer.byteLength) const temp = new Uint8Array(this.buffer.byteLength + buffer.byteLength)
temp.set(this.buffer) temp.set(this.buffer)
temp.set(buffer, this.buffer.byteLength) temp.set(buffer, this.buffer.byteLength)
this.buffer = temp this.buffer = temp
} }
} }
const result = new Uint8Array( const result = new Uint8Array(this.buffer.buffer, this.buffer.byteOffset, size)
this.buffer.buffer,
this.buffer.byteOffset,
size
)
r.releaseLock() r.releaseLock()
return result return result
} }
async view(size: number): Promise<DataView> { async view(size: number): Promise<DataView> {
const buf = await this.bytes(size) const buf = await this.bytes(size)
return new DataView(buf.buffer, buf.byteOffset, buf.byteLength) return new DataView(buf.buffer, buf.byteOffset, buf.byteLength)
} }
async uint8(): Promise<number> { async uint8(): Promise<number> {
const view = await this.view(1) const view = await this.view(1)
return view.getUint8(0) return view.getUint8(0)
} }
async uint16(): Promise<number> { async uint16(): Promise<number> {
const view = await this.view(2) const view = await this.view(2)
return view.getUint16(0) return view.getUint16(0)
} }
async uint32(): Promise<number> { async uint32(): Promise<number> {
const view = await this.view(4) const view = await this.view(4)
return view.getUint32(0) return view.getUint32(0)
} }
// Returns a Number limited to 52 bits, safely within the 53-bit range JavaScript can use for exact integer math // Returns a Number limited to 52 bits, safely within the 53-bit range JavaScript can use for exact integer math
async uint52(): Promise<number> { async uint52(): Promise<number> {
const v = await this.uint64() const v = await this.uint64()
if (v > Number.MAX_SAFE_INTEGER) { if (v > Number.MAX_SAFE_INTEGER) {
throw "overflow" throw "overflow"
} }
return Number(v) return Number(v)
} }
// Returns a Number limited to 52 bits, safely within the 53-bit range JavaScript can use for exact integer math // Returns a Number limited to 52 bits, safely within the 53-bit range JavaScript can use for exact integer math
async vint52(): Promise<number> { async vint52(): Promise<number> {
const v = await this.vint64() const v = await this.vint64()
if (v > Number.MAX_SAFE_INTEGER) { if (v > Number.MAX_SAFE_INTEGER) {
throw "overflow" throw "overflow"
} }
return Number(v) return Number(v)
} }
// NOTE: Returns a BigInt instead of a Number // NOTE: Returns a BigInt instead of a Number
async uint64(): Promise<bigint> { async uint64(): Promise<bigint> {
const view = await this.view(8) const view = await this.view(8)
return view.getBigUint64(0) return view.getBigUint64(0)
} }
// NOTE: Returns a BigInt instead of a Number // NOTE: Returns a BigInt instead of a Number
async vint64(): Promise<bigint> { async vint64(): Promise<bigint> {
const peek = await this.peek(1) const peek = await this.peek(1)
const first = new DataView( const first = new DataView(peek.buffer, peek.byteOffset, peek.byteLength).getUint8(0)
peek.buffer, const size = (first & 0xc0) >> 6
peek.byteOffset,
peek.byteLength
).getUint8(0)
const size = (first & 0xc0) >> 6
switch (size) { switch (size) {
case 0: { case 0: {
const v = await this.uint8() const v = await this.uint8()
return BigInt(v) & 0x3fn return BigInt(v) & 0x3fn
} }
case 1: { case 1: {
const v = await this.uint16() const v = await this.uint16()
return BigInt(v) & 0x3fffn return BigInt(v) & 0x3fffn
} }
case 2: { case 2: {
const v = await this.uint32() const v = await this.uint32()
return BigInt(v) & 0x3fffffffn return BigInt(v) & 0x3fffffffn
} }
case 3: { case 3: {
const v = await this.uint64() const v = await this.uint64()
return v & 0x3fffffffffffffffn return v & 0x3fffffffffffffffn
} }
default: default:
throw "impossible" throw "impossible"
} }
} }
async done(): Promise<boolean> { async done(): Promise<boolean> {
try { try {
await this.peek(1) await this.peek(1)
return false return false
} catch (err) { } catch (err) {
return true // Assume EOF return true // Assume EOF
} }
} }
} }
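vint64 above decodes QUIC-style variable-length integers: the top two bits of the first byte give the encoded length (1, 2, 4 or 8 bytes) and the remaining bits hold the value. A round-trip on two hand-encoded values (bytes and import path are illustrative; run inside an async context):

import Reader from "./reader"

// 0x25 is the 1-byte form of 37; 0x40 0x2a is the 2-byte form of 42.
const bytes = new Uint8Array([0x25, 0x40, 0x2a])
const reader = new Reader(new ReadableStream(), bytes)

console.log(await reader.vint52()) // 37 (0x25 & 0x3f)
console.log(await reader.vint52()) // 42 (0x402a & 0x3fff)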


@@ -1,100 +1,100 @@
// Writer wraps a stream and writes chunks of data
export default class Writer {
	buffer: ArrayBuffer
	writer: WritableStreamDefaultWriter

	constructor(stream: WritableStream) {
		this.buffer = new ArrayBuffer(8)
		this.writer = stream.getWriter()
	}

	release() {
		this.writer.releaseLock()
	}

	async close() {
		return this.writer.close()
	}

	async uint8(v: number) {
		const view = new DataView(this.buffer, 0, 1)
		view.setUint8(0, v)
		return this.writer.write(view)
	}

	async uint16(v: number) {
		const view = new DataView(this.buffer, 0, 2)
		view.setUint16(0, v)
		return this.writer.write(view)
	}

	async uint24(v: number) {
		const v1 = (v >> 16) & 0xff
		const v2 = (v >> 8) & 0xff
		const v3 = v & 0xff

		const view = new DataView(this.buffer, 0, 3)
		view.setUint8(0, v1)
		view.setUint8(1, v2)
		view.setUint8(2, v3)

		return this.writer.write(view)
	}

	async uint32(v: number) {
		const view = new DataView(this.buffer, 0, 4)
		view.setUint32(0, v)
		return this.writer.write(view)
	}
	async uint52(v: number) {
		if (v > Number.MAX_SAFE_INTEGER) {
			throw "value too large"
		}

		// Return the write promise so callers can await completion and observe errors.
		return this.uint64(BigInt(v))
	}
	async vint52(v: number) {
		if (v > Number.MAX_SAFE_INTEGER) {
			throw "value too large"
		}

		if (v < 1 << 6) {
			return this.uint8(v)
		} else if (v < 1 << 14) {
			return this.uint16(v | 0x4000)
		} else if (v < 1 << 30) {
			return this.uint32(v | 0x80000000)
		} else {
			return this.uint64(BigInt(v) | 0xc000000000000000n)
		}
	}

	async uint64(v: bigint) {
		const view = new DataView(this.buffer, 0, 8)
		view.setBigUint64(0, v)
		return this.writer.write(view)
	}

	async vint64(v: bigint) {
		if (v < 1 << 6) {
			return this.uint8(Number(v))
		} else if (v < 1 << 14) {
			return this.uint16(Number(v) | 0x4000)
		} else if (v < 1 << 30) {
			return this.uint32(Number(v) | 0x80000000)
		} else {
			return this.uint64(v | 0xc000000000000000n)
		}
	}

	async bytes(buffer: ArrayBuffer) {
		return this.writer.write(buffer)
	}

	async string(str: string) {
		const data = new TextEncoder().encode(str)
		return this.writer.write(data)
	}
}
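To see what the varint encoder actually emits, here is a small sketch that collects the Writer's output into memory. The import path and the collecting sink are illustrative, not part of this change.

// Hypothetical: capture the bytes produced by vint52 for inspection.
import Writer from "../stream/writer" // path is an assumption

async function encodeVarint(value: number): Promise<Uint8Array[]> {
	const chunks: Uint8Array[] = []
	const sink = new WritableStream({
		write(chunk: DataView) {
			// The Writer reuses its internal 8-byte buffer, so copy the bytes out immediately.
			chunks.push(new Uint8Array(chunk.buffer, chunk.byteOffset, chunk.byteLength).slice())
		},
	})

	const w = new Writer(sink)
	await w.vint52(value) // 1, 2, 4, or 8 bytes depending on magnitude
	await w.close()

	return chunks
}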

View File

@ -2,95 +2,95 @@ import * as Stream from "../stream"
import * as Interface from "./interface"

export interface Config {
	url: string
	fingerprint?: WebTransportHash // the certificate fingerprint, temporarily needed for local development
}

export default class Transport {
	quic: Promise<WebTransport>
	api: Promise<WritableStream>
	callback?: Interface.Callback

	constructor(config: Config) {
		this.quic = this.connect(config)

		// Create a unidirectional stream for all of our messages
		this.api = this.quic.then((q) => {
			return q.createUnidirectionalStream()
		})

		// async functions
		this.receiveStreams()
	}

	async close() {
		;(await this.quic).close()
	}

	// Helper function to make creating a promise easier
	private async connect(config: Config): Promise<WebTransport> {
		const options: WebTransportOptions = {}
		if (config.fingerprint) {
			options.serverCertificateHashes = [config.fingerprint]
		}

		const quic = new WebTransport(config.url, options)
		await quic.ready
		return quic
	}
	async sendMessage(msg: any) {
		const payload = JSON.stringify(msg)

		// NOTE: assumes the JSON payload is ASCII, so string length equals encoded byte length.
		const size = payload.length + 8

		const stream = await this.api

		const writer = new Stream.Writer(stream)
		await writer.uint32(size)
		await writer.string("warp")
		await writer.string(payload)
		writer.release()
	}
	async receiveStreams() {
		const q = await this.quic
		const streams = q.incomingUnidirectionalStreams.getReader()

		for (;;) {
			const result = await streams.read()
			if (result.done) break

			const stream = result.value
			this.handleStream(stream) // don't await
		}
	}

	async handleStream(stream: ReadableStream) {
		const r = new Stream.Reader(stream)

		while (!(await r.done())) {
			const size = await r.uint32()
			const typ = new TextDecoder("utf-8").decode(await r.bytes(4))
			if (typ != "warp") throw "expected warp atom"
			if (size < 8) throw "atom too small"

			const payload = new TextDecoder("utf-8").decode(await r.bytes(size - 8))
			const msg = JSON.parse(payload)

			if (msg.init) {
				return this.callback?.onInit({
					buffer: r.buffer,
					reader: r.reader,
				})
			} else if (msg.segment) {
				return this.callback?.onSegment({
					buffer: r.buffer,
					reader: r.reader,
				})
			} else {
				console.warn("unknown message", msg)
			}
		}
	}
}
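For context, a sketch of how the Transport above might be wired up by a caller. The URL, the message body, and the callback behaviour are placeholders; only the shapes come from the code above.

// Hypothetical setup; not part of this change.
import Transport from "../transport" // path is an assumption

const transport = new Transport({
	url: "https://localhost:4443/watch", // placeholder URL
	// fingerprint: { algorithm: "sha-256", value: hashBytes }, // optional, for local development
})

transport.callback = {
	onInit(init) {
		// init.buffer holds bytes already read; init.reader streams the remainder.
		console.log("init:", init.buffer.byteLength, "bytes buffered")
	},
	onSegment(segment) {
		console.log("segment:", segment.buffer.byteLength, "bytes buffered")
	},
}

await transport.sendMessage({ debug: { max_bitrate: 1_000_000 } }) // illustrative message only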

View File

@ -1,14 +1,14 @@
export interface Callback {
	onInit(init: Init): any
	onSegment(segment: Segment): any
}

export interface Init {
	buffer: Uint8Array // unread buffered data
	reader: ReadableStream // unread unbuffered data
}

export interface Segment {
	buffer: Uint8Array // unread buffered data
	reader: ReadableStream // unread unbuffered data
}
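Both Init and Segment hand the consumer a buffered prefix plus the rest of the stream, so a consumer usually stitches them back together; a minimal sketch of that pattern follows (the helper name is mine, not part of this change).

// Hypothetical helper: yield the buffered prefix first, then the remaining stream chunks.
export async function* chunks(segment: Segment): AsyncGenerator<Uint8Array> {
	if (segment.buffer.byteLength > 0) yield segment.buffer

	const reader = segment.reader.getReader()
	for (;;) {
		const { done, value } = await reader.read()
		if (done) break
		yield value
	}
}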

View File

@ -3,5 +3,5 @@ export type Init = any
export type Segment = any

export interface Debug {
	max_bitrate: number
}

View File

@ -8,77 +8,77 @@ declare module "webtransport"
*/

interface WebTransportDatagramDuplexStream {
	readonly readable: ReadableStream
	readonly writable: WritableStream
	readonly maxDatagramSize: number
	incomingMaxAge: number
	outgoingMaxAge: number
	incomingHighWaterMark: number
	outgoingHighWaterMark: number
}

interface WebTransport {
	getStats(): Promise<WebTransportStats>
	readonly ready: Promise<undefined>
	readonly closed: Promise<WebTransportCloseInfo>
	close(closeInfo?: WebTransportCloseInfo): undefined
	readonly datagrams: WebTransportDatagramDuplexStream
	createBidirectionalStream(): Promise<WebTransportBidirectionalStream>
	readonly incomingBidirectionalStreams: ReadableStream
	createUnidirectionalStream(): Promise<WritableStream>
	readonly incomingUnidirectionalStreams: ReadableStream
}

declare const WebTransport: {
	prototype: WebTransport
	new (url: string, options?: WebTransportOptions): WebTransport
}

interface WebTransportHash {
	algorithm?: string
	value?: BufferSource
}

interface WebTransportOptions {
	allowPooling?: boolean
	serverCertificateHashes?: Array<WebTransportHash>
}

interface WebTransportCloseInfo {
	closeCode?: number
	reason?: string
}

interface WebTransportStats {
	timestamp?: DOMHighResTimeStamp
	bytesSent?: number
	packetsSent?: number
	numOutgoingStreamsCreated?: number
	numIncomingStreamsCreated?: number
	bytesReceived?: number
	packetsReceived?: number
	minRtt?: DOMHighResTimeStamp
	numReceivedDatagramsDropped?: number
}

interface WebTransportBidirectionalStream {
	readonly readable: ReadableStream
	readonly writable: WritableStream
}

interface WebTransportError extends DOMException {
	readonly source: WebTransportErrorSource
	readonly streamErrorCode: number
}

declare const WebTransportError: {
	prototype: WebTransportError
	new (init?: WebTransportErrorInit): WebTransportError
}

interface WebTransportErrorInit {
	streamErrorCode?: number
	message?: string
}

type WebTransportErrorSource = "stream" | "session"
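Given the WebTransportHash shape above, a sketch of turning a hex-encoded SHA-256 certificate fingerprint into the value expected by serverCertificateHashes. The helper name and the colon-separated hex format it accepts are illustrative.

// Hypothetical helper: hex fingerprint string -> WebTransportHash.
function fingerprintHash(hex: string): WebTransportHash {
	const clean = hex.replace(/:/g, "")
	const bytes = new Uint8Array(clean.length / 2)
	for (let i = 0; i < bytes.length; i += 1) {
		bytes[i] = parseInt(clean.slice(2 * i, 2 * i + 2), 16)
	}
	return { algorithm: "sha-256", value: bytes }
}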

View File

@ -1,20 +1,20 @@
export default class Deferred<T> {
	promise: Promise<T>
	resolve: (value: T | PromiseLike<T>) => void
	reject: (value: T | PromiseLike<T>) => void

	constructor() {
		// Set initial values so TS stops being annoying.
		this.resolve = (_value: T | PromiseLike<T>) => {
			/* noop */
		}
		this.reject = (_value: T | PromiseLike<T>) => {
			/* noop */
		}

		this.promise = new Promise((resolve, reject) => {
			this.resolve = resolve
			this.reject = reject
		})
	}
}
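A short sketch of the pattern this enables: handing out a promise now and settling it later from unrelated code. The names and the import path are illustrative.

// Hypothetical usage; not part of this change.
import Deferred from "../util/deferred" // path is an assumption

const result = new Deferred<number>()

// Hand result.promise to whoever needs to wait...
const waiting = result.promise.then((n) => console.log("got", n))

// ...and settle it later from an event handler, callback, etc.
result.resolve(42)

await waiting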

View File

@ -1,9 +1,9 @@
{
	"include": ["src/**/*"],
	"compilerOptions": {
		"target": "es2022",
		"module": "es2022",
		"moduleResolution": "node",
		"strict": true
	}
}

View File

@ -1265,6 +1265,13 @@ eslint-config-prettier@^8.8.0:
  resolved "https://registry.yarnpkg.com/eslint-config-prettier/-/eslint-config-prettier-8.8.0.tgz#bfda738d412adc917fd7b038857110efe98c9348"
  integrity sha512-wLbQiFre3tdGgpDv67NQKnJuTlcUVYHas3k+DZCc2U2BadthoEY4B7hLPvAxaqdyOGCzuLfii2fqGph10va7oA==

eslint-plugin-prettier@^4.2.1:
  version "4.2.1"
  resolved "https://registry.yarnpkg.com/eslint-plugin-prettier/-/eslint-plugin-prettier-4.2.1.tgz#651cbb88b1dab98bfd42f017a12fa6b2d993f94b"
  integrity sha512-f/0rXLXUt0oFYs8ra4w49wYZBG5GKZpAYsJSm6rnYL5uVDjd+zowwMwVZHnAjf4edNrKpCDYfXDgmRE/Ak7QyQ==
  dependencies:
    prettier-linter-helpers "^1.0.0"

eslint-scope@^5.1.1:
  version "5.1.1"
  resolved "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz"

@ -1374,6 +1381,11 @@ fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3:
  resolved "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz"
  integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==

fast-diff@^1.1.2:
  version "1.3.0"
  resolved "https://registry.yarnpkg.com/fast-diff/-/fast-diff-1.3.0.tgz#ece407fa550a64d638536cd727e129c61616e0f0"
  integrity sha512-VxPP4NqbUjj6MaAOafWeUn2cXWLcCtljklUtZf0Ind4XQ+QPtmA0b18zZy0jIQx+ExRVCR/ZQpBmik5lXshNsw==

fast-glob@^3.2.9:
  version "3.2.12"
  resolved "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.12.tgz"

@ -1989,6 +2001,13 @@ prelude-ls@^1.2.1:
  resolved "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz"
  integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==

prettier-linter-helpers@^1.0.0:
  version "1.0.0"
  resolved "https://registry.yarnpkg.com/prettier-linter-helpers/-/prettier-linter-helpers-1.0.0.tgz#d23d41fe1375646de2d0104d3454a3008802cf7b"
  integrity sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w==
  dependencies:
    fast-diff "^1.1.2"

prettier@^2.8.8:
  version "2.8.8"
  resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.8.8.tgz#e8c5d7e98a4305ffe3de2e1fc4aca1a71c28b1da"