use std::collections::{HashMap, VecDeque};
use std::io::Read;
use std::{fs, io, time};

use anyhow;

use mp4;
use mp4::ReadBox;

pub struct Source {
    // We read the file once, in order, and don't seek backwards.
    reader: io::BufReader<fs::File>,

    // The timestamp when the broadcast "started", so we can sleep to simulate a live stream.
    start: time::Instant,

    // The initialization payload; ftyp + moov boxes.
    pub init: Vec<u8>,

    // The timescale used for each track.
    timescales: HashMap<u32, u32>,

    // Any fragments parsed and ready to be returned by fragment().
    fragments: VecDeque<Fragment>,
}
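
// A single moof or mdat atom parsed from the file and ready to be sent to viewers.
// parse() pushes these in pairs: the moof entry carries the keyframe flag and timestamp,
// and the mdat entry that follows it reuses the same track_id and timestamp.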
pub struct Fragment {
    // The track ID for the fragment.
    pub track_id: u32,

    // The data of the fragment.
    pub data: Vec<u8>,

    // Whether this fragment is a keyframe.
    pub keyframe: bool,

    // The timestamp of the fragment, in timescale units, used to simulate a live stream.
    pub timestamp: u64,
}
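
// Roughly how a caller is expected to drive this: send `init` to each new viewer, then
// repeatedly call fragment(), forwarding each returned fragment on its track and
// sleeping for timeout() whenever None comes back.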
impl Source {
    pub fn new(path: &str) -> anyhow::Result<Self> {
        let f = fs::File::open(path)?;
        let mut reader = io::BufReader::new(f);
        let start = time::Instant::now();

        let ftyp = read_atom(&mut reader)?;
        anyhow::ensure!(&ftyp[4..8] == b"ftyp", "expected ftyp atom");

        let moov = read_atom(&mut reader)?;
        anyhow::ensure!(&moov[4..8] == b"moov", "expected moov atom");

        let mut init = ftyp;
        init.extend(&moov);

        // We're going to parse the moov box.
        // We have to read the moov box header to correctly advance the cursor for the mp4 crate.
        let mut moov_reader = io::Cursor::new(&moov);
        let moov_header = mp4::BoxHeader::read(&mut moov_reader)?;

        // Parse the moov box so we can detect the timescales for each track.
        let moov = mp4::MoovBox::read_box(&mut moov_reader, moov_header.size)?;

        Ok(Self {
            reader,
            start,
            init,
            timescales: timescales(&moov),
            fragments: VecDeque::new(),
        })
    }

    // Returns the next fragment once it is due on the simulated live timeline, or None
    // if the caller should wait for timeout() before trying again.
    pub fn fragment(&mut self) -> anyhow::Result<Option<Fragment>> {
        if self.fragments.is_empty() {
            self.parse()?;
        }

        if self.timeout().is_some() {
            return Ok(None);
        }

        Ok(self.fragments.pop_front())
    }

    fn parse(&mut self) -> anyhow::Result<()> {
        loop {
            let atom = read_atom(&mut self.reader)?;

            let mut reader = io::Cursor::new(&atom);
            let header = mp4::BoxHeader::read(&mut reader)?;

            match header.name {
                mp4::BoxType::FtypBox | mp4::BoxType::MoovBox => {
                    anyhow::bail!("must call init first")
                }
                mp4::BoxType::MoofBox => {
                    let moof = mp4::MoofBox::read_box(&mut reader, header.size)?;

                    if moof.trafs.len() != 1 {
                        // We can't split the mdat atom, so this is impossible to support.
                        anyhow::bail!("multiple tracks per moof atom")
                    }

                    self.fragments.push_back(Fragment {
                        track_id: moof.trafs[0].tfhd.track_id,
                        data: atom,
                        keyframe: has_keyframe(&moof),
                        timestamp: first_timestamp(&moof).expect("couldn't find timestamp"),
                    })
                }
                mp4::BoxType::MdatBox => {
                    let moof = self.fragments.back().expect("no atom before mdat");

                    self.fragments.push_back(Fragment {
                        track_id: moof.track_id,
                        data: atom,
                        keyframe: false,
                        timestamp: moof.timestamp,
                    });

                    // We have some media data, return so we can start sending it.
                    return Ok(());
                }
                _ => {
                    // Skip unknown atoms.
                }
            }
        }
    }

    // Simulate a live stream by sleeping until the next timestamp in the media.
    pub fn timeout(&self) -> Option<time::Duration> {
        let next = self.fragments.front()?;
        let timestamp = next.timestamp;

        // Find the timescale for the track.
        let timescale = self.timescales.get(&next.track_id).unwrap();

        // Convert from timescale units to milliseconds; e.g. a timestamp of 90_000 at a
        // 30_000 Hz timescale is due 3 seconds after the start of the broadcast.
        let delay = time::Duration::from_millis(1000 * timestamp / *timescale as u64);
        let elapsed = self.start.elapsed();

        delay.checked_sub(elapsed)
    }
}
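
// Background (ISO BMFF): an MP4 box starts with a 32-bit big-endian size that includes
// the 8-byte header, followed by a 4-byte type. A size of 0 means the box extends to the
// end of the file, and a size of 1 means a 64-bit extended size follows the type.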

// Read a full MP4 atom into a vector.
pub fn read_atom<R: Read>(reader: &mut R) -> anyhow::Result<Vec<u8>> {
    // Read the 8 bytes for the size + type.
    let mut buf = [0u8; 8];
    reader.read_exact(&mut buf)?;

    // Convert the first 4 bytes into the size.
    let size = u32::from_be_bytes(buf[0..4].try_into()?) as u64;
    //let typ = &buf[4..8].try_into().ok().unwrap();

    let mut raw = buf.to_vec();

    let mut limit = match size {
        // Runs until the end of the file.
        0 => reader.take(u64::MAX),

        // The next 8 bytes are the extended size to be used instead.
        1 => {
            reader.read_exact(&mut buf)?;
            let size_large = u64::from_be_bytes(buf);
            anyhow::ensure!(
                size_large >= 16,
                "impossible extended box size: {}",
                size_large
            );

            // Keep the extended size bytes in the returned atom so it can be re-parsed.
            raw.extend_from_slice(&buf);

            reader.take(size_large - 16)
        }

        2..=7 => {
            anyhow::bail!("impossible box size: {}", size)
        }

        // Otherwise read based on the size.
        size => reader.take(size - 8),
    };

    // Append to the vector and return it.
    limit.read_to_end(&mut raw)?;

    Ok(raw)
}

fn has_keyframe(moof: &mp4::MoofBox) -> bool {
    for traf in &moof.trafs {
        // TODO trak default flags if this is None
        let default_flags = traf.tfhd.default_sample_flags.unwrap_or_default();
        let trun = match &traf.trun {
            Some(t) => t,
            None => return false,
        };

        for i in 0..trun.sample_count {
            let mut flags = match trun.sample_flags.get(i as usize) {
                Some(f) => *f,
                None => default_flags,
            };

            if i == 0 && trun.first_sample_flags.is_some() {
                flags = trun.first_sample_flags.unwrap();
            }

            // https://chromium.googlesource.com/chromium/src/media/+/master/formats/mp4/track_run_iterator.cc#177
            let keyframe = (flags >> 24) & 0x3 == 0x2; // kSampleDependsOnNoOther
            let non_sync = (flags >> 16) & 0x1 == 0x1; // kSampleIsNonSyncSample

            if keyframe && !non_sync {
                return true;
            }
        }
    }

    false
}

fn first_timestamp(moof: &mp4::MoofBox) -> Option<u64> {
    Some(moof.trafs.first()?.tfdt.as_ref()?.base_media_decode_time)
}

fn timescales(moov: &mp4::MoovBox) -> HashMap<u32, u32> {
    moov.traks
        .iter()
        .map(|trak| (trak.tkhd.track_id, trak.mdia.mdhd.timescale))
        .collect()
}
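
#[cfg(test)]
mod tests {
    use super::*;

    // A couple of small sanity checks, added as a sketch of how the helpers above are
    // expected to behave; they only exercise read_atom and the flag arithmetic, not the
    // mp4-crate parsing.

    // Build a synthetic 16-byte "free" box (8-byte header plus 8 bytes of payload) and
    // check that read_atom returns the entire box, header included.
    #[test]
    fn read_atom_returns_full_box() -> anyhow::Result<()> {
        let mut atom = Vec::new();
        atom.extend_from_slice(&16u32.to_be_bytes()); // 32-bit size, header included
        atom.extend_from_slice(b"free"); // box type
        atom.extend_from_slice(&[0u8; 8]); // payload

        let mut reader = std::io::Cursor::new(atom.clone());
        assert_eq!(read_atom(&mut reader)?, atom);
        Ok(())
    }

    // The sample-flags bit layout that has_keyframe relies on: bits 24-25 hold
    // sample_depends_on (2 = depends on no other sample) and bit 16 holds
    // sample_is_non_sync_sample.
    #[test]
    fn sample_flags_bit_layout() {
        let sync: u32 = 0x0200_0000; // depends on no other sample, sync sample
        assert_eq!((sync >> 24) & 0x3, 0x2);
        assert_eq!((sync >> 16) & 0x1, 0x0);

        let non_sync: u32 = 0x0101_0000; // depends on another sample, non-sync sample
        assert_ne!((non_sync >> 24) & 0x3, 0x2);
        assert_eq!((non_sync >> 16) & 0x1, 0x1);
    }
}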