Add stream timestamp implementation to WASAPI backend

This was written on Linux; while the code type-checks, it has not yet been tested.
mitchmindtree 2020-04-29 14:59:25 +02:00
parent 777a6b2bd1
commit cdf23c3c62
3 changed files with 128 additions and 7 deletions

Cargo.toml

@@ -22,7 +22,7 @@ hound = "3.4"
 ringbuf = "0.1.6"
 
 [target.'cfg(target_os = "windows")'.dependencies]
-winapi = { version = "0.3", features = ["audiosessiontypes", "audioclient", "coml2api", "combaseapi", "debug", "devpkey", "handleapi", "ksmedia", "mmdeviceapi", "objbase", "std", "synchapi", "winbase", "winuser"] }
+winapi = { version = "0.3", features = ["audiosessiontypes", "audioclient", "coml2api", "combaseapi", "debug", "devpkey", "handleapi", "ksmedia", "mmdeviceapi", "objbase", "profileapi", "std", "synchapi", "winbase", "winuser"] }
 asio-sys = { version = "0.1", path = "asio-sys", optional = true }
 parking_lot = "0.9"

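The only manifest change is the new "profileapi" winapi feature, which exposes the QueryPerformanceCounter/QueryPerformanceFrequency bindings. The WASAPI positions used further down arrive already scaled to 100-nanosecond units, so the backend itself only multiplies by 100; presumably the feature was added so that QPC-based stream instants can be correlated with raw performance-counter readings on the host side. As a rough sketch (the `qpc_nanos` helper is hypothetical and not part of this commit), converting a raw counter reading to nanoseconds looks like this:

    // Hypothetical helper, for illustration only: convert a raw QPC reading
    // to nanoseconds using the counter frequency reported by `profileapi`.
    use winapi::um::profileapi::{QueryPerformanceCounter, QueryPerformanceFrequency};
    use winapi::um::winnt::LARGE_INTEGER;

    unsafe fn qpc_nanos() -> i128 {
        let mut ticks: LARGE_INTEGER = std::mem::zeroed();
        let mut freq: LARGE_INTEGER = std::mem::zeroed();
        // Both calls always succeed on Windows XP and later.
        QueryPerformanceCounter(&mut ticks);
        QueryPerformanceFrequency(&mut freq);
        // Scale to nanoseconds before dividing so integer division
        // does not truncate sub-second precision.
        *ticks.QuadPart() as i128 * 1_000_000_000 / *freq.QuadPart() as i128
    }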
src/host/wasapi/device.rs

@@ -42,8 +42,7 @@ use super::winapi::um::mmdeviceapi::{
     eAll, eCapture, eConsole, eRender, CLSID_MMDeviceEnumerator, EDataFlow, IMMDevice,
     IMMDeviceCollection, IMMDeviceEnumerator, IMMEndpoint, DEVICE_STATE_ACTIVE,
 };
-use super::winapi::um::winnt::LPWSTR;
-use super::winapi::um::winnt::WCHAR;
+use super::winapi::um::winnt::{LPWSTR, WCHAR};
 
 use super::{
     stream::{AudioClientFlow, Stream, StreamInner},
@@ -750,13 +749,20 @@ impl Device {
         // `run()` method and added to the `RunContext`.
         let client_flow = AudioClientFlow::Capture { capture_client };
 
+        let audio_clock = get_audio_clock(audio_client).map_err(|err| {
+            (*audio_client).Release();
+            err
+        })?;
+
         Ok(StreamInner {
             audio_client,
+            audio_clock,
             client_flow,
             event,
             playing: false,
             max_frames_in_buffer,
             bytes_per_frame: waveformatex.nBlockAlign,
+            config: config.clone(),
             sample_format,
         })
     }
@@ -895,13 +901,20 @@ impl Device {
         // `run()` method and added to the `RunContext`.
         let client_flow = AudioClientFlow::Render { render_client };
 
+        let audio_clock = get_audio_clock(audio_client).map_err(|err| {
+            (*audio_client).Release();
+            err
+        })?;
+
         Ok(StreamInner {
             audio_client,
+            audio_clock,
             client_flow,
             event,
             playing: false,
             max_frames_in_buffer,
             bytes_per_frame: waveformatex.nBlockAlign,
+            config: config.clone(),
             sample_format,
         })
     }
@@ -1147,6 +1160,29 @@ pub fn default_output_device() -> Option<Device> {
     default_device(eRender)
 }
 
+/// Get the audio clock used to produce `StreamInstant`s.
+unsafe fn get_audio_clock(
+    audio_client: *mut audioclient::IAudioClient,
+) -> Result<*mut audioclient::IAudioClock, BuildStreamError> {
+    let mut audio_clock: *mut audioclient::IAudioClock = ptr::null_mut();
+    let hresult = (*audio_client).GetService(
+        &audioclient::IID_IAudioClock,
+        &mut audio_clock as *mut *mut audioclient::IAudioClock as *mut _,
+    );
+    match check_result(hresult) {
+        Err(ref e) if e.raw_os_error() == Some(AUDCLNT_E_DEVICE_INVALIDATED) => {
+            return Err(BuildStreamError::DeviceNotAvailable);
+        }
+        Err(e) => {
+            let description = format!("failed to build audio clock: {}", e);
+            let err = BackendSpecificError { description };
+            return Err(err.into());
+        }
+        Ok(()) => (),
+    };
+    Ok(audio_clock)
+}
+
 // Turns a `Format` into a `WAVEFORMATEXTENSIBLE`.
 //
 // Returns `None` if the WAVEFORMATEXTENSIBLE does not support the given format.
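For context, the `IAudioClock` service retrieved above reports two values through `GetPosition`: a device position (in units of the clock's own frequency, see `IAudioClock::GetFrequency`) and a position correlated with the system performance counter in 100-nanosecond units. The timestamp code in stream.rs below relies only on the latter. A minimal sketch of reading both values, mirroring the error handling of `get_audio_clock` (illustrative only, not part of this commit):

    // Illustrative sketch: read both positions from an `IAudioClock`.
    // `audio_clock` is assumed to be a valid, non-null pointer.
    unsafe fn clock_positions(
        audio_clock: *mut audioclient::IAudioClock,
    ) -> Result<(u64, u64), BuildStreamError> {
        let mut device_position: u64 = 0;
        let mut qpc_position: u64 = 0;
        let hresult = (*audio_clock).GetPosition(&mut device_position, &mut qpc_position);
        match check_result(hresult) {
            Err(ref e) if e.raw_os_error() == Some(AUDCLNT_E_DEVICE_INVALIDATED) => {
                Err(BuildStreamError::DeviceNotAvailable)
            }
            Err(e) => {
                let description = format!("failed to read audio clock position: {}", e);
                Err(BackendSpecificError { description }.into())
            }
            Ok(()) => Ok((device_position, qpc_position)),
        }
    }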

src/host/wasapi/stream.rs

@@ -1,5 +1,5 @@
 use super::check_result;
-use super::winapi::shared::basetsd::UINT32;
+use super::winapi::shared::basetsd::{UINT32, UINT64};
 use super::winapi::shared::minwindef::{BYTE, FALSE, WORD};
 use super::winapi::um::audioclient::{self, AUDCLNT_E_DEVICE_INVALIDATED, AUDCLNT_S_BUFFER_EMPTY};
 use super::winapi::um::handleapi;
@@ -64,6 +64,7 @@ pub enum AudioClientFlow {
 
 pub struct StreamInner {
     pub audio_client: *mut audioclient::IAudioClient,
+    pub audio_clock: *mut audioclient::IAudioClock,
     pub client_flow: AudioClientFlow,
     // Event that is signalled by WASAPI whenever audio data must be written.
     pub event: winnt::HANDLE,
@@ -73,6 +74,8 @@ pub struct StreamInner {
     pub max_frames_in_buffer: UINT32,
     // Number of bytes that each frame occupies.
     pub bytes_per_frame: WORD,
+    // The configuration with which the stream was created.
+    pub config: crate::StreamConfig,
     // The sample format with which the stream was created.
     pub sample_format: SampleFormat,
 }
@@ -185,6 +188,7 @@ impl Drop for StreamInner {
     fn drop(&mut self) {
         unsafe {
             (*self.audio_client).Release();
+            (*self.audio_clock).Release();
             handleapi::CloseHandle(self.event);
         }
     }
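`StreamInner` holds raw COM pointers, so the new `audio_clock` must be released manually in `Drop`, exactly as `audio_client` already is. A hypothetical RAII wrapper (not used by the backend, shown only to illustrate the alternative) would make the release automatic:

    // Hypothetical sketch only: a guard that releases an `IAudioClock`
    // when it goes out of scope, so `Drop` cannot forget it.
    struct AudioClock(*mut audioclient::IAudioClock);

    impl Drop for AudioClock {
        fn drop(&mut self) {
            unsafe {
                (*self.0).Release();
            }
        }
    }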
@@ -388,12 +392,13 @@ fn process_input(
             if frames_available == 0 {
                 return ControlFlow::Continue;
             }
 
+            let mut qpc_position: UINT64 = 0;
             let hresult = (*capture_client).GetBuffer(
                 &mut buffer,
                 &mut frames_available,
                 flags.as_mut_ptr(),
                 ptr::null_mut(),
-                ptr::null_mut(),
+                &mut qpc_position,
             );
             // TODO: Can this happen?
@@ -410,7 +415,16 @@ fn process_input(
             let len = frames_available as usize * stream.bytes_per_frame as usize
                 / stream.sample_format.sample_size();
             let data = Data::from_parts(data, len, stream.sample_format);
-            let info = InputCallbackInfo {};
+
+            // The `qpc_position` is in 100 nanosecond units. Convert it to nanoseconds.
+            let timestamp = match input_timestamp(stream, qpc_position) {
+                Ok(ts) => ts,
+                Err(err) => {
+                    error_callback(err);
+                    return ControlFlow::Break;
+                }
+            };
+            let info = InputCallbackInfo { timestamp };
             data_callback(&data, &info);
 
             // Release the buffer.
@@ -455,7 +469,15 @@ fn process_output(
             let len = frames_available as usize * stream.bytes_per_frame as usize
                 / stream.sample_format.sample_size();
             let mut data = Data::from_parts(data, len, stream.sample_format);
-            let info = OutputCallbackInfo {};
+            let sample_rate = stream.config.sample_rate;
+            let timestamp = match output_timestamp(stream, frames_available, sample_rate) {
+                Ok(ts) => ts,
+                Err(err) => {
+                    error_callback(err);
+                    return ControlFlow::Break;
+                }
+            };
+            let info = OutputCallbackInfo { timestamp };
             data_callback(&mut data, &info);
 
             let hresult = (*render_client).ReleaseBuffer(frames_available as u32, 0);
@@ -467,3 +489,66 @@ fn process_output(
 
     ControlFlow::Continue
 }
+
+/// Convert the given number of frames at the given sample rate to a `std::time::Duration`.
+fn frames_to_duration(frames: u32, rate: crate::SampleRate) -> std::time::Duration {
+    let secsf = frames as f64 / rate.0 as f64;
+    let secs = secsf as u64;
+    let nanos = ((secsf - secs as f64) * 1_000_000_000.0) as u32;
+    std::time::Duration::new(secs, nanos)
+}
+
+/// Use the stream's `IAudioClock` to produce the current stream instant.
+///
+/// Uses the QPC position produced via the `GetPosition` method.
+fn stream_instant(stream: &StreamInner) -> Result<crate::StreamInstant, StreamError> {
+    let mut position: UINT64 = 0;
+    let mut qpc_position: UINT64 = 0;
+    let res = unsafe { (*stream.audio_clock).GetPosition(&mut position, &mut qpc_position) };
+    stream_error_from_hresult(res)?;
+    // The `qpc_position` is in 100 nanosecond units. Convert it to nanoseconds.
+    let qpc_nanos = qpc_position as i128 * 100;
+    let instant = crate::StreamInstant::from_nanos_i128(qpc_nanos)
+        .expect("performance counter out of range of `StreamInstant` representation");
+    Ok(instant)
+}
+
+/// Produce the input stream timestamp.
+///
+/// `buffer_qpc_position` is the `qpc_position` returned via the `GetBuffer` call on the capture
+/// client. It represents the instant at which the first sample of the retrieved buffer was
+/// captured.
+fn input_timestamp(
+    stream: &StreamInner,
+    buffer_qpc_position: UINT64,
+) -> Result<crate::InputStreamTimestamp, StreamError> {
+    // The `buffer_qpc_position` is in 100 nanosecond units. Convert it to nanoseconds.
+    let qpc_nanos = buffer_qpc_position as i128 * 100;
+    let capture = crate::StreamInstant::from_nanos_i128(qpc_nanos)
+        .expect("performance counter out of range of `StreamInstant` representation");
+    let callback = stream_instant(stream)?;
+    Ok(crate::InputStreamTimestamp { capture, callback })
+}
+
+/// Produce the output stream timestamp.
+///
+/// `frames_available` is the number of frames available for writing as reported by subtracting the
+/// result of `GetCurrentPadding` from the maximum buffer size.
+///
+/// `sample_rate` is the rate at which audio frames are processed by the device.
+///
+/// TODO: The returned `playback` is an estimate that assumes audio is delivered immediately after
+/// `frames_available` are consumed. In reality there is likely a small amount of extra latency
+/// after this point, but it is not yet clear how to measure it.
+fn output_timestamp(
+    stream: &StreamInner,
+    frames_available: u32,
+    sample_rate: crate::SampleRate,
+) -> Result<crate::OutputStreamTimestamp, StreamError> {
+    let callback = stream_instant(stream)?;
+    let buffer_duration = frames_to_duration(frames_available, sample_rate);
+    let playback = callback
+        .add(buffer_duration)
+        .expect("`playback` occurs beyond representation supported by `StreamInstant`");
+    Ok(crate::OutputStreamTimestamp { callback, playback })
+}
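A quick sanity check on the two conversions above: 480 frames at 48 000 Hz is 480 / 48 000 = 0.01 s, i.e. 10 ms, and a QPC position of 1 000 000 hundred-nanosecond units is 100 000 000 ns, i.e. 100 ms. Expressed as a hypothetical test (not part of this commit):

    // Illustrative test of the conversion arithmetic; not in the commit.
    #[test]
    fn conversion_sanity() {
        // 480 frames at 48_000 Hz is exactly 10 ms.
        let d = frames_to_duration(480, crate::SampleRate(48_000));
        assert_eq!(d, std::time::Duration::from_millis(10));
        // 1_000_000 hundred-nanosecond units is 100_000_000 ns, i.e. 100 ms.
        assert_eq!(1_000_000i128 * 100, 100_000_000);
    }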
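Downstream, these timestamps surface through the callback info types. A sketch of how a user of the library might estimate output latency from them (assuming cpal's public `timestamp()` accessor and `StreamInstant::duration_since`; not part of this commit):

    // Sketch of a user-side data callback consuming the new timestamps.
    let data_fn = move |data: &mut cpal::Data, info: &cpal::OutputCallbackInfo| {
        let timestamp = info.timestamp();
        // Estimated delay between this callback running and the written
        // audio actually reaching the device. `duration_since` returns
        // `None` if the instants are not ordered as expected.
        if let Some(latency) = timestamp.playback.duration_since(&timestamp.callback) {
            println!("estimated output latency: {:?}", latency);
        }
        // ... write samples to `data` ...
    };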