Merge pull request #354 from mitchmindtree/no-eventloop-rebased

Removing the `EventLoop` - rebased
mitchmindtree, 2020-01-13 12:50:05 +01:00, committed by GitHub
commit 59ac088167
20 changed files with 1994 additions and 2902 deletions
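
In short, the change replaces the shared `EventLoop` with streams built directly on a `Device` and controlled through a new `StreamTrait`. The sketch below shows the new shape of the API; it is distilled from the updated `examples/beep.rs` further down rather than copied from it, and the silence-writing callback is only a placeholder.

```rust
extern crate anyhow;
extern crate cpal;

use cpal::traits::{DeviceTrait, HostTrait, StreamTrait};

fn main() -> Result<(), anyhow::Error> {
    let host = cpal::default_host();
    let device = host
        .default_output_device()
        .expect("failed to find a default output device");
    let format = device.default_output_format()?;

    // Each stream now owns its data callback; errors arrive through a
    // separate error callback instead of a `Result` per buffer.
    let stream = device.build_output_stream(
        &format,
        move |data| {
            if let cpal::StreamData::Output {
                buffer: cpal::UnknownTypeOutputBuffer::F32(mut buffer),
            } = data
            {
                for sample in buffer.iter_mut() {
                    *sample = 0.0; // write silence
                }
            }
        },
        move |err| eprintln!("an error occurred on the stream: {}", err),
    )?;

    // `play`/`pause` replace `EventLoop::play_stream`/`pause_stream`, and
    // dropping the `Stream` tears it down; there is no `run` loop to block on.
    stream.play()?;
    std::thread::sleep(std::time::Duration::from_secs(1));
    Ok(())
}
```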

.gitignore

@@ -3,3 +3,4 @@
 .cargo/
 .DS_Store
 recorded.wav
+rls*.log

Cargo.toml

@@ -24,6 +24,7 @@ ringbuf = "0.1.6"
 [target.'cfg(target_os = "windows")'.dependencies]
 winapi = { version = "0.3", features = ["audiosessiontypes", "audioclient", "coml2api", "combaseapi", "debug", "devpkey", "handleapi", "ksmedia", "mmdeviceapi", "objbase", "std", "synchapi", "winbase", "winuser"] }
 asio-sys = { version = "0.1", path = "asio-sys", optional = true }
+parking_lot = "0.9"
 
 [target.'cfg(any(target_os = "linux", target_os = "dragonfly", target_os = "freebsd", target_os = "openbsd"))'.dependencies]
 alsa-sys = { version = "0.1", path = "alsa-sys" }

asio-sys/src/lib.rs

@@ -7,7 +7,7 @@ use self::errors::{AsioError, AsioErrorWrapper, LoadDriverError};
 use std::ffi::CStr;
 use std::ffi::CString;
 use std::os::raw::{c_char, c_double, c_long, c_void};
-use std::sync::{Arc, Mutex, Weak};
+use std::sync::{Arc, Mutex, MutexGuard, Weak};
 
 // Bindings import
 use self::asio_import as ai;
@@ -85,7 +85,7 @@ pub struct SampleRate {
 }
 
 /// Holds the pointer to the callbacks that come from cpal
-struct BufferCallback(Box<FnMut(i32) + Send>);
+struct BufferCallback(Box<dyn FnMut(i32) + Send>);
 
 /// Input and Output streams.
 ///
@@ -235,6 +235,9 @@ struct BufferSizes {
     grans: c_long,
 }
 
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub struct CallbackId(usize);
+
 lazy_static! {
     /// A global way to access all the callbacks.
     ///
@@ -244,7 +247,7 @@ lazy_static! {
     /// Options are used so that when a callback is removed we don't change the Vec indices.
    ///
     /// The indices are how we match a callback with a stream.
-    static ref BUFFER_CALLBACK: Mutex<Vec<Option<BufferCallback>>> = Mutex::new(Vec::new());
+    static ref BUFFER_CALLBACK: Mutex<Vec<(CallbackId, BufferCallback)>> = Mutex::new(Vec::new());
 }
 
 impl Asio {
@@ -419,6 +422,8 @@ impl Driver {
         // To pass as ai::ASIOCallbacks
         let mut callbacks = create_asio_callbacks();
 
+        let mut state = self.inner.lock_state();
+
         // Retrieve the available buffer sizes.
         let buffer_sizes = asio_get_buffer_sizes()?;
         if buffer_sizes.pref <= 0 {
@@ -429,13 +434,12 @@
         }
 
         // Ensure the driver is in the `Initialized` state.
-        if let DriverState::Running = self.inner.state() {
-            self.stop()?;
+        if let DriverState::Running = *state {
+            state.stop()?;
         }
-        if let DriverState::Prepared = self.inner.state() {
-            self.dispose_buffers()?;
+        if let DriverState::Prepared = *state {
+            state.dispose_buffers()?;
         }
 
         unsafe {
             asio_result!(ai::ASIOCreateBuffers(
                 buffer_infos.as_mut_ptr() as *mut _,
@@ -444,8 +448,8 @@
                 &mut callbacks as *mut _ as *mut _,
             ))?;
         }
-        self.inner.set_state(DriverState::Prepared);
+        *state = DriverState::Prepared;
         Ok(buffer_sizes.pref)
     }
@@ -566,13 +570,14 @@
     ///
     /// No-op if already `Running`.
     pub fn start(&self) -> Result<(), AsioError> {
-        if let DriverState::Running = self.inner.state() {
+        let mut state = self.inner.lock_state();
+        if let DriverState::Running = *state {
             return Ok(());
         }
         unsafe {
             asio_result!(ai::ASIOStart())?;
         }
-        self.inner.set_state(DriverState::Running);
+        *state = DriverState::Running;
         Ok(())
     }
@@ -589,12 +594,26 @@
     /// Adds a callback to the list of active callbacks.
     ///
     /// The given function receives the index of the buffer currently ready for processing.
-    pub fn set_callback<F>(&self, callback: F)
+    ///
+    /// Returns an ID uniquely associated with the given callback so that it may be removed later.
+    pub fn add_callback<F>(&self, callback: F) -> CallbackId
     where
         F: 'static + FnMut(i32) + Send,
     {
         let mut bc = BUFFER_CALLBACK.lock().unwrap();
-        bc.push(Some(BufferCallback(Box::new(callback))));
+        let id = bc
+            .last()
+            .map(|&(id, _)| CallbackId(id.0.checked_add(1).expect("stream ID overflowed")))
+            .unwrap_or(CallbackId(0));
+        let cb = BufferCallback(Box::new(callback));
+        bc.push((id, cb));
+        id
+    }
+
+    /// Remove the callback with the given ID.
+    pub fn remove_callback(&self, rem_id: CallbackId) {
+        let mut bc = BUFFER_CALLBACK.lock().unwrap();
+        bc.retain(|&(id, _)| id != rem_id);
     }
 
     /// Consumes and destroys the `Driver`, stopping the streams if they are running and releasing
@@ -618,55 +637,70 @@
     }
 }
 
-impl DriverInner {
-    fn state(&self) -> DriverState {
-        *self.state.lock().expect("failed to lock `DriverState`")
-    }
-
-    fn set_state(&self, state: DriverState) {
-        *self.state.lock().expect("failed to lock `DriverState`") = state;
-    }
-
-    fn stop_inner(&self) -> Result<(), AsioError> {
-        if let DriverState::Running = self.state() {
+impl DriverState {
+    fn stop(&mut self) -> Result<(), AsioError> {
+        if let DriverState::Running = *self {
             unsafe {
                 asio_result!(ai::ASIOStop())?;
             }
-            self.set_state(DriverState::Prepared);
+            *self = DriverState::Prepared;
         }
         Ok(())
     }
 
-    fn dispose_buffers_inner(&self) -> Result<(), AsioError> {
-        if let DriverState::Initialized = self.state() {
+    fn dispose_buffers(&mut self) -> Result<(), AsioError> {
+        if let DriverState::Initialized = *self {
             return Ok(());
         }
-        if let DriverState::Running = self.state() {
-            self.stop_inner()?;
+        if let DriverState::Running = *self {
+            self.stop()?;
        }
         unsafe {
             asio_result!(ai::ASIODisposeBuffers())?;
         }
-        self.set_state(DriverState::Initialized);
+        *self = DriverState::Initialized;
         Ok(())
     }
 
-    fn destroy_inner(&mut self) -> Result<(), AsioError> {
-        // Drop back through the driver state machine one state at a time.
-        if let DriverState::Running = self.state() {
-            self.stop_inner()?;
+    fn destroy(&mut self) -> Result<(), AsioError> {
+        if let DriverState::Running = *self {
+            self.stop()?;
        }
-        if let DriverState::Prepared = self.state() {
-            self.dispose_buffers_inner()?;
+        if let DriverState::Prepared = *self {
+            self.dispose_buffers()?;
        }
         unsafe {
             asio_result!(ai::ASIOExit())?;
             ai::remove_current_driver();
         }
-        // Clear any existing stream callbacks.
-        if let Ok(mut bcs) = BUFFER_CALLBACK.lock() {
-            bcs.clear();
+        Ok(())
+    }
+}
+
+impl DriverInner {
+    fn lock_state(&self) -> MutexGuard<DriverState> {
+        self.state.lock().expect("failed to lock `DriverState`")
+    }
+
+    fn stop_inner(&self) -> Result<(), AsioError> {
+        let mut state = self.lock_state();
+        state.stop()
+    }
+
+    fn dispose_buffers_inner(&self) -> Result<(), AsioError> {
+        let mut state = self.lock_state();
+        state.dispose_buffers()
+    }
+
+    fn destroy_inner(&mut self) -> Result<(), AsioError> {
+        {
+            let mut state = self.lock_state();
+            state.destroy()?;
+
+            // Clear any existing stream callbacks.
+            if let Ok(mut bcs) = BUFFER_CALLBACK.lock() {
+                bcs.clear();
+            }
         }
 
         // Signal that the driver has been destroyed.
@@ -863,10 +897,8 @@ extern "C" fn buffer_switch_time_info(
 ) -> *mut ai::ASIOTime {
     // This lock is probably unavoidable, but locks in the audio stream are not great.
     let mut bcs = BUFFER_CALLBACK.lock().unwrap();
-    for mut bc in bcs.iter_mut() {
-        if let Some(ref mut bc) = bc {
-            bc.run(double_buffer_index);
-        }
+    for &mut (_, ref mut bc) in bcs.iter_mut() {
+        bc.run(double_buffer_index);
     }
     time
 }
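
On the host side, the practical effect of the `asio-sys` changes above is that callbacks are registered and removed per stream rather than set once per driver. A rough sketch of that pairing follows; the two helper functions are illustrative and not part of the diff.

```rust
extern crate asio_sys as sys;

// Each CPAL stream installs its own closure on an already-loaded driver and
// keeps the returned `CallbackId` so it can unregister itself later.
fn register_stream_callback(driver: &sys::Driver) -> sys::CallbackId {
    driver.add_callback(|buffer_index| {
        // `buffer_index` selects which half of the ASIO double buffer is ready.
        let _ = buffer_index;
    })
}

// Removing one callback leaves any other streams on the same driver running;
// this is what `Stream::drop` does in the new ASIO host further down.
fn unregister_stream_callback(driver: &sys::Driver, id: sys::CallbackId) {
    driver.remove_callback(id);
}
```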

examples/beep.rs

@@ -1,37 +1,26 @@
 extern crate anyhow;
 extern crate cpal;
 
-use cpal::traits::{DeviceTrait, EventLoopTrait, HostTrait};
+use cpal::traits::{DeviceTrait, StreamTrait, HostTrait};
 
 fn main() -> Result<(), anyhow::Error> {
     let host = cpal::default_host();
     let device = host.default_output_device().expect("failed to find a default output device");
     let format = device.default_output_format()?;
-    let event_loop = host.event_loop();
-    let stream_id = event_loop.build_output_stream(&device, &format)?;
-    event_loop.play_stream(stream_id.clone())?;
 
     let sample_rate = format.sample_rate.0 as f32;
+    let channels = format.channels;
     let mut sample_clock = 0f32;
 
     // Produce a sinusoid of maximum amplitude.
-    let mut next_value = || {
+    let mut next_value = move || {
         sample_clock = (sample_clock + 1.0) % sample_rate;
         (sample_clock * 440.0 * 2.0 * 3.141592 / sample_rate).sin()
     };
 
-    event_loop.run(move |id, result| {
-        let data = match result {
-            Ok(data) => data,
-            Err(err) => {
-                eprintln!("an error occurred on stream {:?}: {}", id, err);
-                return;
-            }
-        };
-
+    let stream = device.build_output_stream(&format, move |data| {
         match data {
             cpal::StreamData::Output { buffer: cpal::UnknownTypeOutputBuffer::U16(mut buffer) } => {
-                for sample in buffer.chunks_mut(format.channels as usize) {
+                for sample in buffer.chunks_mut(channels as usize) {
                     let value = ((next_value() * 0.5 + 0.5) * std::u16::MAX as f32) as u16;
                     for out in sample.iter_mut() {
                         *out = value;
@@ -39,7 +28,7 @@ fn main() -> Result<(), anyhow::Error> {
                 }
             },
             cpal::StreamData::Output { buffer: cpal::UnknownTypeOutputBuffer::I16(mut buffer) } => {
-                for sample in buffer.chunks_mut(format.channels as usize) {
+                for sample in buffer.chunks_mut(channels as usize) {
                     let value = (next_value() * std::i16::MAX as f32) as i16;
                     for out in sample.iter_mut() {
                         *out = value;
@@ -47,7 +36,7 @@ fn main() -> Result<(), anyhow::Error> {
                 }
             },
             cpal::StreamData::Output { buffer: cpal::UnknownTypeOutputBuffer::F32(mut buffer) } => {
-                for sample in buffer.chunks_mut(format.channels as usize) {
+                for sample in buffer.chunks_mut(channels as usize) {
                     let value = next_value();
                     for out in sample.iter_mut() {
                         *out = value;
@@ -56,5 +45,12 @@ fn main() -> Result<(), anyhow::Error> {
             },
             _ => (),
         }
-    });
+    }, move |err| {
+        eprintln!("an error occurred on stream: {}", err);
+    })?;
+    stream.play()?;
+
+    std::thread::sleep(std::time::Duration::from_millis(1000));
+
+    Ok(())
 }

examples/feedback.rs

@ -10,18 +10,21 @@ extern crate anyhow;
extern crate cpal; extern crate cpal;
extern crate ringbuf; extern crate ringbuf;
use cpal::traits::{DeviceTrait, EventLoopTrait, HostTrait}; use cpal::traits::{DeviceTrait, HostTrait, StreamTrait};
use ringbuf::RingBuffer; use ringbuf::RingBuffer;
const LATENCY_MS: f32 = 150.0; const LATENCY_MS: f32 = 150.0;
fn main() -> Result<(), anyhow::Error> { fn main() -> Result<(), anyhow::Error> {
let host = cpal::default_host(); let host = cpal::default_host();
let event_loop = host.event_loop();
// Default devices. // Default devices.
let input_device = host.default_input_device().expect("failed to get default input device"); let input_device = host
let output_device = host.default_output_device().expect("failed to get default output device"); .default_input_device()
.expect("failed to get default input device");
let output_device = host
.default_output_device()
.expect("failed to get default output device");
println!("Using default input device: \"{}\"", input_device.name()?); println!("Using default input device: \"{}\"", input_device.name()?);
println!("Using default output device: \"{}\"", output_device.name()?); println!("Using default output device: \"{}\"", output_device.name()?);
@ -29,12 +32,6 @@ fn main() -> Result<(), anyhow::Error> {
let mut format = input_device.default_input_format()?; let mut format = input_device.default_input_format()?;
format.data_type = cpal::SampleFormat::F32; format.data_type = cpal::SampleFormat::F32;
// Build streams.
println!("Attempting to build both streams with `{:?}`.", format);
let input_stream_id = event_loop.build_input_stream(&input_device, &format)?;
let output_stream_id = event_loop.build_output_stream(&output_device, &format)?;
println!("Successfully built streams.");
// Create a delay in case the input and output devices aren't synced. // Create a delay in case the input and output devices aren't synced.
let latency_frames = (LATENCY_MS / 1_000.0) * format.sample_rate.0 as f32; let latency_frames = (LATENCY_MS / 1_000.0) * format.sample_rate.0 as f32;
let latency_samples = latency_frames as usize * format.channels as usize; let latency_samples = latency_frames as usize * format.channels as usize;
@ -50,59 +47,67 @@ fn main() -> Result<(), anyhow::Error> {
producer.push(0.0).unwrap(); producer.push(0.0).unwrap();
} }
// Play the streams. // Build streams.
println!("Starting the input and output streams with `{}` milliseconds of latency.", LATENCY_MS); println!("Attempting to build both streams with `{:?}`.", format);
event_loop.play_stream(input_stream_id.clone())?; let input_stream = input_device.build_input_stream(&format, move |data| {
event_loop.play_stream(output_stream_id.clone())?; match data {
cpal::StreamData::Input {
// Run the event loop on a separate thread. buffer: cpal::UnknownTypeInputBuffer::F32(buffer),
std::thread::spawn(move || { } => {
event_loop.run(move |id, result| { let mut output_fell_behind = false;
let data = match result { for &sample in buffer.iter() {
Ok(data) => data, if producer.push(sample).is_err() {
Err(err) => { output_fell_behind = true;
eprintln!("an error occurred on stream {:?}: {}", id, err); }
return;
} }
}; if output_fell_behind {
eprintln!("output stream fell behind: try increasing latency");
}
},
_ => panic!("Expected input with f32 data"),
}
}, move |err| {
eprintln!("an error occurred on input stream: {}", err);
})?;
let output_stream = output_device.build_output_stream(&format, move |data| {
match data {
cpal::StreamData::Output {
buffer: cpal::UnknownTypeOutputBuffer::F32(mut buffer),
} => {
let mut input_fell_behind = None;
for sample in buffer.iter_mut() {
*sample = match consumer.pop() {
Ok(s) => s,
Err(err) => {
input_fell_behind = Some(err);
0.0
},
};
}
if let Some(err) = input_fell_behind {
eprintln!("input stream fell behind: {:?}: try increasing latency", err);
}
},
_ => panic!("Expected output with f32 data"),
}
}, move |err| {
eprintln!("an error occurred on output stream: {}", err);
})?;
println!("Successfully built streams.");
match data { // Play the streams.
cpal::StreamData::Input { buffer: cpal::UnknownTypeInputBuffer::F32(buffer) } => { println!(
assert_eq!(id, input_stream_id); "Starting the input and output streams with `{}` milliseconds of latency.",
let mut output_fell_behind = false; LATENCY_MS
for &sample in buffer.iter() { );
if producer.push(sample).is_err() { input_stream.play()?;
output_fell_behind = true; output_stream.play()?;
}
}
if output_fell_behind {
eprintln!("output stream fell behind: try increasing latency");
}
},
cpal::StreamData::Output { buffer: cpal::UnknownTypeOutputBuffer::F32(mut buffer) } => {
assert_eq!(id, output_stream_id);
let mut input_fell_behind = None;
for sample in buffer.iter_mut() {
*sample = match consumer.pop() {
Ok(s) => s,
Err(err) => {
input_fell_behind = Some(err);
0.0
},
};
}
if let Some(_) = input_fell_behind {
eprintln!("input stream fell behind: try increasing latency");
}
},
_ => panic!("we're expecting f32 data"),
}
});
});
// Run for 3 seconds before closing. // Run for 3 seconds before closing.
println!("Playing for 3 seconds... "); println!("Playing for 3 seconds... ");
std::thread::sleep(std::time::Duration::from_secs(3)); std::thread::sleep(std::time::Duration::from_secs(3));
drop(input_stream);
drop(output_stream);
println!("Done!"); println!("Done!");
Ok(()) Ok(())
} }
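
The feedback example keeps the same latency strategy as before: the ring buffer is pre-filled with silence so the output callback starts roughly `LATENCY_MS` behind the input callback. Below is a condensed sketch of just that bookkeeping, with the sample rate and channel count hard-coded for illustration.

```rust
extern crate ringbuf;

use ringbuf::RingBuffer;

fn main() {
    // Illustrative values; the example derives these from the device format.
    let sample_rate = 44_100.0_f32;
    let channels = 2_usize;
    let latency_ms = 150.0_f32;

    let latency_frames = (latency_ms / 1_000.0) * sample_rate;
    let latency_samples = latency_frames as usize * channels;

    // Twice the latency so the writer never immediately catches the reader.
    let ring = RingBuffer::<f32>::new(latency_samples * 2);
    let (mut producer, mut consumer) = ring.split();

    // Pre-fill with silence; the buffer is large enough that this cannot fail.
    for _ in 0..latency_samples {
        producer.push(0.0).unwrap();
    }

    // The input callback pushes captured samples and the output callback pops
    // them, falling back to silence when the input falls behind.
    assert_eq!(consumer.pop().ok(), Some(0.0));
}
```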

examples/record_wav.rs

@ -6,21 +6,21 @@ extern crate anyhow;
extern crate cpal; extern crate cpal;
extern crate hound; extern crate hound;
use cpal::traits::{DeviceTrait, EventLoopTrait, HostTrait}; use cpal::traits::{DeviceTrait, HostTrait, StreamTrait};
fn main() -> Result<(), anyhow::Error> { fn main() -> Result<(), anyhow::Error> {
// Use the default host for working with audio devices. // Use the default host for working with audio devices.
let host = cpal::default_host(); let host = cpal::default_host();
// Setup the default input device and stream with the default input format. // Setup the default input device and stream with the default input format.
let device = host.default_input_device().expect("Failed to get default input device"); let device = host
.default_input_device()
.expect("Failed to get default input device");
println!("Default input device: {}", device.name()?); println!("Default input device: {}", device.name()?);
let format = device.default_input_format().expect("Failed to get default input format"); let format = device
.default_input_format()
.expect("Failed to get default input format");
println!("Default input format: {:?}", format); println!("Default input format: {:?}", format);
let event_loop = host.event_loop();
let stream_id = event_loop.build_input_stream(&device, &format)?;
event_loop.play_stream(stream_id)?;
// The WAV file we're recording to. // The WAV file we're recording to.
const PATH: &'static str = concat!(env!("CARGO_MANIFEST_DIR"), "/recorded.wav"); const PATH: &'static str = concat!(env!("CARGO_MANIFEST_DIR"), "/recorded.wav");
let spec = wav_spec_from_format(&format); let spec = wav_spec_from_format(&format);
@ -29,63 +29,56 @@ fn main() -> Result<(), anyhow::Error> {
// A flag to indicate that recording is in progress. // A flag to indicate that recording is in progress.
println!("Begin recording..."); println!("Begin recording...");
let recording = std::sync::Arc::new(std::sync::atomic::AtomicBool::new(true));
// Run the input stream on a separate thread. // Run the input stream on a separate thread.
let writer_2 = writer.clone(); let writer_2 = writer.clone();
let recording_2 = recording.clone(); let stream = device.build_input_stream(&format, move |data| {
std::thread::spawn(move || { // Otherwise write to the wav writer.
event_loop.run(move |id, event| { match data {
let data = match event { cpal::StreamData::Input {
Ok(data) => data, buffer: cpal::UnknownTypeInputBuffer::U16(buffer),
Err(err) => { } => {
eprintln!("an error occurred on stream {:?}: {}", id, err); if let Ok(mut guard) = writer_2.try_lock() {
return; if let Some(writer) = guard.as_mut() {
for sample in buffer.iter() {
let sample = cpal::Sample::to_i16(sample);
writer.write_sample(sample).ok();
}
}
} }
}; },
cpal::StreamData::Input {
// If we're done recording, return early. buffer: cpal::UnknownTypeInputBuffer::I16(buffer),
if !recording_2.load(std::sync::atomic::Ordering::Relaxed) { } => {
return; if let Ok(mut guard) = writer_2.try_lock() {
} if let Some(writer) = guard.as_mut() {
// Otherwise write to the wav writer. for &sample in buffer.iter() {
match data { writer.write_sample(sample).ok();
cpal::StreamData::Input { buffer: cpal::UnknownTypeInputBuffer::U16(buffer) } => {
if let Ok(mut guard) = writer_2.try_lock() {
if let Some(writer) = guard.as_mut() {
for sample in buffer.iter() {
let sample = cpal::Sample::to_i16(sample);
writer.write_sample(sample).ok();
}
} }
} }
}, }
cpal::StreamData::Input { buffer: cpal::UnknownTypeInputBuffer::I16(buffer) } => { },
if let Ok(mut guard) = writer_2.try_lock() { cpal::StreamData::Input {
if let Some(writer) = guard.as_mut() { buffer: cpal::UnknownTypeInputBuffer::F32(buffer),
for &sample in buffer.iter() { } => {
writer.write_sample(sample).ok(); if let Ok(mut guard) = writer_2.try_lock() {
} if let Some(writer) = guard.as_mut() {
for &sample in buffer.iter() {
writer.write_sample(sample).ok();
} }
} }
}, }
cpal::StreamData::Input { buffer: cpal::UnknownTypeInputBuffer::F32(buffer) } => { },
if let Ok(mut guard) = writer_2.try_lock() { _ => (),
if let Some(writer) = guard.as_mut() { }
for &sample in buffer.iter() { }, move |err| {
writer.write_sample(sample).ok(); eprintln!("an error occurred on stream: {}", err);
} })?;
} stream.play()?;
}
},
_ => (),
}
});
});
// Let recording go for roughly three seconds. // Let recording go for roughly three seconds.
std::thread::sleep(std::time::Duration::from_secs(3)); std::thread::sleep(std::time::Duration::from_secs(3));
recording.store(false, std::sync::atomic::Ordering::Relaxed); drop(stream);
writer.lock().unwrap().take().unwrap().finalize()?; writer.lock().unwrap().take().unwrap().finalize()?;
println!("Recording {} complete!", PATH); println!("Recording {} complete!", PATH);
Ok(()) Ok(())
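
The recording example replaces the old `AtomicBool` stop flag with simply dropping the stream, but the writer-sharing pattern is unchanged: the WAV writer sits behind `Arc<Mutex<Option<...>>>` so the audio callback can `try_lock` without ever blocking the real-time thread, while the main thread later takes the writer out to finalize the file. A condensed sketch of that pattern (the spec values and path are illustrative):

```rust
extern crate hound;

use std::sync::{Arc, Mutex};

type WavWriterHandle = Arc<Mutex<Option<hound::WavWriter<std::io::BufWriter<std::fs::File>>>>>;

// Called from the audio callback: skip the buffer rather than block if the
// main thread currently holds the lock.
fn write_samples(writer: &WavWriterHandle, samples: &[i16]) {
    if let Ok(mut guard) = writer.try_lock() {
        if let Some(writer) = guard.as_mut() {
            for &sample in samples {
                writer.write_sample(sample).ok();
            }
        }
    }
}

fn main() -> Result<(), hound::Error> {
    let spec = hound::WavSpec {
        channels: 1,
        sample_rate: 44_100,
        bits_per_sample: 16,
        sample_format: hound::SampleFormat::Int,
    };
    let writer: WavWriterHandle =
        Arc::new(Mutex::new(Some(hound::WavWriter::create("recorded.wav", spec)?)));

    write_samples(&writer, &[0, 1, -1, 0]);

    // Finalizing consumes the writer, hence the `Option` and `take()`.
    writer.lock().unwrap().take().unwrap().finalize()?;
    Ok(())
}
```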

File diff suppressed because it is too large.

src/host/asio/device.rs

@@ -3,7 +3,7 @@ pub type SupportedInputFormats = std::vec::IntoIter<SupportedFormat>;
 pub type SupportedOutputFormats = std::vec::IntoIter<SupportedFormat>;
 
 use std::hash::{Hash, Hasher};
-use std::sync::Arc;
+use std::sync::{Arc};
 use BackendSpecificError;
 use DefaultFormatError;
 use DeviceNameError;
@@ -14,12 +14,17 @@ use SampleRate;
 use SupportedFormat;
 use SupportedFormatsError;
 use super::sys;
+use super::parking_lot::Mutex;
 
 /// A ASIO Device
-#[derive(Debug)]
 pub struct Device {
     /// The driver represented by this device.
     pub driver: Arc<sys::Driver>,
+    // Input and/or Output stream.
+    // An driver can only have one of each.
+    // They need to be created at the same time.
+    pub asio_streams: Arc<Mutex<sys::AsioStreams>>,
 }
 
 /// All available devices.
@@ -148,7 +153,14 @@ impl Iterator for Devices {
         loop {
             match self.drivers.next() {
                 Some(name) => match self.asio.load_driver(&name) {
-                    Ok(driver) => return Some(Device { driver: Arc::new(driver) }),
+                    Ok(driver) => {
+                        let driver = Arc::new(driver);
+                        let asio_streams = Arc::new(Mutex::new(sys::AsioStreams {
+                            input: None,
+                            output: None,
+                        }));
+                        return Some(Device { driver, asio_streams });
+                    }
                     Err(_) => continue,
                 }
                 None => return None,

src/host/asio/mod.rs

@@ -1,4 +1,5 @@
 extern crate asio_sys as sys;
+extern crate parking_lot;
 
 use {
     BuildStreamError,
@@ -8,18 +9,18 @@
     Format,
     PauseStreamError,
     PlayStreamError,
-    StreamDataResult,
     SupportedFormatsError,
+    StreamData,
+    StreamError,
 };
 use traits::{
     DeviceTrait,
-    EventLoopTrait,
     HostTrait,
-    StreamIdTrait,
+    StreamTrait,
 };
 
 pub use self::device::{Device, Devices, SupportedInputFormats, SupportedOutputFormats};
-pub use self::stream::{EventLoop, StreamId};
+pub use self::stream::Stream;
 use std::sync::Arc;
 
 mod device;
@@ -42,7 +43,6 @@ impl Host {
 impl HostTrait for Host {
     type Devices = Devices;
     type Device = Device;
-    type EventLoop = EventLoop;
 
     fn is_available() -> bool {
         true
@@ -62,15 +62,12 @@
         // ASIO has no concept of a default device, so just use the first.
         self.output_devices().ok().and_then(|mut ds| ds.next())
     }
-
-    fn event_loop(&self) -> Self::EventLoop {
-        EventLoop::new()
-    }
 }
 
 impl DeviceTrait for Device {
     type SupportedInputFormats = SupportedInputFormats;
     type SupportedOutputFormats = SupportedOutputFormats;
+    type Stream = Stream;
 
     fn name(&self) -> Result<String, DeviceNameError> {
         Device::name(self)
@@ -91,46 +88,28 @@ impl DeviceTrait for Device {
     fn default_output_format(&self) -> Result<Format, DefaultFormatError> {
         Device::default_output_format(self)
     }
-}
-
-impl EventLoopTrait for EventLoop {
-    type Device = Device;
-    type StreamId = StreamId;
-
-    fn build_input_stream(
-        &self,
-        device: &Self::Device,
-        format: &Format,
-    ) -> Result<Self::StreamId, BuildStreamError> {
-        EventLoop::build_input_stream(self, device, format)
-    }
-
-    fn build_output_stream(
-        &self,
-        device: &Self::Device,
-        format: &Format,
-    ) -> Result<Self::StreamId, BuildStreamError> {
-        EventLoop::build_output_stream(self, device, format)
-    }
-
-    fn play_stream(&self, stream: Self::StreamId) -> Result<(), PlayStreamError> {
-        EventLoop::play_stream(self, stream)
-    }
-
-    fn pause_stream(&self, stream: Self::StreamId) -> Result<(), PauseStreamError> {
-        EventLoop::pause_stream(self, stream)
-    }
-
-    fn destroy_stream(&self, stream: Self::StreamId) {
-        EventLoop::destroy_stream(self, stream)
-    }
-
-    fn run<F>(&self, callback: F) -> !
-    where
-        F: FnMut(Self::StreamId, StreamDataResult) + Send,
-    {
-        EventLoop::run(self, callback)
-    }
-}
 
-impl StreamIdTrait for StreamId {}
+    fn build_input_stream<D, E>(&self, format: &Format, data_callback: D, error_callback: E) -> Result<Self::Stream, BuildStreamError>
+    where
+        D: FnMut(StreamData) + Send + 'static, E: FnMut(StreamError) + Send + 'static
+    {
+        Device::build_input_stream(self, format, data_callback, error_callback)
+    }
+
+    fn build_output_stream<D, E>(&self, format: &Format, data_callback: D, error_callback: E) -> Result<Self::Stream, BuildStreamError>
+    where
+        D: FnMut(StreamData) + Send + 'static, E: FnMut(StreamError) + Send + 'static
+    {
+        Device::build_output_stream(self, format, data_callback, error_callback)
+    }
+}
+
+impl StreamTrait for Stream {
+    fn play(&self) -> Result<(), PlayStreamError> {
+        Stream::play(self)
+    }
+
+    fn pause(&self) -> Result<(), PauseStreamError> {
+        Stream::pause(self)
    }
+}
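
The impls above target the reworked traits in cpal's `traits` module, whose diff is not rendered on this page. Their approximate shape, as implied by these impl blocks, is sketched below; the `Sketch` suffix is a reminder that this is a paraphrase rather than the real definitions.

```rust
use cpal::{BuildStreamError, Format, PauseStreamError, PlayStreamError, StreamData, StreamError};

pub trait StreamTraitSketch {
    fn play(&self) -> Result<(), PlayStreamError>;
    fn pause(&self) -> Result<(), PauseStreamError>;
}

pub trait DeviceTraitSketch {
    type Stream: StreamTraitSketch;

    // One data callback and one error callback per stream replace the former
    // shared `EventLoop::run` callback keyed by `StreamId`.
    fn build_input_stream<D, E>(
        &self,
        format: &Format,
        data_callback: D,
        error_callback: E,
    ) -> Result<Self::Stream, BuildStreamError>
    where
        D: FnMut(StreamData) + Send + 'static,
        E: FnMut(StreamError) + Send + 'static;

    fn build_output_stream<D, E>(
        &self,
        format: &Format,
        data_callback: D,
        error_callback: E,
    ) -> Result<Self::Stream, BuildStreamError>
    where
        D: FnMut(StreamData) + Send + 'static,
        E: FnMut(StreamError) + Send + 'static;
}
```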

src/host/asio/stream.rs

@ -4,11 +4,9 @@ extern crate num_traits;
use self::num_traits::PrimInt; use self::num_traits::PrimInt;
use super::Device; use super::Device;
use std; use std;
use std::mem; use std::sync::atomic::{Ordering, AtomicBool};
use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::Arc;
use std::sync::{Arc, Mutex}; use super::parking_lot::Mutex;
use std::thread;
use std::time::Duration;
use BackendSpecificError; use BackendSpecificError;
use BuildStreamError; use BuildStreamError;
use Format; use Format;
@ -16,9 +14,9 @@ use PauseStreamError;
use PlayStreamError; use PlayStreamError;
use SampleFormat; use SampleFormat;
use StreamData; use StreamData;
use StreamDataResult;
use UnknownTypeInputBuffer; use UnknownTypeInputBuffer;
use UnknownTypeOutputBuffer; use UnknownTypeOutputBuffer;
use StreamError;
/// Sample types whose constant silent value is known. /// Sample types whose constant silent value is known.
trait Silence { trait Silence {
@ -34,35 +32,6 @@ trait InterleavedSample: Clone + Copy + Silence {
/// Constraints on the ASIO sample types. /// Constraints on the ASIO sample types.
trait AsioSample: Clone + Copy + Silence + std::ops::Add<Self, Output = Self> {} trait AsioSample: Clone + Copy + Silence + std::ops::Add<Self, Output = Self> {}
/// Controls all streams
pub struct EventLoop {
/// The input and output ASIO streams
asio_streams: Arc<Mutex<sys::AsioStreams>>,
/// List of all CPAL streams
cpal_streams: Arc<Mutex<Vec<Option<Stream>>>>,
/// Total stream count.
stream_count: AtomicUsize,
/// The CPAL callback that the user gives to fill the buffers.
callbacks: Arc<Mutex<Option<&'static mut (FnMut(StreamId, StreamDataResult) + Send)>>>,
}
/// Id for each stream.
/// Created depending on the number they are created.
/// Starting at one! not zero.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct StreamId(usize);
/// CPAL stream.
/// This decouples the many cpal streams
/// from the single input and single output
/// ASIO streams.
/// Each stream can be playing or paused.
struct Stream {
playing: bool,
// The driver associated with this stream.
driver: Arc<sys::Driver>,
}
// Used to keep track of whether or not the current current asio stream buffer requires // Used to keep track of whether or not the current current asio stream buffer requires
// being silencing before summing audio. // being silencing before summing audio.
#[derive(Default)] #[derive(Default)]
@ -71,114 +40,38 @@ struct SilenceAsioBuffer {
second: bool, second: bool,
} }
impl EventLoop { pub struct Stream {
pub fn new() -> EventLoop { playing: Arc<AtomicBool>,
EventLoop { // Ensure the `Driver` does not terminate until the last stream is dropped.
asio_streams: Arc::new(Mutex::new(sys::AsioStreams { driver: Arc<sys::Driver>,
input: None, asio_streams: Arc<Mutex<sys::AsioStreams>>,
output: None, callback_id: sys::CallbackId,
})), }
cpal_streams: Arc::new(Mutex::new(Vec::new())),
// This is why the Id's count from one not zero impl Stream {
// because at this point there is no streams pub fn play(&self) -> Result<(), PlayStreamError> {
stream_count: AtomicUsize::new(0), self.playing.store(true, Ordering::SeqCst);
callbacks: Arc::new(Mutex::new(None)), Ok(())
}
} }
/// Create a new CPAL Input Stream. pub fn pause(&self) -> Result<(), PauseStreamError> {
/// self.playing.store(false, Ordering::SeqCst);
/// If there is no existing ASIO Input Stream it will be created. Ok(())
///
/// On success, the buffer size of the stream is returned.
fn get_or_create_input_stream(
&self,
driver: &sys::Driver,
format: &Format,
device: &Device,
) -> Result<usize, BuildStreamError> {
match device.default_input_format() {
Ok(f) => {
let num_asio_channels = f.channels;
check_format(driver, format, num_asio_channels)
},
Err(_) => Err(BuildStreamError::FormatNotSupported),
}?;
let num_channels = format.channels as usize;
let ref mut streams = *self.asio_streams.lock().unwrap();
// Either create a stream if thers none or had back the
// size of the current one.
match streams.input {
Some(ref input) => Ok(input.buffer_size as usize),
None => {
let output = streams.output.take();
driver
.prepare_input_stream(output, num_channels)
.map(|new_streams| {
let bs = match new_streams.input {
Some(ref inp) => inp.buffer_size as usize,
None => unreachable!(),
};
*streams = new_streams;
bs
}).map_err(|ref e| {
println!("Error preparing stream: {}", e);
BuildStreamError::DeviceNotAvailable
})
}
}
} }
}
/// Create a new CPAL Output Stream. impl Device {
/// pub fn build_input_stream<D, E>(
/// If there is no existing ASIO Output Stream it will be created.
///
/// On success, the buffer size of the stream is returned.
fn get_or_create_output_stream(
&self, &self,
driver: &sys::Driver,
format: &Format, format: &Format,
device: &Device, mut data_callback: D,
) -> Result<usize, BuildStreamError> { _error_callback: E,
match device.default_output_format() { ) -> Result<Stream, BuildStreamError>
Ok(f) => { where
let num_asio_channels = f.channels; D: FnMut(StreamData) + Send + 'static,
check_format(driver, format, num_asio_channels) E: FnMut(StreamError) + Send + 'static
}, {
Err(_) => Err(BuildStreamError::FormatNotSupported), let stream_type = self.driver.input_data_type().map_err(build_stream_err)?;
}?;
let num_channels = format.channels as usize;
let ref mut streams = *self.asio_streams.lock().unwrap();
// Either create a stream if there's none or return the size of the current one.
match streams.output {
Some(ref output) => Ok(output.buffer_size as usize),
None => {
let input = streams.input.take();
driver
.prepare_output_stream(input, num_channels)
.map(|new_streams| {
let bs = match new_streams.output {
Some(ref out) => out.buffer_size as usize,
None => unreachable!(),
};
*streams = new_streams;
bs
}).map_err(|ref e| {
println!("Error preparing stream: {}", e);
BuildStreamError::DeviceNotAvailable
})
}
}
}
/// Builds a new cpal input stream
pub fn build_input_stream(
&self,
device: &Device,
format: &Format,
) -> Result<StreamId, BuildStreamError> {
let Device { driver, .. } = device;
let stream_type = driver.input_data_type().map_err(build_stream_err)?;
// Ensure that the desired sample type is supported. // Ensure that the desired sample type is supported.
let data_type = super::device::convert_data_type(&stream_type) let data_type = super::device::convert_data_type(&stream_type)
@ -188,48 +81,36 @@ impl EventLoop {
} }
let num_channels = format.channels.clone(); let num_channels = format.channels.clone();
let stream_buffer_size = self.get_or_create_input_stream(&driver, format, device)?; let buffer_size = self.get_or_create_input_stream(format)?;
let cpal_num_samples = stream_buffer_size * num_channels as usize; let cpal_num_samples = buffer_size * num_channels as usize;
let count = self.stream_count.fetch_add(1, Ordering::SeqCst);
let asio_streams = self.asio_streams.clone();
let cpal_streams = self.cpal_streams.clone();
let callbacks = self.callbacks.clone();
// Create the buffer depending on the size of the data type. // Create the buffer depending on the size of the data type.
let stream_id = StreamId(count);
let len_bytes = cpal_num_samples * data_type.sample_size(); let len_bytes = cpal_num_samples * data_type.sample_size();
let mut interleaved = vec![0u8; len_bytes]; let mut interleaved = vec![0u8; len_bytes];
let stream_playing = Arc::new(AtomicBool::new(false));
let playing = Arc::clone(&stream_playing);
let asio_streams = self.asio_streams.clone();
// Set the input callback. // Set the input callback.
// This is most performance critical part of the ASIO bindings. // This is most performance critical part of the ASIO bindings.
driver.set_callback(move |buffer_index| unsafe { let callback_id = self.driver.add_callback(move |buffer_index| unsafe {
// If not playing return early. // If not playing return early.
// TODO: Don't assume `count` is valid - we should search for the matching `StreamId`. if !playing.load(Ordering::SeqCst) {
if let Some(s) = cpal_streams.lock().unwrap().get(count) { return
if let Some(s) = s {
if !s.playing {
return;
}
}
} }
// Acquire the stream and callback. // There is 0% chance of lock contention the host only locks when recreating streams.
let stream_lock = asio_streams.lock().unwrap(); let stream_lock = asio_streams.lock();
let ref asio_stream = match stream_lock.input { let ref asio_stream = match stream_lock.input {
Some(ref asio_stream) => asio_stream, Some(ref asio_stream) => asio_stream,
None => return, None => return,
}; };
let mut callbacks = callbacks.lock().unwrap();
let callback = match callbacks.as_mut() {
Some(callback) => callback,
None => return,
};
/// 1. Write from the ASIO buffer to the interleaved CPAL buffer. /// 1. Write from the ASIO buffer to the interleaved CPAL buffer.
/// 2. Deliver the CPAL buffer to the user callback. /// 2. Deliver the CPAL buffer to the user callback.
unsafe fn process_input_callback<A, B, F, G>( unsafe fn process_input_callback<A, B, D, F, G>(
stream_id: StreamId, callback: &mut D,
callback: &mut (dyn FnMut(StreamId, StreamDataResult) + Send),
interleaved: &mut [u8], interleaved: &mut [u8],
asio_stream: &sys::AsioStream, asio_stream: &sys::AsioStream,
buffer_index: usize, buffer_index: usize,
@ -239,6 +120,7 @@ impl EventLoop {
where where
A: AsioSample, A: AsioSample,
B: InterleavedSample, B: InterleavedSample,
D: FnMut(StreamData) + Send + 'static,
F: Fn(A) -> A, F: Fn(A) -> A,
G: Fn(A) -> B, G: Fn(A) -> B,
{ {
@ -254,16 +136,14 @@ impl EventLoop {
// 2. Deliver the interleaved buffer to the callback. // 2. Deliver the interleaved buffer to the callback.
callback( callback(
stream_id, StreamData::Input { buffer: B::unknown_type_input_buffer(interleaved) },
Ok(StreamData::Input { buffer: B::unknown_type_input_buffer(interleaved) }),
); );
} }
match (&stream_type, data_type) { match (&stream_type, data_type) {
(&sys::AsioSampleType::ASIOSTInt16LSB, SampleFormat::I16) => { (&sys::AsioSampleType::ASIOSTInt16LSB, SampleFormat::I16) => {
process_input_callback::<i16, i16, _, _>( process_input_callback::<i16, i16, _, _, _>(
stream_id, &mut data_callback,
callback,
&mut interleaved, &mut interleaved,
asio_stream, asio_stream,
buffer_index as usize, buffer_index as usize,
@ -272,9 +152,8 @@ impl EventLoop {
); );
} }
(&sys::AsioSampleType::ASIOSTInt16MSB, SampleFormat::I16) => { (&sys::AsioSampleType::ASIOSTInt16MSB, SampleFormat::I16) => {
process_input_callback::<i16, i16, _, _>( process_input_callback::<i16, i16, _, _, _>(
stream_id, &mut data_callback,
callback,
&mut interleaved, &mut interleaved,
asio_stream, asio_stream,
buffer_index as usize, buffer_index as usize,
@ -287,9 +166,8 @@ impl EventLoop {
// trait for the `to_le` and `to_be` methods, but this does not support floats. // trait for the `to_le` and `to_be` methods, but this does not support floats.
(&sys::AsioSampleType::ASIOSTFloat32LSB, SampleFormat::F32) | (&sys::AsioSampleType::ASIOSTFloat32LSB, SampleFormat::F32) |
(&sys::AsioSampleType::ASIOSTFloat32MSB, SampleFormat::F32) => { (&sys::AsioSampleType::ASIOSTFloat32MSB, SampleFormat::F32) => {
process_input_callback::<f32, f32, _, _>( process_input_callback::<f32, f32, _, _, _>(
stream_id, &mut data_callback,
callback,
&mut interleaved, &mut interleaved,
asio_stream, asio_stream,
buffer_index as usize, buffer_index as usize,
@ -302,9 +180,8 @@ impl EventLoop {
// `process_output_callback` function above by removing the unnecessary sample // `process_output_callback` function above by removing the unnecessary sample
// conversion function. // conversion function.
(&sys::AsioSampleType::ASIOSTInt32LSB, SampleFormat::I16) => { (&sys::AsioSampleType::ASIOSTInt32LSB, SampleFormat::I16) => {
process_input_callback::<i32, i16, _, _>( process_input_callback::<i32, i16, _, _, _>(
stream_id, &mut data_callback,
callback,
&mut interleaved, &mut interleaved,
asio_stream, asio_stream,
buffer_index as usize, buffer_index as usize,
@ -313,9 +190,8 @@ impl EventLoop {
); );
} }
(&sys::AsioSampleType::ASIOSTInt32MSB, SampleFormat::I16) => { (&sys::AsioSampleType::ASIOSTInt32MSB, SampleFormat::I16) => {
process_input_callback::<i32, i16, _, _>( process_input_callback::<i32, i16, _, _, _>(
stream_id, &mut data_callback,
callback,
&mut interleaved, &mut interleaved,
asio_stream, asio_stream,
buffer_index as usize, buffer_index as usize,
@ -327,9 +203,8 @@ impl EventLoop {
// trait for the `to_le` and `to_be` methods, but this does not support floats. // trait for the `to_le` and `to_be` methods, but this does not support floats.
(&sys::AsioSampleType::ASIOSTFloat64LSB, SampleFormat::F32) | (&sys::AsioSampleType::ASIOSTFloat64LSB, SampleFormat::F32) |
(&sys::AsioSampleType::ASIOSTFloat64MSB, SampleFormat::F32) => { (&sys::AsioSampleType::ASIOSTFloat64MSB, SampleFormat::F32) => {
process_input_callback::<f64, f32, _, _>( process_input_callback::<f64, f32, _, _, _>(
stream_id, &mut data_callback,
callback,
&mut interleaved, &mut interleaved,
asio_stream, asio_stream,
buffer_index as usize, buffer_index as usize,
@ -345,23 +220,31 @@ impl EventLoop {
} }
}); });
// Create stream and set to paused let driver = self.driver.clone();
self.cpal_streams let asio_streams = self.asio_streams.clone();
.lock()
.unwrap()
.push(Some(Stream { driver: driver.clone(), playing: false }));
Ok(StreamId(count)) // Immediately start the device?
self.driver.start().map_err(build_stream_err)?;
Ok(Stream {
playing: stream_playing,
driver,
asio_streams,
callback_id,
})
} }
/// Create the an output cpal stream. pub fn build_output_stream<D, E>(
pub fn build_output_stream(
&self, &self,
device: &Device,
format: &Format, format: &Format,
) -> Result<StreamId, BuildStreamError> { mut data_callback: D,
let Device { driver, .. } = device; _error_callback: E,
let stream_type = driver.output_data_type().map_err(build_stream_err)?; ) -> Result<Stream, BuildStreamError>
where
D: FnMut(StreamData) + Send + 'static,
E: FnMut(StreamError) + Send + 'static,
{
let stream_type = self.driver.output_data_type().map_err(build_stream_err)?;
// Ensure that the desired sample type is supported. // Ensure that the desired sample type is supported.
let data_type = super::device::convert_data_type(&stream_type) let data_type = super::device::convert_data_type(&stream_type)
@ -371,38 +254,30 @@ impl EventLoop {
} }
let num_channels = format.channels.clone(); let num_channels = format.channels.clone();
let stream_buffer_size = self.get_or_create_output_stream(&driver, format, device)?; let buffer_size = self.get_or_create_output_stream(format)?;
let cpal_num_samples = stream_buffer_size * num_channels as usize; let cpal_num_samples = buffer_size * num_channels as usize;
let count = self.stream_count.fetch_add(1, Ordering::SeqCst);
let asio_streams = self.asio_streams.clone();
let cpal_streams = self.cpal_streams.clone();
let callbacks = self.callbacks.clone();
// Create buffers depending on data type. // Create buffers depending on data type.
let stream_id = StreamId(count);
let len_bytes = cpal_num_samples * data_type.sample_size(); let len_bytes = cpal_num_samples * data_type.sample_size();
let mut interleaved = vec![0u8; len_bytes]; let mut interleaved = vec![0u8; len_bytes];
let mut silence_asio_buffer = SilenceAsioBuffer::default(); let mut silence_asio_buffer = SilenceAsioBuffer::default();
driver.set_callback(move |buffer_index| unsafe { let stream_playing = Arc::new(AtomicBool::new(false));
let playing = Arc::clone(&stream_playing);
let asio_streams = self.asio_streams.clone();
let callback_id = self.driver.add_callback(move |buffer_index| unsafe {
// If not playing, return early. // If not playing, return early.
// TODO: Don't assume `count` is valid - we should search for the matching `StreamId`. if !playing.load(Ordering::SeqCst) {
if let Some(s) = cpal_streams.lock().unwrap().get(count) { return
if let Some(s) = s {
if !s.playing {
return ();
}
}
} }
// Acquire the stream and callback. // There is 0% chance of lock contention the host only locks when recreating streams.
let stream_lock = asio_streams.lock().unwrap(); let stream_lock = asio_streams.lock();
let ref asio_stream = match stream_lock.output { let ref asio_stream = match stream_lock.output {
Some(ref asio_stream) => asio_stream, Some(ref asio_stream) => asio_stream,
None => return, None => return,
}; };
let mut callbacks = callbacks.lock().unwrap();
let callback = callbacks.as_mut();
// Silence the ASIO buffer that is about to be used. // Silence the ASIO buffer that is about to be used.
// //
@ -430,9 +305,8 @@ impl EventLoop {
/// 2. If required, silence the ASIO buffer. /// 2. If required, silence the ASIO buffer.
/// 3. Finally, write the interleaved data to the non-interleaved ASIO buffer, /// 3. Finally, write the interleaved data to the non-interleaved ASIO buffer,
/// performing endianness conversions as necessary. /// performing endianness conversions as necessary.
unsafe fn process_output_callback<A, B, F, G>( unsafe fn process_output_callback<A, B, D, F, G>(
stream_id: StreamId, callback: &mut D,
callback: Option<&mut &mut (dyn FnMut(StreamId, StreamDataResult) + Send)>,
interleaved: &mut [u8], interleaved: &mut [u8],
silence_asio_buffer: bool, silence_asio_buffer: bool,
asio_stream: &sys::AsioStream, asio_stream: &sys::AsioStream,
@ -443,18 +317,14 @@ impl EventLoop {
where where
A: InterleavedSample, A: InterleavedSample,
B: AsioSample, B: AsioSample,
D: FnMut(StreamData) + Send + 'static,
F: Fn(A) -> B, F: Fn(A) -> B,
G: Fn(B) -> B, G: Fn(B) -> B,
{ {
// 1. Render interleaved buffer from callback. // 1. Render interleaved buffer from callback.
let interleaved: &mut [A] = cast_slice_mut(interleaved); let interleaved: &mut [A] = cast_slice_mut(interleaved);
match callback { let buffer = A::unknown_type_output_buffer(interleaved);
None => interleaved.iter_mut().for_each(|s| *s = A::SILENCE), callback(StreamData::Output { buffer });
Some(callback) => {
let buffer = A::unknown_type_output_buffer(interleaved);
callback(stream_id, Ok(StreamData::Output { buffer }));
}
}
// 2. Silence ASIO channels if necessary. // 2. Silence ASIO channels if necessary.
let n_channels = interleaved.len() / asio_stream.buffer_size as usize; let n_channels = interleaved.len() / asio_stream.buffer_size as usize;
@ -478,9 +348,8 @@ impl EventLoop {
match (data_type, &stream_type) { match (data_type, &stream_type) {
(SampleFormat::I16, &sys::AsioSampleType::ASIOSTInt16LSB) => { (SampleFormat::I16, &sys::AsioSampleType::ASIOSTInt16LSB) => {
process_output_callback::<i16, i16, _, _>( process_output_callback::<i16, i16, _, _, _>(
stream_id, &mut data_callback,
callback,
&mut interleaved, &mut interleaved,
silence, silence,
asio_stream, asio_stream,
@ -490,9 +359,8 @@ impl EventLoop {
); );
} }
(SampleFormat::I16, &sys::AsioSampleType::ASIOSTInt16MSB) => { (SampleFormat::I16, &sys::AsioSampleType::ASIOSTInt16MSB) => {
process_output_callback::<i16, i16, _, _>( process_output_callback::<i16, i16, _, _, _>(
stream_id, &mut data_callback,
callback,
&mut interleaved, &mut interleaved,
silence, silence,
asio_stream, asio_stream,
@ -506,9 +374,8 @@ impl EventLoop {
// trait for the `to_le` and `to_be` methods, but this does not support floats. // trait for the `to_le` and `to_be` methods, but this does not support floats.
(SampleFormat::F32, &sys::AsioSampleType::ASIOSTFloat32LSB) | (SampleFormat::F32, &sys::AsioSampleType::ASIOSTFloat32LSB) |
(SampleFormat::F32, &sys::AsioSampleType::ASIOSTFloat32MSB) => { (SampleFormat::F32, &sys::AsioSampleType::ASIOSTFloat32MSB) => {
process_output_callback::<f32, f32, _, _>( process_output_callback::<f32, f32, _, _, _>(
stream_id, &mut data_callback,
callback,
&mut interleaved, &mut interleaved,
silence, silence,
asio_stream, asio_stream,
@ -522,9 +389,8 @@ impl EventLoop {
// `process_output_callback` function above by removing the unnecessary sample // `process_output_callback` function above by removing the unnecessary sample
// conversion function. // conversion function.
(SampleFormat::I16, &sys::AsioSampleType::ASIOSTInt32LSB) => { (SampleFormat::I16, &sys::AsioSampleType::ASIOSTInt32LSB) => {
process_output_callback::<i16, i32, _, _>( process_output_callback::<i16, i32, _, _, _>(
stream_id, &mut data_callback,
callback,
&mut interleaved, &mut interleaved,
silence, silence,
asio_stream, asio_stream,
@ -534,9 +400,8 @@ impl EventLoop {
); );
} }
(SampleFormat::I16, &sys::AsioSampleType::ASIOSTInt32MSB) => { (SampleFormat::I16, &sys::AsioSampleType::ASIOSTInt32MSB) => {
process_output_callback::<i16, i32, _, _>( process_output_callback::<i16, i32, _, _, _>(
stream_id, &mut data_callback,
callback,
&mut interleaved, &mut interleaved,
silence, silence,
asio_stream, asio_stream,
@ -549,9 +414,8 @@ impl EventLoop {
// trait for the `to_le` and `to_be` methods, but this does not support floats. // trait for the `to_le` and `to_be` methods, but this does not support floats.
(SampleFormat::F32, &sys::AsioSampleType::ASIOSTFloat64LSB) | (SampleFormat::F32, &sys::AsioSampleType::ASIOSTFloat64LSB) |
(SampleFormat::F32, &sys::AsioSampleType::ASIOSTFloat64MSB) => { (SampleFormat::F32, &sys::AsioSampleType::ASIOSTFloat64MSB) => {
process_output_callback::<f32, f64, _, _>( process_output_callback::<f32, f64, _, _, _>(
stream_id, &mut data_callback,
callback,
&mut interleaved, &mut interleaved,
silence, silence,
asio_stream, asio_stream,
@ -568,78 +432,104 @@ impl EventLoop {
} }
}); });
// Create the stream paused let driver = self.driver.clone();
self.cpal_streams let asio_streams = self.asio_streams.clone();
.lock()
.unwrap()
.push(Some(Stream { driver: driver.clone(), playing: false }));
// Give the ID based on the stream count // Immediately start the device?
Ok(StreamId(count)) self.driver.start().map_err(build_stream_err)?;
Ok(Stream {
playing: stream_playing,
driver,
asio_streams,
callback_id,
})
} }
/// Play the cpal stream for the given ID. /// Create a new CPAL Input Stream.
pub fn play_stream(&self, stream_id: StreamId) -> Result<(), PlayStreamError> {
let mut streams = self.cpal_streams.lock().unwrap();
if let Some(s) = streams.get_mut(stream_id.0).expect("Bad play stream index") {
s.playing = true;
// Calling play when already playing is a no-op
s.driver.start().map_err(play_stream_err)?;
}
Ok(())
}
/// Pause the cpal stream for the given ID.
/// ///
/// Pause the ASIO streams if there are no other CPAL streams playing, as ASIO only allows /// If there is no existing ASIO Input Stream it will be created.
/// stopping the entire driver. ///
pub fn pause_stream(&self, stream_id: StreamId) -> Result<(), PauseStreamError> { /// On success, the buffer size of the stream is returned.
let mut streams = self.cpal_streams.lock().unwrap(); fn get_or_create_input_stream(
let streams_playing = streams.iter() &self,
.filter(|s| s.as_ref().map(|s| s.playing).unwrap_or(false)) format: &Format,
.count(); ) -> Result<usize, BuildStreamError> {
if let Some(s) = streams.get_mut(stream_id.0).expect("Bad pause stream index") { match self.default_input_format() {
if streams_playing <= 1 { Ok(f) => {
s.driver.stop().map_err(pause_stream_err)?; let num_asio_channels = f.channels;
check_format(&self.driver, format, num_asio_channels)
},
Err(_) => Err(BuildStreamError::FormatNotSupported),
}?;
let num_channels = format.channels as usize;
let ref mut streams = *self.asio_streams.lock();
// Either create a stream if thers none or had back the
// size of the current one.
match streams.input {
Some(ref input) => Ok(input.buffer_size as usize),
None => {
let output = streams.output.take();
self.driver
.prepare_input_stream(output, num_channels)
.map(|new_streams| {
let bs = match new_streams.input {
Some(ref inp) => inp.buffer_size as usize,
None => unreachable!(),
};
*streams = new_streams;
bs
}).map_err(|ref e| {
println!("Error preparing stream: {}", e);
BuildStreamError::DeviceNotAvailable
})
} }
s.playing = false;
} }
Ok(())
} }
/// Destroy the cpal stream based on the ID. /// Create a new CPAL Output Stream.
pub fn destroy_stream(&self, stream_id: StreamId) { ///
// TODO: Should we not also remove an ASIO stream here? /// If there is no existing ASIO Output Stream it will be created.
// Yes, and we should update the logic in the callbacks to search for the stream with fn get_or_create_output_stream(
// the matching ID, rather than assuming the index associated with the ID is valid. &self,
let mut streams = self.cpal_streams.lock().unwrap(); format: &Format,
streams.get_mut(stream_id.0).take(); ) -> Result<usize, BuildStreamError> {
} match self.default_output_format() {
Ok(f) => {
/// Run the cpal callbacks let num_asio_channels = f.channels;
pub fn run<F>(&self, mut callback: F) -> ! check_format(&self.driver, format, num_asio_channels)
where },
F: FnMut(StreamId, StreamDataResult) + Send, Err(_) => Err(BuildStreamError::FormatNotSupported),
{ }?;
let callback: &mut (FnMut(StreamId, StreamDataResult) + Send) = &mut callback; let num_channels = format.channels as usize;
// Transmute needed to convince the compiler that the callback has a static lifetime let ref mut streams = *self.asio_streams.lock();
*self.callbacks.lock().unwrap() = Some(unsafe { mem::transmute(callback) }); // Either create a stream if thers none or had back the
loop { // size of the current one.
// A sleep here to prevent the loop being match streams.output {
// removed in --release Some(ref output) => Ok(output.buffer_size as usize),
thread::sleep(Duration::new(1u64, 0u32)); None => {
let output = streams.output.take();
self.driver
.prepare_output_stream(output, num_channels)
.map(|new_streams| {
let bs = match new_streams.output {
Some(ref out) => out.buffer_size as usize,
None => unreachable!(),
};
*streams = new_streams;
bs
}).map_err(|ref e| {
println!("Error preparing stream: {}", e);
BuildStreamError::DeviceNotAvailable
})
}
} }
} }
} }
/// Clean up if event loop is dropped. impl Drop for Stream {
/// Currently event loop is never dropped.
impl Drop for EventLoop {
fn drop(&mut self) { fn drop(&mut self) {
*self.asio_streams.lock().unwrap() = sys::AsioStreams { self.driver.remove_callback(self.callback_id);
output: None,
input: None,
};
} }
} }
@ -790,25 +680,3 @@ fn build_stream_err(e: sys::AsioError) -> BuildStreamError {
} }
} }
} }
fn pause_stream_err(e: sys::AsioError) -> PauseStreamError {
match e {
sys::AsioError::NoDrivers |
sys::AsioError::HardwareMalfunction => PauseStreamError::DeviceNotAvailable,
err => {
let description = format!("{}", err);
BackendSpecificError { description }.into()
}
}
}
fn play_stream_err(e: sys::AsioError) -> PlayStreamError {
match e {
sys::AsioError::NoDrivers |
sys::AsioError::HardwareMalfunction => PlayStreamError::DeviceNotAvailable,
err => {
let description = format!("{}", err);
BackendSpecificError { description }.into()
}
}
}
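
The ASIO `stream.rs` rewrite above boils down to a small lifecycle: the audio callback is registered once when the stream is built, a shared `Arc<AtomicBool>` decides whether that callback does any work, and dropping the stream unregisters the callback. A stripped-down sketch of that pattern, with everything driver-specific replaced by placeholders:

```rust
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;

// Placeholders stand in for `Arc<sys::Driver>` and `sys::CallbackId`.
struct StreamSketch {
    playing: Arc<AtomicBool>,
    on_drop: Box<dyn FnMut()>,
}

impl StreamSketch {
    fn play(&self) {
        // `play`/`pause` no longer go through an event loop; they only flip
        // the flag that the registered callback checks on every buffer.
        self.playing.store(true, Ordering::SeqCst);
    }

    fn pause(&self) {
        self.playing.store(false, Ordering::SeqCst);
    }
}

impl Drop for StreamSketch {
    fn drop(&mut self) {
        // Stands in for `self.driver.remove_callback(self.callback_id)`.
        (self.on_drop)();
    }
}

fn main() {
    let playing = Arc::new(AtomicBool::new(false));
    let flag = Arc::clone(&playing);

    // Stands in for the closure handed to `driver.add_callback(..)`.
    let audio_callback = move |_buffer_index: i32| {
        if !flag.load(Ordering::SeqCst) {
            return; // a paused stream skips its buffers instead of stopping the driver
        }
        // ... read or fill the shared `AsioStreams` buffers here ...
    };

    let stream = StreamSketch {
        playing,
        on_drop: Box::new(|| println!("callback removed")),
    };

    stream.play();
    audio_callback(0); // would normally be invoked by the ASIO driver
    stream.pause();
    // `stream` dropping here unregisters the callback.
}
```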

src/host/coreaudio/mod.rs

@ -14,20 +14,18 @@ use SupportedFormatsError;
use SampleFormat; use SampleFormat;
use SampleRate; use SampleRate;
use StreamData; use StreamData;
use StreamDataResult; use StreamError;
use SupportedFormat; use SupportedFormat;
use UnknownTypeInputBuffer; use UnknownTypeInputBuffer;
use UnknownTypeOutputBuffer; use UnknownTypeOutputBuffer;
use traits::{DeviceTrait, EventLoopTrait, HostTrait, StreamIdTrait}; use traits::{DeviceTrait, HostTrait, StreamTrait};
use std::ffi::CStr; use std::ffi::CStr;
use std::fmt; use std::fmt;
use std::mem; use std::mem;
use std::cell::RefCell;
use std::os::raw::c_char; use std::os::raw::c_char;
use std::ptr::null; use std::ptr::null;
use std::sync::{Arc, Condvar, Mutex};
use std::thread;
use std::time::Duration;
use std::slice; use std::slice;
use self::coreaudio::audio_unit::{AudioUnit, Scope, Element}; use self::coreaudio::audio_unit::{AudioUnit, Scope, Element};
@ -87,7 +85,6 @@ impl Host {
impl HostTrait for Host { impl HostTrait for Host {
type Devices = Devices; type Devices = Devices;
type Device = Device; type Device = Device;
type EventLoop = EventLoop;
fn is_available() -> bool { fn is_available() -> bool {
// Assume coreaudio is always available on macOS and iOS. // Assume coreaudio is always available on macOS and iOS.
@ -105,15 +102,12 @@ impl HostTrait for Host {
fn default_output_device(&self) -> Option<Self::Device> { fn default_output_device(&self) -> Option<Self::Device> {
default_output_device() default_output_device()
} }
fn event_loop(&self) -> Self::EventLoop {
EventLoop::new()
}
} }
impl DeviceTrait for Device { impl DeviceTrait for Device {
type SupportedInputFormats = SupportedInputFormats; type SupportedInputFormats = SupportedInputFormats;
type SupportedOutputFormats = SupportedOutputFormats; type SupportedOutputFormats = SupportedOutputFormats;
type Stream = Stream;
fn name(&self) -> Result<String, DeviceNameError> { fn name(&self) -> Result<String, DeviceNameError> {
Device::name(self) Device::name(self)
@ -134,50 +128,16 @@ impl DeviceTrait for Device {
fn default_output_format(&self) -> Result<Format, DefaultFormatError> { fn default_output_format(&self) -> Result<Format, DefaultFormatError> {
Device::default_output_format(self) Device::default_output_format(self)
} }
}
impl EventLoopTrait for EventLoop { fn build_input_stream<D, E>(&self, format: &Format, data_callback: D, error_callback: E) -> Result<Self::Stream, BuildStreamError> where D: FnMut(StreamData) + Send + 'static, E: FnMut(StreamError) + Send + 'static {
type Device = Device; Device::build_input_stream(self, format, data_callback, error_callback)
type StreamId = StreamId;
fn build_input_stream(
&self,
device: &Self::Device,
format: &Format,
) -> Result<Self::StreamId, BuildStreamError> {
EventLoop::build_input_stream(self, device, format)
} }
fn build_output_stream( fn build_output_stream<D, E>(&self, format: &Format, data_callback: D, error_callback: E) -> Result<Self::Stream, BuildStreamError> where D: FnMut(StreamData) + Send + 'static, E: FnMut(StreamError) + Send + 'static {
&self, Device::build_output_stream(self, format, data_callback, error_callback)
device: &Self::Device,
format: &Format,
) -> Result<Self::StreamId, BuildStreamError> {
EventLoop::build_output_stream(self, device, format)
}
fn play_stream(&self, stream: Self::StreamId) -> Result<(), PlayStreamError> {
EventLoop::play_stream(self, stream)
}
fn pause_stream(&self, stream: Self::StreamId) -> Result<(), PauseStreamError> {
EventLoop::pause_stream(self, stream)
}
fn destroy_stream(&self, stream: Self::StreamId) {
EventLoop::destroy_stream(self, stream)
}
fn run<F>(&self, callback: F) -> !
where
F: FnMut(Self::StreamId, StreamDataResult) + Send,
{
EventLoop::run(self, callback)
} }
} }
impl StreamIdTrait for StreamId {}
#[derive(Clone, PartialEq, Eq)] #[derive(Clone, PartialEq, Eq)]
pub struct Device { pub struct Device {
audio_device_id: AudioDeviceID, audio_device_id: AudioDeviceID,
@ -420,31 +380,6 @@ impl fmt::Debug for Device {
} }
} }
// The ID of a stream is its index within the `streams` array of the events loop.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct StreamId(usize);
pub struct EventLoop {
// This `Arc` is shared with all the callbacks of coreaudio.
//
// TODO: Eventually, CPAL's API should be changed to allow for submitting a unique callback per
// stream to avoid streams blocking one another.
user_callback: Arc<Mutex<UserCallback>>,
streams: Mutex<Vec<Option<StreamInner>>>,
loop_cond: Arc<(Mutex<bool>, Condvar)>,
}
enum UserCallback {
// When `run` is called with a callback, that callback will be stored here.
//
// It is essential for the safety of the program that this callback is removed before `run`
// returns (not possible with the current CPAL API).
Active(&'static mut (dyn FnMut(StreamId, StreamDataResult) + Send)),
// A queue of events that have occurred but that have not yet been emitted to the user as we
// don't yet have a callback to do so.
Inactive,
}
struct StreamInner { struct StreamInner {
playing: bool, playing: bool,
audio_unit: AudioUnit, audio_unit: AudioUnit,
@ -540,75 +475,8 @@ fn audio_unit_from_device(device: &Device, input: bool) -> Result<AudioUnit, cor
Ok(audio_unit) Ok(audio_unit)
} }
impl EventLoop { impl Device {
#[inline] fn build_input_stream<D, E>(&self, format: &Format, mut data_callback: D, _error_callback: E) -> Result<Stream, BuildStreamError> where D: FnMut(StreamData) + Send + 'static, E: FnMut(StreamError) + Send + 'static {
fn new() -> EventLoop {
EventLoop {
user_callback: Arc::new(Mutex::new(UserCallback::Inactive)),
streams: Mutex::new(Vec::new()),
loop_cond: Arc::new((Mutex::new(false), Condvar::new())),
}
}
#[inline]
fn run<F>(&self, mut callback: F) -> !
where F: FnMut(StreamId, StreamDataResult) + Send
{
{
let mut guard = self.user_callback.lock().unwrap();
if let UserCallback::Active(_) = *guard {
panic!("`EventLoop::run` was called when the event loop was already running");
}
let callback: &mut (dyn FnMut(StreamId, StreamDataResult) + Send) = &mut callback;
*guard = UserCallback::Active(unsafe { mem::transmute(callback) });
}
// Wait on a condvar to notify, which should never happen.
let &(ref lock, ref cvar) = &*self.loop_cond;
let mut running = lock.lock().unwrap();
*running = true;
while *running {
running = cvar.wait(running).unwrap();
}
unreachable!("current `EventLoop` API requires that `run` may not return");
// It is critical that we remove the callback before returning (currently not possible).
// *self.user_callback.lock().unwrap() = UserCallback::Inactive;
}
fn next_stream_id(&self) -> usize {
let streams_lock = self.streams.lock().unwrap();
let stream_id = streams_lock
.iter()
.position(|n| n.is_none())
.unwrap_or(streams_lock.len());
stream_id
}
// Add the stream to the list of streams within `self`.
fn add_stream(&self, stream_id: usize, au: AudioUnit, device_id: AudioDeviceID) {
let inner = StreamInner {
playing: true,
audio_unit: au,
device_id: device_id,
};
let mut streams_lock = self.streams.lock().unwrap();
if stream_id == streams_lock.len() {
streams_lock.push(Some(inner));
} else {
streams_lock[stream_id] = Some(inner);
}
}
#[inline]
fn build_input_stream(
&self,
device: &Device,
format: &Format,
) -> Result<StreamId, BuildStreamError>
{
// The scope and element for working with a device's input stream. // The scope and element for working with a device's input stream.
let scope = Scope::Output; let scope = Scope::Output;
let element = Element::Input; let element = Element::Input;
@ -624,7 +492,7 @@ impl EventLoop {
let sample_rate: f64 = 0.0; let sample_rate: f64 = 0.0;
let data_size = mem::size_of::<f64>() as u32; let data_size = mem::size_of::<f64>() as u32;
let status = AudioObjectGetPropertyData( let status = AudioObjectGetPropertyData(
device.audio_device_id, self.audio_device_id,
&property_address as *const _, &property_address as *const _,
0, 0,
null(), null(),
@ -635,26 +503,11 @@ impl EventLoop {
// If the requested sample rate is different to the device sample rate, update the device. // If the requested sample rate is different to the device sample rate, update the device.
if sample_rate as u32 != format.sample_rate.0 { if sample_rate as u32 != format.sample_rate.0 {
// In order to avoid breaking existing input streams we return an error if there is
// already an active input stream for this device with the actual sample rate.
for stream in &*self.streams.lock().unwrap() {
if let Some(stream) = stream.as_ref() {
if stream.device_id == device.audio_device_id {
let description = "cannot change device sample rate for stream as an \
existing stream is already running at the current sample rate"
.into();
let err = BackendSpecificError { description };
return Err(err.into());
}
}
}
// Get available sample rate ranges. // Get available sample rate ranges.
property_address.mSelector = kAudioDevicePropertyAvailableNominalSampleRates; property_address.mSelector = kAudioDevicePropertyAvailableNominalSampleRates;
let data_size = 0u32; let data_size = 0u32;
let status = AudioObjectGetPropertyDataSize( let status = AudioObjectGetPropertyDataSize(
device.audio_device_id, self.audio_device_id,
&property_address as *const _, &property_address as *const _,
0, 0,
null(), null(),
@ -665,7 +518,7 @@ impl EventLoop {
let mut ranges: Vec<u8> = vec![]; let mut ranges: Vec<u8> = vec![];
ranges.reserve_exact(data_size as usize); ranges.reserve_exact(data_size as usize);
let status = AudioObjectGetPropertyData( let status = AudioObjectGetPropertyData(
device.audio_device_id, self.audio_device_id,
&property_address as *const _, &property_address as *const _,
0, 0,
null(), null(),
@ -719,7 +572,7 @@ impl EventLoop {
// Add our sample rate change listener callback. // Add our sample rate change listener callback.
let reported_rate: f64 = 0.0; let reported_rate: f64 = 0.0;
let status = AudioObjectAddPropertyListener( let status = AudioObjectAddPropertyListener(
device.audio_device_id, self.audio_device_id,
&property_address as *const _, &property_address as *const _,
Some(rate_listener), Some(rate_listener),
&reported_rate as *const _ as *mut _, &reported_rate as *const _ as *mut _,
@ -729,7 +582,7 @@ impl EventLoop {
// Finally, set the sample rate. // Finally, set the sample rate.
let sample_rate = sample_rate as f64; let sample_rate = sample_rate as f64;
let status = AudioObjectSetPropertyData( let status = AudioObjectSetPropertyData(
device.audio_device_id, self.audio_device_id,
&property_address as *const _, &property_address as *const _,
0, 0,
null(), null(),
@ -753,7 +606,7 @@ impl EventLoop {
// Remove the `rate_listener` callback. // Remove the `rate_listener` callback.
let status = AudioObjectRemovePropertyListener( let status = AudioObjectRemovePropertyListener(
device.audio_device_id, self.audio_device_id,
&property_address as *const _, &property_address as *const _,
Some(rate_listener), Some(rate_listener),
&reported_rate as *const _ as *mut _, &reported_rate as *const _ as *mut _,
@ -762,18 +615,14 @@ impl EventLoop {
} }
} }
let mut audio_unit = audio_unit_from_device(device, true)?; let mut audio_unit = audio_unit_from_device(self, true)?;
// Set the stream in interleaved mode. // Set the stream in interleaved mode.
let asbd = asbd_from_format(format); let asbd = asbd_from_format(format);
audio_unit.set_property(kAudioUnitProperty_StreamFormat, scope, element, Some(&asbd))?; audio_unit.set_property(kAudioUnitProperty_StreamFormat, scope, element, Some(&asbd))?;
// Determine the future ID of the stream.
let stream_id = self.next_stream_id();
// Register the callback that is being called by coreaudio whenever it needs data to be // Register the callback that is being called by coreaudio whenever it needs data to be
// fed to the audio buffer. // fed to the audio buffer.
let user_callback = self.user_callback.clone();
let sample_format = format.data_type; let sample_format = format.data_type;
let bytes_per_channel = format.data_type.sample_size(); let bytes_per_channel = format.data_type.sample_size();
type Args = render_callback::Args<data::Raw>; type Args = render_callback::Args<data::Raw>;
@ -789,20 +638,14 @@ impl EventLoop {
mData: data mData: data
} = buffers[0]; } = buffers[0];
let mut user_callback = user_callback.lock().unwrap();
// A small macro to simplify handling the callback for different sample types. // A small macro to simplify handling the callback for different sample types.
macro_rules! try_callback { macro_rules! try_callback {
($SampleFormat:ident, $SampleType:ty) => {{ ($SampleFormat:ident, $SampleType:ty) => {{
let data_len = (data_byte_size as usize / bytes_per_channel) as usize; let data_len = (data_byte_size as usize / bytes_per_channel) as usize;
let data_slice = slice::from_raw_parts(data as *const $SampleType, data_len); let data_slice = slice::from_raw_parts(data as *const $SampleType, data_len);
let callback = match *user_callback {
UserCallback::Active(ref mut cb) => cb,
UserCallback::Inactive => return Ok(()),
};
let unknown_type_buffer = UnknownTypeInputBuffer::$SampleFormat(::InputBuffer { buffer: data_slice }); let unknown_type_buffer = UnknownTypeInputBuffer::$SampleFormat(::InputBuffer { buffer: data_slice });
let stream_data = StreamData::Input { buffer: unknown_type_buffer }; let stream_data = StreamData::Input { buffer: unknown_type_buffer };
callback(StreamId(stream_id), Ok(stream_data)); data_callback(stream_data);
}}; }};
} }
@ -815,23 +658,17 @@ impl EventLoop {
Ok(()) Ok(())
})?; })?;
// TODO: start playing now? is that consistent with the other backends?
audio_unit.start()?; audio_unit.start()?;
// Add the stream to the list of streams within `self`. Ok(Stream::new(StreamInner {
self.add_stream(stream_id, audio_unit, device.audio_device_id); playing: true,
audio_unit,
Ok(StreamId(stream_id)) device_id: self.audio_device_id,
}))
} }
#[inline] fn build_output_stream<D, E>(&self, format: &Format, mut data_callback: D, _error_callback: E) -> Result<Stream, BuildStreamError> where D: FnMut(StreamData) + Send + 'static, E: FnMut(StreamError) + Send + 'static {
fn build_output_stream( let mut audio_unit = audio_unit_from_device(self, false)?;
&self,
device: &Device,
format: &Format,
) -> Result<StreamId, BuildStreamError>
{
let mut audio_unit = audio_unit_from_device(device, false)?;
// The scope and element for working with a device's output stream. // The scope and element for working with a device's output stream.
let scope = Scope::Input; let scope = Scope::Input;
@ -841,12 +678,8 @@ impl EventLoop {
let asbd = asbd_from_format(format); let asbd = asbd_from_format(format);
audio_unit.set_property(kAudioUnitProperty_StreamFormat, scope, element, Some(&asbd))?; audio_unit.set_property(kAudioUnitProperty_StreamFormat, scope, element, Some(&asbd))?;
// Determine the future ID of the stream.
let stream_id = self.next_stream_id();
// Register the callback that is being called by coreaudio whenever it needs data to be // Register the callback that is being called by coreaudio whenever it needs data to be
// fed to the audio buffer. // fed to the audio buffer.
let user_callback = self.user_callback.clone();
let sample_format = format.data_type; let sample_format = format.data_type;
let bytes_per_channel = format.data_type.sample_size(); let bytes_per_channel = format.data_type.sample_size();
type Args = render_callback::Args<data::Raw>; type Args = render_callback::Args<data::Raw>;
@ -860,25 +693,14 @@ impl EventLoop {
mData: data mData: data
} = (*args.data.data).mBuffers[0]; } = (*args.data.data).mBuffers[0];
let mut user_callback = user_callback.lock().unwrap();
// A small macro to simplify handling the callback for different sample types. // A small macro to simplify handling the callback for different sample types.
macro_rules! try_callback { macro_rules! try_callback {
($SampleFormat:ident, $SampleType:ty, $equilibrium:expr) => {{ ($SampleFormat:ident, $SampleType:ty, $equilibrium:expr) => {{
let data_len = (data_byte_size as usize / bytes_per_channel) as usize; let data_len = (data_byte_size as usize / bytes_per_channel) as usize;
let data_slice = slice::from_raw_parts_mut(data as *mut $SampleType, data_len); let data_slice = slice::from_raw_parts_mut(data as *mut $SampleType, data_len);
let callback = match *user_callback {
UserCallback::Active(ref mut cb) => cb,
UserCallback::Inactive => {
for sample in data_slice.iter_mut() {
*sample = $equilibrium;
}
return Ok(());
}
};
let unknown_type_buffer = UnknownTypeOutputBuffer::$SampleFormat(::OutputBuffer { buffer: data_slice }); let unknown_type_buffer = UnknownTypeOutputBuffer::$SampleFormat(::OutputBuffer { buffer: data_slice });
let stream_data = StreamData::Output { buffer: unknown_type_buffer }; let stream_data = StreamData::Output { buffer: unknown_type_buffer };
callback(StreamId(stream_id), Ok(stream_data)); data_callback(stream_data);
}}; }};
} }
@ -891,25 +713,31 @@ impl EventLoop {
Ok(()) Ok(())
})?; })?;
// TODO: start playing now? is that consistent with the other backends?
audio_unit.start()?; audio_unit.start()?;
// Add the stream to the list of streams within `self`. Ok(Stream::new(StreamInner {
self.add_stream(stream_id, audio_unit, device.audio_device_id); playing: true,
audio_unit,
Ok(StreamId(stream_id)) device_id: self.audio_device_id,
}))
} }
}
fn destroy_stream(&self, stream_id: StreamId) { pub struct Stream {
{ inner: RefCell<StreamInner>,
let mut streams = self.streams.lock().unwrap(); }
streams[stream_id.0] = None;
impl Stream {
fn new(inner: StreamInner) -> Self {
Self {
inner: RefCell::new(inner),
} }
} }
}
fn play_stream(&self, stream_id: StreamId) -> Result<(), PlayStreamError> { impl StreamTrait for Stream {
let mut streams = self.streams.lock().unwrap(); fn play(&self) -> Result<(), PlayStreamError> {
let stream = streams[stream_id.0].as_mut().unwrap(); let mut stream = self.inner.borrow_mut();
if !stream.playing { if !stream.playing {
if let Err(e) = stream.audio_unit.start() { if let Err(e) = stream.audio_unit.start() {
@ -922,9 +750,8 @@ impl EventLoop {
Ok(()) Ok(())
} }
fn pause_stream(&self, stream_id: StreamId) -> Result<(), PauseStreamError> { fn pause(&self) -> Result<(), PauseStreamError> {
let mut streams = self.streams.lock().unwrap(); let mut stream = self.inner.borrow_mut();
let stream = streams[stream_id.0].as_mut().unwrap();
if stream.playing { if stream.playing {
if let Err(e) = stream.audio_unit.stop() { if let Err(e) = stream.audio_unit.stop() {
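
For orientation, this is roughly how the reworked API is driven from user code once this change lands: streams are built directly on a `Device` with per-stream data and error callbacks and controlled through `StreamTrait`. A minimal sketch, assuming the names shown in this diff; the silence-writing callback and the one-second sleep are illustrative only:

use cpal::traits::{DeviceTrait, HostTrait, StreamTrait};
use cpal::{StreamData, UnknownTypeOutputBuffer};

fn main() {
    let host = cpal::default_host();
    let device = host.default_output_device().expect("no output device available");
    let format = device.default_output_format().expect("no default output format");

    // Each stream owns its own callbacks; there is no shared `EventLoop::run`
    // closure and no `StreamId` multiplexing any more.
    let stream = device
        .build_output_stream(
            &format,
            move |data| match data {
                StreamData::Output { buffer: UnknownTypeOutputBuffer::F32(mut buffer) } => {
                    for sample in buffer.iter_mut() {
                        *sample = 0.0; // write silence
                    }
                }
                _ => (), // other sample formats omitted in this sketch
            },
            move |err| eprintln!("an error occurred on the stream: {:?}", err),
        )
        .expect("failed to build output stream");

    stream.play().expect("failed to play the stream");
    std::thread::sleep(std::time::Duration::from_secs(1));
    // Dropping `stream` releases the backend resources for that stream only.
}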

View File

@ -1,7 +1,6 @@
use std::mem; use std::mem;
use std::os::raw::c_void; use std::os::raw::c_void;
use std::slice::from_raw_parts; use std::slice::from_raw_parts;
use std::sync::Mutex;
use stdweb; use stdweb;
use stdweb::Reference; use stdweb::Reference;
use stdweb::unstable::TryInto; use stdweb::unstable::TryInto;
@ -17,25 +16,102 @@ use PauseStreamError;
use PlayStreamError; use PlayStreamError;
use SupportedFormatsError; use SupportedFormatsError;
use StreamData; use StreamData;
use StreamDataResult; use StreamError;
use SupportedFormat; use SupportedFormat;
use UnknownTypeOutputBuffer; use UnknownTypeOutputBuffer;
use traits::{DeviceTrait, EventLoopTrait, HostTrait, StreamIdTrait}; use traits::{DeviceTrait, HostTrait, StreamTrait};
// The emscripten backend currently works by instantiating an `AudioContext` object per `Stream`.
// Creating a stream creates a new `AudioContext`. Destroying a stream destroys it. Creation of a
// `Host` instance initializes the `stdweb` context.
/// The default emscripten host type. /// The default emscripten host type.
#[derive(Debug)] #[derive(Debug)]
pub struct Host; pub struct Host;
/// Content is false if the iterator is empty.
pub struct Devices(bool);
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Device;
pub struct Stream {
// A reference to an `AudioContext` object.
audio_ctxt_ref: Reference,
}
// Index within the `streams` array of the events loop.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct StreamId(usize);
pub type SupportedInputFormats = ::std::vec::IntoIter<SupportedFormat>;
pub type SupportedOutputFormats = ::std::vec::IntoIter<SupportedFormat>;
impl Host { impl Host {
pub fn new() -> Result<Self, crate::HostUnavailable> { pub fn new() -> Result<Self, crate::HostUnavailable> {
stdweb::initialize();
Ok(Host) Ok(Host)
} }
} }
impl Devices {
fn new() -> Result<Self, DevicesError> {
Ok(Self::default())
}
}
impl Device {
#[inline]
fn name(&self) -> Result<String, DeviceNameError> {
Ok("Default Device".to_owned())
}
#[inline]
fn supported_input_formats(&self) -> Result<SupportedInputFormats, SupportedFormatsError> {
unimplemented!();
}
#[inline]
fn supported_output_formats(&self) -> Result<SupportedOutputFormats, SupportedFormatsError> {
// TODO: right now cpal's API doesn't allow flexibility here
// "44100" and "2" (channels) have also been hard-coded in the rest of the code ; if
// this ever becomes more flexible, don't forget to change that
// According to https://developer.mozilla.org/en-US/docs/Web/API/BaseAudioContext/createBuffer
// browsers must support 1 to 32 channels at least and 8,000 Hz to 96,000 Hz.
//
// UPDATE: We can do this now. Might be best to use `crate::COMMON_SAMPLE_RATES` and
// filter out those that lay outside the range specified above.
Ok(
vec![
SupportedFormat {
channels: 2,
min_sample_rate: ::SampleRate(44100),
max_sample_rate: ::SampleRate(44100),
data_type: ::SampleFormat::F32,
},
].into_iter(),
)
}
fn default_input_format(&self) -> Result<Format, DefaultFormatError> {
unimplemented!();
}
fn default_output_format(&self) -> Result<Format, DefaultFormatError> {
// TODO: because it is hard coded, see supported_output_formats.
Ok(
Format {
channels: 2,
sample_rate: ::SampleRate(44100),
data_type: ::SampleFormat::F32,
},
)
}
}
impl HostTrait for Host { impl HostTrait for Host {
type Devices = Devices; type Devices = Devices;
type Device = Device; type Device = Device;
type EventLoop = EventLoop;
fn is_available() -> bool { fn is_available() -> bool {
// Assume this host is always available on emscripten. // Assume this host is always available on emscripten.
@ -53,15 +129,12 @@ impl HostTrait for Host {
fn default_output_device(&self) -> Option<Self::Device> { fn default_output_device(&self) -> Option<Self::Device> {
default_output_device() default_output_device()
} }
fn event_loop(&self) -> Self::EventLoop {
EventLoop::new()
}
} }
impl DeviceTrait for Device { impl DeviceTrait for Device {
type SupportedInputFormats = SupportedInputFormats; type SupportedInputFormats = SupportedInputFormats;
type SupportedOutputFormats = SupportedOutputFormats; type SupportedOutputFormats = SupportedOutputFormats;
type Stream = Stream;
fn name(&self) -> Result<String, DeviceNameError> { fn name(&self) -> Result<String, DeviceNameError> {
Device::name(self) Device::name(self)
@ -82,224 +155,124 @@ impl DeviceTrait for Device {
fn default_output_format(&self) -> Result<Format, DefaultFormatError> { fn default_output_format(&self) -> Result<Format, DefaultFormatError> {
Device::default_output_format(self) Device::default_output_format(self)
} }
}
impl EventLoopTrait for EventLoop { fn build_input_stream<D, E>(
type Device = Device;
type StreamId = StreamId;
fn build_input_stream(
&self, &self,
device: &Self::Device, _format: &Format,
format: &Format, _data_callback: D,
) -> Result<Self::StreamId, BuildStreamError> { _error_callback: E,
EventLoop::build_input_stream(self, device, format) ) -> Result<Self::Stream, BuildStreamError>
}
fn build_output_stream(
&self,
device: &Self::Device,
format: &Format,
) -> Result<Self::StreamId, BuildStreamError> {
EventLoop::build_output_stream(self, device, format)
}
fn play_stream(&self, stream: Self::StreamId) -> Result<(), PlayStreamError> {
EventLoop::play_stream(self, stream)
}
fn pause_stream(&self, stream: Self::StreamId) -> Result<(), PauseStreamError> {
EventLoop::pause_stream(self, stream)
}
fn destroy_stream(&self, stream: Self::StreamId) {
EventLoop::destroy_stream(self, stream)
}
fn run<F>(&self, callback: F) -> !
where where
F: FnMut(Self::StreamId, StreamDataResult) + Send, D: FnMut(StreamData) + Send + 'static,
E: FnMut(StreamError) + Send + 'static,
{ {
EventLoop::run(self, callback) unimplemented!()
}
fn build_output_stream<D, E>(
&self,
_format: &Format,
data_callback: D,
error_callback: E,
) -> Result<Self::Stream, BuildStreamError>
where
D: FnMut(StreamData) + Send + 'static,
E: FnMut(StreamError) + Send + 'static,
{
// Create the stream.
let audio_ctxt_ref = js!(return new AudioContext()).into_reference().unwrap();
let stream = Stream { audio_ctxt_ref };
// Specify the callback.
let mut user_data = (self, data_callback, error_callback);
let user_data_ptr = &mut user_data as *mut (_, _, _);
// Use `set_timeout` to invoke a Rust callback repeatedly.
//
// The job of this callback is to fill the content of the audio buffers.
//
// See also: The call to `set_timeout` at the end of the `audio_callback_fn` which creates
// the loop.
set_timeout(|| audio_callback_fn::<D, E>(user_data_ptr as *mut c_void), 10);
Ok(stream)
} }
} }
impl StreamIdTrait for StreamId {} impl StreamTrait for Stream {
fn play(&self) -> Result<(), PlayStreamError> {
// The emscripten backend works by having a global variable named `_cpal_audio_contexts`, which let audio_ctxt = &self.audio_ctxt_ref;
// is an array of `AudioContext` objects. A stream ID corresponds to an entry in this array. js!(@{audio_ctxt}.resume());
// Ok(())
// Creating a stream creates a new `AudioContext`. Destroying a stream destroys it.
// TODO: handle latency better ; right now we just use setInterval with the amount of sound data
// that is in each buffer ; this is obviously bad, and also the schedule is too tight and there may
// be underflows
pub struct EventLoop {
streams: Mutex<Vec<Option<Reference>>>,
}
impl EventLoop {
#[inline]
pub fn new() -> EventLoop {
stdweb::initialize();
EventLoop {
streams: Mutex::new(Vec::new()),
}
} }
#[inline] fn pause(&self) -> Result<(), PauseStreamError> {
fn run<F>(&self, callback: F) -> ! let audio_ctxt = &self.audio_ctxt_ref;
where F: FnMut(StreamId, StreamDataResult), js!(@{audio_ctxt}.suspend());
{ Ok(())
// The `run` function uses `set_timeout` to invoke a Rust callback repeatedly. The job }
// of this callback is to fill the content of the audio buffers. }
// The first argument of the callback function (a `void*`) is a casted pointer to `self` // The first argument of the callback function (a `void*`) is a casted pointer to `self`
// and to the `callback` parameter that was passed to `run`. // and to the `callback` parameter that was passed to `run`.
fn audio_callback_fn<D, E>(user_data_ptr: *mut c_void)
where
D: FnMut(StreamData) + Send + 'static,
E: FnMut(StreamError) + Send + 'static,
{
unsafe {
let user_data_ptr2 = user_data_ptr as *mut (&Stream, D, E);
let user_data = &mut *user_data_ptr2;
let (ref stream, ref mut data_cb, ref mut _err_cb) = user_data;
let audio_ctxt = &stream.audio_ctxt_ref;
// TODO: We should be re-using a buffer.
let mut temporary_buffer = vec![0.0; 44100 * 2 / 3];
fn callback_fn<F>(user_data_ptr: *mut c_void)
where F: FnMut(StreamId, StreamDataResult)
{ {
unsafe { let buffer = UnknownTypeOutputBuffer::F32(::OutputBuffer { buffer: &mut temporary_buffer });
let user_data_ptr2 = user_data_ptr as *mut (&EventLoop, F); let data = StreamData::Output { buffer: buffer };
let user_data = &mut *user_data_ptr2; data_cb(data);
let user_cb = &mut user_data.1;
let streams = user_data.0.streams.lock().unwrap().clone();
for (stream_id, stream) in streams.iter().enumerate() {
let stream = match stream.as_ref() {
Some(v) => v,
None => continue,
};
let mut temporary_buffer = vec![0.0; 44100 * 2 / 3];
{
let buffer = UnknownTypeOutputBuffer::F32(::OutputBuffer { buffer: &mut temporary_buffer });
let data = StreamData::Output { buffer: buffer };
user_cb(StreamId(stream_id), Ok(data));
// TODO: directly use a TypedArray<f32> once this is supported by stdweb
}
let typed_array = {
let f32_slice = temporary_buffer.as_slice();
let u8_slice: &[u8] = from_raw_parts(
f32_slice.as_ptr() as *const _,
f32_slice.len() * mem::size_of::<f32>(),
);
let typed_array: TypedArray<u8> = u8_slice.into();
typed_array
};
let num_channels = 2u32; // TODO: correct value
debug_assert_eq!(temporary_buffer.len() % num_channels as usize, 0);
js!(
var src_buffer = new Float32Array(@{typed_array}.buffer);
var context = @{stream};
var buf_len = @{temporary_buffer.len() as u32};
var num_channels = @{num_channels};
var buffer = context.createBuffer(num_channels, buf_len / num_channels, 44100);
for (var channel = 0; channel < num_channels; ++channel) {
var buffer_content = buffer.getChannelData(channel);
for (var i = 0; i < buf_len / num_channels; ++i) {
buffer_content[i] = src_buffer[i * num_channels + channel];
}
}
var node = context.createBufferSource();
node.buffer = buffer;
node.connect(context.destination);
node.start();
);
}
set_timeout(|| callback_fn::<F>(user_data_ptr), 330);
}
} }
let mut user_data = (self, callback); // TODO: directly use a TypedArray<f32> once this is supported by stdweb
let user_data_ptr = &mut user_data as *mut (_, _); let typed_array = {
let f32_slice = temporary_buffer.as_slice();
set_timeout(|| callback_fn::<F>(user_data_ptr as *mut _), 10); let u8_slice: &[u8] = from_raw_parts(
f32_slice.as_ptr() as *const _,
stdweb::event_loop(); f32_slice.len() * mem::size_of::<f32>(),
} );
let typed_array: TypedArray<u8> = u8_slice.into();
#[inline] typed_array
fn build_input_stream(&self, _: &Device, _format: &Format) -> Result<StreamId, BuildStreamError> {
unimplemented!();
}
#[inline]
fn build_output_stream(&self, _: &Device, _format: &Format) -> Result<StreamId, BuildStreamError> {
let stream = js!(return new AudioContext()).into_reference().unwrap();
let mut streams = self.streams.lock().unwrap();
let stream_id = if let Some(pos) = streams.iter().position(|v| v.is_none()) {
streams[pos] = Some(stream);
pos
} else {
let l = streams.len();
streams.push(Some(stream));
l
}; };
Ok(StreamId(stream_id)) let num_channels = 2u32; // TODO: correct value
} debug_assert_eq!(temporary_buffer.len() % num_channels as usize, 0);
#[inline] js!(
fn destroy_stream(&self, stream_id: StreamId) { var src_buffer = new Float32Array(@{typed_array}.buffer);
self.streams.lock().unwrap()[stream_id.0] = None; var context = @{audio_ctxt};
} var buf_len = @{temporary_buffer.len() as u32};
var num_channels = @{num_channels};
#[inline] var buffer = context.createBuffer(num_channels, buf_len / num_channels, 44100);
fn play_stream(&self, stream_id: StreamId) -> Result<(), PlayStreamError> { for (var channel = 0; channel < num_channels; ++channel) {
let streams = self.streams.lock().unwrap(); var buffer_content = buffer.getChannelData(channel);
let stream = streams for (var i = 0; i < buf_len / num_channels; ++i) {
.get(stream_id.0) buffer_content[i] = src_buffer[i * num_channels + channel];
.and_then(|v| v.as_ref()) }
.expect("invalid stream ID"); }
js!(@{stream}.resume());
Ok(())
}
#[inline] var node = context.createBufferSource();
fn pause_stream(&self, stream_id: StreamId) -> Result<(), PauseStreamError> { node.buffer = buffer;
let streams = self.streams.lock().unwrap(); node.connect(context.destination);
let stream = streams node.start();
.get(stream_id.0) );
.and_then(|v| v.as_ref())
.expect("invalid stream ID");
js!(@{stream}.suspend());
Ok(())
}
}
// Index within the `streams` array of the events loop. // TODO: handle latency better ; right now we just use setInterval with the amount of sound
#[derive(Debug, Clone, PartialEq, Eq, Hash)] // data that is in each buffer ; this is obviously bad, and also the schedule is too tight
pub struct StreamId(usize); // and there may be underflows
set_timeout(|| audio_callback_fn::<D, E>(user_data_ptr), 330);
// Detects whether the `AudioContext` global variable is available.
fn is_webaudio_available() -> bool {
stdweb::initialize();
js!(if (!AudioContext) {
return false;
} else {
return true;
}).try_into()
.unwrap()
}
// Content is false if the iterator is empty.
pub struct Devices(bool);
impl Devices {
fn new() -> Result<Self, DevicesError> {
Ok(Self::default())
} }
} }
@ -336,54 +309,13 @@ fn default_output_device() -> Option<Device> {
} }
} }
#[derive(Clone, Debug, PartialEq, Eq)] // Detects whether the `AudioContext` global variable is available.
pub struct Device; fn is_webaudio_available() -> bool {
stdweb::initialize();
impl Device { js!(if (!AudioContext) {
#[inline] return false;
fn name(&self) -> Result<String, DeviceNameError> { } else {
Ok("Default Device".to_owned()) return true;
} }).try_into()
.unwrap()
#[inline]
fn supported_input_formats(&self) -> Result<SupportedInputFormats, SupportedFormatsError> {
unimplemented!();
}
#[inline]
fn supported_output_formats(&self) -> Result<SupportedOutputFormats, SupportedFormatsError> {
// TODO: right now cpal's API doesn't allow flexibility here
// "44100" and "2" (channels) have also been hard-coded in the rest of the code ; if
// this ever becomes more flexible, don't forget to change that
// According to https://developer.mozilla.org/en-US/docs/Web/API/BaseAudioContext/createBuffer
// browsers must support 1 to 32 channels at least and 8,000 Hz to 96,000 Hz.
Ok(
vec![
SupportedFormat {
channels: 2,
min_sample_rate: ::SampleRate(44100),
max_sample_rate: ::SampleRate(44100),
data_type: ::SampleFormat::F32,
},
].into_iter(),
)
}
fn default_input_format(&self) -> Result<Format, DefaultFormatError> {
unimplemented!();
}
fn default_output_format(&self) -> Result<Format, DefaultFormatError> {
// TODO: because it is hard coded, see supported_output_formats.
Ok(
Format {
channels: 2,
sample_rate: ::SampleRate(44100),
data_type: ::SampleFormat::F32,
},
)
}
} }
pub type SupportedInputFormats = ::std::vec::IntoIter<SupportedFormat>;
pub type SupportedOutputFormats = ::std::vec::IntoIter<SupportedFormat>;
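
The `js!` block above copies the interleaved `temporary_buffer` into one Web Audio channel buffer per channel. For illustration, the same indexing written as plain Rust (a hypothetical helper, not part of this backend):

// Split an interleaved buffer (L R L R ...) into per-channel (planar) buffers,
// mirroring `buffer_content[i] = src_buffer[i * num_channels + channel]`.
fn deinterleave(interleaved: &[f32], num_channels: usize) -> Vec<Vec<f32>> {
    assert_eq!(interleaved.len() % num_channels, 0);
    let frames = interleaved.len() / num_channels;
    (0..num_channels)
        .map(|channel| {
            (0..frames)
                .map(|i| interleaved[i * num_channels + channel])
                .collect()
        })
        .collect()
}

fn main() {
    let interleaved = [0.1f32, -0.1, 0.2, -0.2]; // two frames, stereo
    let planar = deinterleave(&interleaved, 2);
    assert_eq!(planar[0], vec![0.1, 0.2]);   // left channel
    assert_eq!(planar[1], vec![-0.1, -0.2]); // right channel
}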

View File

@ -1,5 +1,3 @@
#![allow(dead_code)]
use BuildStreamError; use BuildStreamError;
use DefaultFormatError; use DefaultFormatError;
use DevicesError; use DevicesError;
@ -7,10 +5,11 @@ use DeviceNameError;
use Format; use Format;
use PauseStreamError; use PauseStreamError;
use PlayStreamError; use PlayStreamError;
use StreamDataResult; use StreamData;
use StreamError;
use SupportedFormatsError; use SupportedFormatsError;
use SupportedFormat; use SupportedFormat;
use traits::{DeviceTrait, EventLoopTrait, HostTrait, StreamIdTrait}; use traits::{DeviceTrait, HostTrait, StreamTrait};
#[derive(Default)] #[derive(Default)]
pub struct Devices; pub struct Devices;
@ -18,17 +17,16 @@ pub struct Devices;
#[derive(Clone, Debug, PartialEq, Eq)] #[derive(Clone, Debug, PartialEq, Eq)]
pub struct Device; pub struct Device;
pub struct EventLoop;
pub struct Host; pub struct Host;
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct StreamId; pub struct Stream;
pub struct SupportedInputFormats; pub struct SupportedInputFormats;
pub struct SupportedOutputFormats; pub struct SupportedOutputFormats;
impl Host { impl Host {
#[allow(dead_code)]
pub fn new() -> Result<Self, crate::HostUnavailable> { pub fn new() -> Result<Self, crate::HostUnavailable> {
Ok(Host) Ok(Host)
} }
@ -40,15 +38,10 @@ impl Devices {
} }
} }
impl EventLoop {
pub fn new() -> EventLoop {
EventLoop
}
}
impl DeviceTrait for Device { impl DeviceTrait for Device {
type SupportedInputFormats = SupportedInputFormats; type SupportedInputFormats = SupportedInputFormats;
type SupportedOutputFormats = SupportedOutputFormats; type SupportedOutputFormats = SupportedOutputFormats;
type Stream = Stream;
#[inline] #[inline]
fn name(&self) -> Result<String, DeviceNameError> { fn name(&self) -> Result<String, DeviceNameError> {
@ -74,49 +67,22 @@ impl DeviceTrait for Device {
fn default_output_format(&self) -> Result<Format, DefaultFormatError> { fn default_output_format(&self) -> Result<Format, DefaultFormatError> {
unimplemented!() unimplemented!()
} }
}
impl EventLoopTrait for EventLoop { fn build_input_stream<D, E>(&self, _format: &Format, _data_callback: D, _error_callback: E) -> Result<Self::Stream, BuildStreamError>
type Device = Device; where D: FnMut(StreamData) + Send + 'static, E: FnMut(StreamError) + Send + 'static {
type StreamId = StreamId;
#[inline]
fn run<F>(&self, _callback: F) -> !
where F: FnMut(StreamId, StreamDataResult)
{
loop { /* TODO: don't spin */ }
}
#[inline]
fn build_input_stream(&self, _: &Device, _: &Format) -> Result<StreamId, BuildStreamError> {
Err(BuildStreamError::DeviceNotAvailable)
}
#[inline]
fn build_output_stream(&self, _: &Device, _: &Format) -> Result<StreamId, BuildStreamError> {
Err(BuildStreamError::DeviceNotAvailable)
}
#[inline]
fn destroy_stream(&self, _: StreamId) {
unimplemented!() unimplemented!()
} }
#[inline] /// Create an output stream.
fn play_stream(&self, _: StreamId) -> Result<(), PlayStreamError> { fn build_output_stream<D, E>(&self, _format: &Format, _data_callback: D, _error_callback: E) -> Result<Self::Stream, BuildStreamError>
panic!() where D: FnMut(StreamData) + Send + 'static, E: FnMut(StreamError) + Send + 'static{
} unimplemented!()
#[inline]
fn pause_stream(&self, _: StreamId) -> Result<(), PauseStreamError> {
panic!()
} }
} }
impl HostTrait for Host { impl HostTrait for Host {
type Device = Device; type Device = Device;
type Devices = Devices; type Devices = Devices;
type EventLoop = EventLoop;
fn is_available() -> bool { fn is_available() -> bool {
false false
@ -133,13 +99,17 @@ impl HostTrait for Host {
fn default_output_device(&self) -> Option<Device> { fn default_output_device(&self) -> Option<Device> {
None None
} }
fn event_loop(&self) -> Self::EventLoop {
EventLoop::new()
}
} }
impl StreamIdTrait for StreamId {} impl StreamTrait for Stream {
fn play(&self) -> Result<(), PlayStreamError> {
unimplemented!()
}
fn pause(&self) -> Result<(), PauseStreamError> {
unimplemented!()
}
}
impl Iterator for Devices { impl Iterator for Devices {
type Item = Device; type Item = Device;

View File

@ -3,8 +3,8 @@
use super::check_result; use super::check_result;
use std::ptr; use std::ptr;
use super::winapi::um::objbase::{COINIT_MULTITHREADED};
use super::winapi::um::combaseapi::{CoInitializeEx, CoUninitialize}; use super::winapi::um::combaseapi::{CoInitializeEx, CoUninitialize};
use super::winapi::um::objbase::COINIT_MULTITHREADED;
thread_local!(static COM_INITIALIZED: ComInitialized = { thread_local!(static COM_INITIALIZED: ComInitialized = {
unsafe { unsafe {

View File

@ -7,66 +7,54 @@ use std::ops::{Deref, DerefMut};
use std::os::windows::ffi::OsStringExt; use std::os::windows::ffi::OsStringExt;
use std::ptr; use std::ptr;
use std::slice; use std::slice;
use std::sync::{Arc, Mutex, MutexGuard}; use std::sync::{Arc, Mutex, MutexGuard, atomic::Ordering};
use BackendSpecificError; use BackendSpecificError;
use DefaultFormatError; use DefaultFormatError;
use DeviceNameError; use DeviceNameError;
use DevicesError; use DevicesError;
use Format; use Format;
use SupportedFormatsError;
use SampleFormat; use SampleFormat;
use SampleRate; use SampleRate;
use SupportedFormat; use SupportedFormat;
use SupportedFormatsError;
use COMMON_SAMPLE_RATES; use COMMON_SAMPLE_RATES;
use super::check_result; use super::check_result;
use super::check_result_backend_specific; use super::check_result_backend_specific;
use super::com; use super::com;
use super::winapi::Interface;
use super::winapi::ctypes::c_void; use super::winapi::ctypes::c_void;
use super::winapi::shared::devpkey; use super::winapi::shared::devpkey;
use super::winapi::shared::guiddef::GUID;
use super::winapi::shared::ksmedia; use super::winapi::shared::ksmedia;
use super::winapi::shared::guiddef::{ use super::winapi::shared::minwindef::{DWORD, WORD};
GUID,
};
use super::winapi::shared::winerror;
use super::winapi::shared::minwindef::{
DWORD,
};
use super::winapi::shared::mmreg; use super::winapi::shared::mmreg;
use super::winapi::shared::winerror;
use super::winapi::shared::wtypes; use super::winapi::shared::wtypes;
use super::winapi::Interface;
// https://msdn.microsoft.com/en-us/library/cc230355.aspx // https://msdn.microsoft.com/en-us/library/cc230355.aspx
use super::winapi::um::winnt::LPWSTR;
use super::winapi::um::winnt::WCHAR;
use super::winapi::um::coml2api;
use super::winapi::um::audioclient::{ use super::winapi::um::audioclient::{
IAudioClient, self, IAudioClient, IID_IAudioClient, AUDCLNT_E_DEVICE_INVALIDATED,
IID_IAudioClient,
AUDCLNT_E_DEVICE_INVALIDATED,
}; };
use super::winapi::um::audiosessiontypes::{ use super::winapi::um::audiosessiontypes::{
AUDCLNT_SHAREMODE_SHARED, AUDCLNT_SHAREMODE_SHARED, AUDCLNT_STREAMFLAGS_EVENTCALLBACK,
}; };
use super::winapi::um::combaseapi::{ use super::winapi::um::combaseapi::{
CoCreateInstance, CoCreateInstance, CoTaskMemFree, PropVariantClear, CLSCTX_ALL,
CoTaskMemFree,
CLSCTX_ALL,
PropVariantClear,
}; };
use super::winapi::um::coml2api;
use super::winapi::um::mmdeviceapi::{ use super::winapi::um::mmdeviceapi::{
eAll, eAll, eCapture, eConsole, eRender, CLSID_MMDeviceEnumerator, EDataFlow, IMMDevice,
eCapture, IMMDeviceCollection, IMMDeviceEnumerator, IMMEndpoint, DEVICE_STATE_ACTIVE,
eConsole,
eRender,
CLSID_MMDeviceEnumerator,
DEVICE_STATE_ACTIVE,
EDataFlow,
IMMDevice,
IMMDeviceCollection,
IMMDeviceEnumerator,
IMMEndpoint,
}; };
use super::winapi::um::winnt::LPWSTR;
use super::winapi::um::winnt::WCHAR;
use super::{
stream::{AudioClientFlow, Stream, StreamInner},
winapi::um::synchapi,
};
use crate::{traits::DeviceTrait, BuildStreamError, StreamData, StreamError};
pub type SupportedInputFormats = std::vec::IntoIter<SupportedFormat>; pub type SupportedInputFormats = std::vec::IntoIter<SupportedFormat>;
pub type SupportedOutputFormats = std::vec::IntoIter<SupportedFormat>; pub type SupportedOutputFormats = std::vec::IntoIter<SupportedFormat>;
@ -74,10 +62,8 @@ pub type SupportedOutputFormats = std::vec::IntoIter<SupportedFormat>;
/// Wrapper because of that stupid decision to remove `Send` and `Sync` from raw pointers. /// Wrapper because of that stupid decision to remove `Send` and `Sync` from raw pointers.
#[derive(Copy, Clone)] #[derive(Copy, Clone)]
struct IAudioClientWrapper(*mut IAudioClient); struct IAudioClientWrapper(*mut IAudioClient);
unsafe impl Send for IAudioClientWrapper { unsafe impl Send for IAudioClientWrapper {}
} unsafe impl Sync for IAudioClientWrapper {}
unsafe impl Sync for IAudioClientWrapper {
}
/// An opaque type that identifies an end point. /// An opaque type that identifies an end point.
pub struct Device { pub struct Device {
@ -87,6 +73,70 @@ pub struct Device {
future_audio_client: Arc<Mutex<Option<IAudioClientWrapper>>>, // TODO: add NonZero around the ptr future_audio_client: Arc<Mutex<Option<IAudioClientWrapper>>>, // TODO: add NonZero around the ptr
} }
impl DeviceTrait for Device {
type SupportedInputFormats = SupportedInputFormats;
type SupportedOutputFormats = SupportedOutputFormats;
type Stream = Stream;
fn name(&self) -> Result<String, DeviceNameError> {
Device::name(self)
}
fn supported_input_formats(
&self,
) -> Result<Self::SupportedInputFormats, SupportedFormatsError> {
Device::supported_input_formats(self)
}
fn supported_output_formats(
&self,
) -> Result<Self::SupportedOutputFormats, SupportedFormatsError> {
Device::supported_output_formats(self)
}
fn default_input_format(&self) -> Result<Format, DefaultFormatError> {
Device::default_input_format(self)
}
fn default_output_format(&self) -> Result<Format, DefaultFormatError> {
Device::default_output_format(self)
}
fn build_input_stream<D, E>(
&self,
format: &Format,
data_callback: D,
error_callback: E,
) -> Result<Self::Stream, BuildStreamError>
where
D: FnMut(StreamData) + Send + 'static,
E: FnMut(StreamError) + Send + 'static,
{
Ok(Stream::new(
self.build_input_stream_inner(format)?,
data_callback,
error_callback,
))
}
fn build_output_stream<D, E>(
&self,
format: &Format,
data_callback: D,
error_callback: E,
) -> Result<Self::Stream, BuildStreamError>
where
D: FnMut(StreamData) + Send + 'static,
E: FnMut(StreamError) + Send + 'static,
{
Ok(Stream::new(
self.build_output_stream_inner(format)?,
data_callback,
error_callback,
))
}
}
struct Endpoint { struct Endpoint {
endpoint: *mut IMMEndpoint, endpoint: *mut IMMEndpoint,
} }
@ -107,7 +157,6 @@ impl Drop for WaveFormatExPtr {
} }
} }
impl WaveFormat { impl WaveFormat {
// Given a pointer to some format, returns a valid copy of the format. // Given a pointer to some format, returns a valid copy of the format.
pub fn copy_from_waveformatex_ptr(ptr: *const mmreg::WAVEFORMATEX) -> Option<Self> { pub fn copy_from_waveformatex_ptr(ptr: *const mmreg::WAVEFORMATEX) -> Option<Self> {
@ -115,11 +164,11 @@ impl WaveFormat {
match (*ptr).wFormatTag { match (*ptr).wFormatTag {
mmreg::WAVE_FORMAT_PCM | mmreg::WAVE_FORMAT_IEEE_FLOAT => { mmreg::WAVE_FORMAT_PCM | mmreg::WAVE_FORMAT_IEEE_FLOAT => {
Some(WaveFormat::Ex(*ptr)) Some(WaveFormat::Ex(*ptr))
}, }
mmreg::WAVE_FORMAT_EXTENSIBLE => { mmreg::WAVE_FORMAT_EXTENSIBLE => {
let extensible_ptr = ptr as *const mmreg::WAVEFORMATEXTENSIBLE; let extensible_ptr = ptr as *const mmreg::WAVEFORMATEXTENSIBLE;
Some(WaveFormat::Extensible(*extensible_ptr)) Some(WaveFormat::Extensible(*extensible_ptr))
}, }
_ => None, _ => None,
} }
} }
@ -150,11 +199,12 @@ impl DerefMut for WaveFormat {
} }
} }
unsafe fn immendpoint_from_immdevice(device: *const IMMDevice) -> *mut IMMEndpoint { unsafe fn immendpoint_from_immdevice(device: *const IMMDevice) -> *mut IMMEndpoint {
let mut endpoint: *mut IMMEndpoint = mem::uninitialized(); let mut endpoint: *mut IMMEndpoint = mem::uninitialized();
check_result((*device).QueryInterface(&IMMEndpoint::uuidof(), &mut endpoint as *mut _ as *mut _)) check_result(
.expect("could not query IMMDevice interface for IMMEndpoint"); (*device).QueryInterface(&IMMEndpoint::uuidof(), &mut endpoint as *mut _ as *mut _),
)
.expect("could not query IMMDevice interface for IMMEndpoint");
endpoint endpoint
} }
@ -169,10 +219,7 @@ unsafe fn data_flow_from_immendpoint(endpoint: *const IMMEndpoint) -> EDataFlow
pub unsafe fn is_format_supported( pub unsafe fn is_format_supported(
client: *const IAudioClient, client: *const IAudioClient,
waveformatex_ptr: *const mmreg::WAVEFORMATEX, waveformatex_ptr: *const mmreg::WAVEFORMATEX,
) -> Result<bool, SupportedFormatsError> ) -> Result<bool, SupportedFormatsError> {
{
/* /*
// `IsFormatSupported` checks whether the format is supported and fills // `IsFormatSupported` checks whether the format is supported and fills
// a `WAVEFORMATEX` // a `WAVEFORMATEX`
@ -205,7 +252,6 @@ pub unsafe fn is_format_supported(
}; };
*/ */
// Check if the given format is supported. // Check if the given format is supported.
let is_supported = |waveformatex_ptr, mut closest_waveformatex_ptr| { let is_supported = |waveformatex_ptr, mut closest_waveformatex_ptr| {
let result = (*client).IsFormatSupported( let result = (*client).IsFormatSupported(
@ -217,17 +263,11 @@ pub unsafe fn is_format_supported(
// has been found, but not an exact match) so we also treat this as unsupported. // has been found, but not an exact match) so we also treat this as unsupported.
match (result, check_result(result)) { match (result, check_result(result)) {
(_, Err(ref e)) if e.raw_os_error() == Some(AUDCLNT_E_DEVICE_INVALIDATED) => { (_, Err(ref e)) if e.raw_os_error() == Some(AUDCLNT_E_DEVICE_INVALIDATED) => {
return Err(SupportedFormatsError::DeviceNotAvailable); Err(SupportedFormatsError::DeviceNotAvailable)
}, }
(_, Err(_)) => { (_, Err(_)) => Ok(false),
Ok(false) (winerror::S_FALSE, _) => Ok(false),
}, (_, Ok(())) => Ok(true),
(winerror::S_FALSE, _) => {
Ok(false)
},
(_, Ok(())) => {
Ok(true)
},
} }
}; };
@ -240,34 +280,30 @@ pub unsafe fn is_format_supported(
let mut closest_waveformatex = *waveformatex_ptr; let mut closest_waveformatex = *waveformatex_ptr;
let closest_waveformatex_ptr = &mut closest_waveformatex as *mut _; let closest_waveformatex_ptr = &mut closest_waveformatex as *mut _;
is_supported(waveformatex_ptr, closest_waveformatex_ptr) is_supported(waveformatex_ptr, closest_waveformatex_ptr)
}, }
mmreg::WAVE_FORMAT_EXTENSIBLE => { mmreg::WAVE_FORMAT_EXTENSIBLE => {
let waveformatextensible_ptr = let waveformatextensible_ptr = waveformatex_ptr as *const mmreg::WAVEFORMATEXTENSIBLE;
waveformatex_ptr as *const mmreg::WAVEFORMATEXTENSIBLE;
let mut closest_waveformatextensible = *waveformatextensible_ptr; let mut closest_waveformatextensible = *waveformatextensible_ptr;
let closest_waveformatextensible_ptr = let closest_waveformatextensible_ptr = &mut closest_waveformatextensible as *mut _;
&mut closest_waveformatextensible as *mut _;
let closest_waveformatex_ptr = let closest_waveformatex_ptr =
closest_waveformatextensible_ptr as *mut mmreg::WAVEFORMATEX; closest_waveformatextensible_ptr as *mut mmreg::WAVEFORMATEX;
is_supported(waveformatex_ptr, closest_waveformatex_ptr) is_supported(waveformatex_ptr, closest_waveformatex_ptr)
}, }
_ => Ok(false), _ => Ok(false),
} }
} }
// Get a cpal Format from a WAVEFORMATEX. // Get a cpal Format from a WAVEFORMATEX.
unsafe fn format_from_waveformatex_ptr( unsafe fn format_from_waveformatex_ptr(
waveformatex_ptr: *const mmreg::WAVEFORMATEX, waveformatex_ptr: *const mmreg::WAVEFORMATEX,
) -> Option<Format> ) -> Option<Format> {
{
fn cmp_guid(a: &GUID, b: &GUID) -> bool { fn cmp_guid(a: &GUID, b: &GUID) -> bool {
a.Data1 == b.Data1 a.Data1 == b.Data1 && a.Data2 == b.Data2 && a.Data3 == b.Data3 && a.Data4 == b.Data4
&& a.Data2 == b.Data2
&& a.Data3 == b.Data3
&& a.Data4 == b.Data4
} }
let data_type = match ((*waveformatex_ptr).wBitsPerSample, (*waveformatex_ptr).wFormatTag) { let data_type = match (
(*waveformatex_ptr).wBitsPerSample,
(*waveformatex_ptr).wFormatTag,
) {
(16, mmreg::WAVE_FORMAT_PCM) => SampleFormat::I16, (16, mmreg::WAVE_FORMAT_PCM) => SampleFormat::I16,
(32, mmreg::WAVE_FORMAT_IEEE_FLOAT) => SampleFormat::F32, (32, mmreg::WAVE_FORMAT_IEEE_FLOAT) => SampleFormat::F32,
(n_bits, mmreg::WAVE_FORMAT_EXTENSIBLE) => { (n_bits, mmreg::WAVE_FORMAT_EXTENSIBLE) => {
@ -280,22 +316,20 @@ unsafe fn format_from_waveformatex_ptr(
} else { } else {
return None; return None;
} }
}, }
// Unknown data format returned by GetMixFormat. // Unknown data format returned by GetMixFormat.
_ => return None, _ => return None,
}; };
let format = Format { let format = Format {
channels: (*waveformatex_ptr).nChannels as _, channels: (*waveformatex_ptr).nChannels as _,
sample_rate: SampleRate((*waveformatex_ptr).nSamplesPerSec), sample_rate: SampleRate((*waveformatex_ptr).nSamplesPerSec),
data_type: data_type, data_type,
}; };
Some(format) Some(format)
} }
unsafe impl Send for Device { unsafe impl Send for Device {}
} unsafe impl Sync for Device {}
unsafe impl Sync for Device {
}
impl Device { impl Device {
pub fn name(&self) -> Result<String, DeviceNameError> { pub fn name(&self) -> Result<String, DeviceNameError> {
@ -306,12 +340,10 @@ impl Device {
// Get the endpoint's friendly-name property. // Get the endpoint's friendly-name property.
let mut property_value = mem::zeroed(); let mut property_value = mem::zeroed();
if let Err(err) = check_result( if let Err(err) = check_result((*property_store).GetValue(
(*property_store).GetValue( &devpkey::DEVPKEY_Device_FriendlyName as *const _ as *const _,
&devpkey::DEVPKEY_Device_FriendlyName as *const _ as *const _, &mut property_value,
&mut property_value )) {
)
) {
let description = format!("failed to retrieve name from property store: {}", err); let description = format!("failed to retrieve name from property store: {}", err);
let err = BackendSpecificError { description }; let err = BackendSpecificError { description };
return Err(err.into()); return Err(err.into());
@ -319,13 +351,14 @@ impl Device {
// Read the friendly-name from the union data field, expecting a *const u16. // Read the friendly-name from the union data field, expecting a *const u16.
if property_value.vt != wtypes::VT_LPWSTR as _ { if property_value.vt != wtypes::VT_LPWSTR as _ {
let description = let description = format!(
format!("property store produced invalid data: {:?}", property_value.vt); "property store produced invalid data: {:?}",
property_value.vt
);
let err = BackendSpecificError { description }; let err = BackendSpecificError { description };
return Err(err.into()); return Err(err.into());
} }
let ptr_usize: usize = *(&property_value.data as *const _ as *const usize); let ptr_utf16 = *(&property_value.data as *const _ as *const (*const u16));
let ptr_utf16 = ptr_usize as *const u16;
// Find the length of the friendly name. // Find the length of the friendly name.
let mut len = 0; let mut len = 0;
@ -351,14 +384,15 @@ impl Device {
#[inline] #[inline]
fn from_immdevice(device: *mut IMMDevice) -> Self { fn from_immdevice(device: *mut IMMDevice) -> Self {
Device { Device {
device: device, device,
future_audio_client: Arc::new(Mutex::new(None)), future_audio_client: Arc::new(Mutex::new(None)),
} }
} }
/// Ensures that `future_audio_client` contains a `Some` and returns a locked mutex to it. /// Ensures that `future_audio_client` contains a `Some` and returns a locked mutex to it.
fn ensure_future_audio_client(&self) fn ensure_future_audio_client(
-> Result<MutexGuard<Option<IAudioClientWrapper>>, IoError> { &self,
) -> Result<MutexGuard<Option<IAudioClientWrapper>>, IoError> {
let mut lock = self.future_audio_client.lock().unwrap(); let mut lock = self.future_audio_client.lock().unwrap();
if lock.is_some() { if lock.is_some() {
return Ok(lock); return Ok(lock);
@ -366,10 +400,12 @@ impl Device {
let audio_client: *mut IAudioClient = unsafe { let audio_client: *mut IAudioClient = unsafe {
let mut audio_client = mem::uninitialized(); let mut audio_client = mem::uninitialized();
let hresult = (*self.device).Activate(&IID_IAudioClient, let hresult = (*self.device).Activate(
CLSCTX_ALL, &IID_IAudioClient,
ptr::null_mut(), CLSCTX_ALL,
&mut audio_client); ptr::null_mut(),
&mut audio_client,
);
// can fail if the device has been disconnected since we enumerated it, or if // can fail if the device has been disconnected since we enumerated it, or if
// the device doesn't support playback for some reason // the device doesn't support playback for some reason
@ -416,7 +452,7 @@ impl Device {
let description = format!("{}", e); let description = format!("{}", e);
let err = BackendSpecificError { description }; let err = BackendSpecificError { description };
return Err(err.into()); return Err(err.into());
}, }
}; };
let client = lock.unwrap().0; let client = lock.unwrap().0;
@ -427,16 +463,19 @@ impl Device {
Ok(()) => (), Ok(()) => (),
Err(ref e) if e.raw_os_error() == Some(AUDCLNT_E_DEVICE_INVALIDATED) => { Err(ref e) if e.raw_os_error() == Some(AUDCLNT_E_DEVICE_INVALIDATED) => {
return Err(SupportedFormatsError::DeviceNotAvailable); return Err(SupportedFormatsError::DeviceNotAvailable);
}, }
Err(e) => { Err(e) => {
let description = format!("{}", e); let description = format!("{}", e);
let err = BackendSpecificError { description }; let err = BackendSpecificError { description };
return Err(err.into()); return Err(err.into());
}, }
}; };
// If the default format can't succeed we have no hope of finding other formats. // If the default format can't succeed we have no hope of finding other formats.
assert_eq!(try!(is_format_supported(client, default_waveformatex_ptr.0)), true); assert_eq!(
is_format_supported(client, default_waveformatex_ptr.0)?,
true
);
// Copy the format to use as a test format (as to avoid mutating the original format). // Copy the format to use as a test format (as to avoid mutating the original format).
let mut test_format = { let mut test_format = {
@ -457,8 +496,8 @@ impl Device {
let rate = rate.0 as DWORD; let rate = rate.0 as DWORD;
test_format.nSamplesPerSec = rate; test_format.nSamplesPerSec = rate;
test_format.nAvgBytesPerSec = test_format.nAvgBytesPerSec =
rate * (*default_waveformatex_ptr.0).nBlockAlign as DWORD; rate * u32::from((*default_waveformatex_ptr.0).nBlockAlign);
if try!(is_format_supported(client, test_format.as_ptr())) { if is_format_supported(client, test_format.as_ptr())? {
supported_sample_rates.push(rate); supported_sample_rates.push(rate);
} }
} }
@ -503,7 +542,9 @@ impl Device {
} }
} }
pub fn supported_output_formats(&self) -> Result<SupportedOutputFormats, SupportedFormatsError> { pub fn supported_output_formats(
&self,
) -> Result<SupportedOutputFormats, SupportedFormatsError> {
if self.data_flow() == eRender { if self.data_flow() == eRender {
self.supported_formats() self.supported_formats()
// If it's an input device, assume no output formats. // If it's an input device, assume no output formats.
@ -538,12 +579,12 @@ impl Device {
match check_result((*client).GetMixFormat(&mut format_ptr.0)) { match check_result((*client).GetMixFormat(&mut format_ptr.0)) {
Err(ref e) if e.raw_os_error() == Some(AUDCLNT_E_DEVICE_INVALIDATED) => { Err(ref e) if e.raw_os_error() == Some(AUDCLNT_E_DEVICE_INVALIDATED) => {
return Err(DefaultFormatError::DeviceNotAvailable); return Err(DefaultFormatError::DeviceNotAvailable);
}, }
Err(e) => { Err(e) => {
let description = format!("{}", e); let description = format!("{}", e);
let err = BackendSpecificError { description }; let err = BackendSpecificError { description };
return Err(err.into()); return Err(err.into());
}, }
Ok(()) => (), Ok(()) => (),
}; };
@ -573,6 +614,295 @@ impl Device {
Err(DefaultFormatError::StreamTypeNotSupported) Err(DefaultFormatError::StreamTypeNotSupported)
} }
} }
pub(crate) fn build_input_stream_inner(
&self,
format: &Format,
) -> Result<StreamInner, BuildStreamError> {
unsafe {
// Making sure that COM is initialized.
// It's not entirely certain that this is required, but when in doubt, do it.
com::com_initialized();
// Obtaining a `IAudioClient`.
let audio_client = match self.build_audioclient() {
Ok(client) => client,
Err(ref e) if e.raw_os_error() == Some(AUDCLNT_E_DEVICE_INVALIDATED) => {
return Err(BuildStreamError::DeviceNotAvailable)
}
Err(e) => {
let description = format!("{}", e);
let err = BackendSpecificError { description };
return Err(err.into());
}
};
// Computing the format and initializing the device.
let waveformatex = {
let format_attempt = format_to_waveformatextensible(format)
.ok_or(BuildStreamError::FormatNotSupported)?;
let share_mode = AUDCLNT_SHAREMODE_SHARED;
// Ensure the format is supported.
match super::device::is_format_supported(audio_client, &format_attempt.Format) {
Ok(false) => return Err(BuildStreamError::FormatNotSupported),
Err(_) => return Err(BuildStreamError::DeviceNotAvailable),
_ => (),
}
// finally initializing the audio client
let hresult = (*audio_client).Initialize(
share_mode,
AUDCLNT_STREAMFLAGS_EVENTCALLBACK,
0,
0,
&format_attempt.Format,
ptr::null(),
);
match check_result(hresult) {
Err(ref e) if e.raw_os_error() == Some(AUDCLNT_E_DEVICE_INVALIDATED) => {
(*audio_client).Release();
return Err(BuildStreamError::DeviceNotAvailable);
}
Err(e) => {
(*audio_client).Release();
let description = format!("{}", e);
let err = BackendSpecificError { description };
return Err(err.into());
}
Ok(()) => (),
};
format_attempt.Format
};
// obtaining the size of the samples buffer in number of frames
let max_frames_in_buffer = {
let mut max_frames_in_buffer = mem::uninitialized();
let hresult = (*audio_client).GetBufferSize(&mut max_frames_in_buffer);
match check_result(hresult) {
Err(ref e) if e.raw_os_error() == Some(AUDCLNT_E_DEVICE_INVALIDATED) => {
(*audio_client).Release();
return Err(BuildStreamError::DeviceNotAvailable);
}
Err(e) => {
(*audio_client).Release();
let description = format!("{}", e);
let err = BackendSpecificError { description };
return Err(err.into());
}
Ok(()) => (),
};
max_frames_in_buffer
};
// Creating the event that will be signalled whenever we need to submit some samples.
let event = {
let event = synchapi::CreateEventA(ptr::null_mut(), 0, 0, ptr::null());
if event.is_null() {
(*audio_client).Release();
let description = "failed to create event".to_string();
let err = BackendSpecificError { description };
return Err(err.into());
}
if let Err(e) = check_result((*audio_client).SetEventHandle(event)) {
(*audio_client).Release();
let description = format!("failed to call SetEventHandle: {}", e);
let err = BackendSpecificError { description };
return Err(err.into());
}
event
};
// Building a `IAudioCaptureClient` that will be used to read captured samples.
let capture_client = {
let mut capture_client: *mut audioclient::IAudioCaptureClient =
mem::uninitialized();
let hresult = (*audio_client).GetService(
&audioclient::IID_IAudioCaptureClient,
&mut capture_client as *mut *mut audioclient::IAudioCaptureClient as *mut _,
);
match check_result(hresult) {
Err(ref e) if e.raw_os_error() == Some(AUDCLNT_E_DEVICE_INVALIDATED) => {
(*audio_client).Release();
return Err(BuildStreamError::DeviceNotAvailable);
}
Err(e) => {
(*audio_client).Release();
let description = format!("failed to build capture client: {}", e);
let err = BackendSpecificError { description };
return Err(err.into());
}
Ok(()) => (),
};
&mut *capture_client
};
// Once we built the `StreamInner`, we add a command that will be picked up by the
// `run()` method and added to the `RunContext`.
let client_flow = AudioClientFlow::Capture { capture_client };
Ok(StreamInner {
audio_client,
client_flow,
event,
playing: false,
max_frames_in_buffer,
bytes_per_frame: waveformatex.nBlockAlign,
sample_format: format.data_type,
})
}
}
pub(crate) fn build_output_stream_inner(
&self,
format: &Format,
) -> Result<StreamInner, BuildStreamError> {
unsafe {
// Making sure that COM is initialized.
// It's not actually sure that this is required, but when in doubt do it.
com::com_initialized();
// Obtaining a `IAudioClient`.
let audio_client = match self.build_audioclient() {
Ok(client) => client,
Err(ref e) if e.raw_os_error() == Some(AUDCLNT_E_DEVICE_INVALIDATED) => {
return Err(BuildStreamError::DeviceNotAvailable)
}
Err(e) => {
let description = format!("{}", e);
let err = BackendSpecificError { description };
return Err(err.into());
}
};
// Computing the format and initializing the device.
let waveformatex = {
let format_attempt = format_to_waveformatextensible(format)
.ok_or(BuildStreamError::FormatNotSupported)?;
let share_mode = AUDCLNT_SHAREMODE_SHARED;
// Ensure the format is supported.
match super::device::is_format_supported(audio_client, &format_attempt.Format) {
Ok(false) => return Err(BuildStreamError::FormatNotSupported),
Err(_) => return Err(BuildStreamError::DeviceNotAvailable),
_ => (),
}
// finally initializing the audio client
let hresult = (*audio_client).Initialize(
share_mode,
AUDCLNT_STREAMFLAGS_EVENTCALLBACK,
0,
0,
&format_attempt.Format,
ptr::null(),
);
match check_result(hresult) {
Err(ref e) if e.raw_os_error() == Some(AUDCLNT_E_DEVICE_INVALIDATED) => {
(*audio_client).Release();
return Err(BuildStreamError::DeviceNotAvailable);
}
Err(e) => {
(*audio_client).Release();
let description = format!("{}", e);
let err = BackendSpecificError { description };
return Err(err.into());
}
Ok(()) => (),
};
format_attempt.Format
};
// Creating the event that will be signalled whenever we need to submit some samples.
let event = {
let event = synchapi::CreateEventA(ptr::null_mut(), 0, 0, ptr::null());
if event.is_null() {
(*audio_client).Release();
let description = "failed to create event".to_string();
let err = BackendSpecificError { description };
return Err(err.into());
}
if let Err(e) = check_result((*audio_client).SetEventHandle(event)) {
(*audio_client).Release();
let description = format!("failed to call SetEventHandle: {}", e);
let err = BackendSpecificError { description };
return Err(err.into());
};
event
};
// obtaining the size of the samples buffer in number of frames
let max_frames_in_buffer = {
let mut max_frames_in_buffer = mem::uninitialized();
let hresult = (*audio_client).GetBufferSize(&mut max_frames_in_buffer);
match check_result(hresult) {
Err(ref e) if e.raw_os_error() == Some(AUDCLNT_E_DEVICE_INVALIDATED) => {
(*audio_client).Release();
return Err(BuildStreamError::DeviceNotAvailable);
}
Err(e) => {
(*audio_client).Release();
let description = format!("failed to obtain buffer size: {}", e);
let err = BackendSpecificError { description };
return Err(err.into());
}
Ok(()) => (),
};
max_frames_in_buffer
};
// Building a `IAudioRenderClient` that will be used to fill the samples buffer.
let render_client = {
let mut render_client: *mut audioclient::IAudioRenderClient = mem::uninitialized();
let hresult = (*audio_client).GetService(
&audioclient::IID_IAudioRenderClient,
&mut render_client as *mut *mut audioclient::IAudioRenderClient as *mut _,
);
match check_result(hresult) {
Err(ref e) if e.raw_os_error() == Some(AUDCLNT_E_DEVICE_INVALIDATED) => {
(*audio_client).Release();
return Err(BuildStreamError::DeviceNotAvailable);
}
Err(e) => {
(*audio_client).Release();
let description = format!("failed to build render client: {}", e);
let err = BackendSpecificError { description };
return Err(err.into());
}
Ok(()) => (),
};
&mut *render_client
};
// Once we built the `StreamInner`, we add a command that will be picked up by the
// `run()` method and added to the `RunContext`.
let client_flow = AudioClientFlow::Render { render_client };
Ok(StreamInner {
audio_client,
client_flow,
event,
playing: false,
max_frames_in_buffer,
bytes_per_frame: waveformatex.nBlockAlign,
sample_format: format.data_type,
})
}
}
} }
impl PartialEq for Device { impl PartialEq for Device {
@ -586,38 +916,45 @@ impl PartialEq for Device {
// In this code section we're trying to use the GetId method for the device comparison, cf. // In this code section we're trying to use the GetId method for the device comparison, cf.
// https://docs.microsoft.com/en-us/windows/desktop/api/mmdeviceapi/nf-mmdeviceapi-immdevice-getid // https://docs.microsoft.com/en-us/windows/desktop/api/mmdeviceapi/nf-mmdeviceapi-immdevice-getid
unsafe { unsafe {
struct IdRAII (LPWSTR); struct IdRAII(LPWSTR);
/// RAII for device IDs. /// RAII for device IDs.
impl Drop for IdRAII { impl Drop for IdRAII {
fn drop(&mut self) { fn drop(&mut self) {
unsafe {CoTaskMemFree(self.0 as *mut c_void)} unsafe { CoTaskMemFree(self.0 as *mut c_void) }
} }
} }
let mut id1: LPWSTR = ptr::null_mut(); let mut id1: LPWSTR = ptr::null_mut();
let rc1 = (*self.device).GetId(&mut id1); let rc1 = (*self.device).GetId(&mut id1);
// GetId only fails with E_OUTOFMEMORY and if it does, we're probably dead already. // GetId only fails with E_OUTOFMEMORY and if it does, we're probably dead already.
// Plus it won't do to change the device comparison logic unexpectedly. // Plus it won't do to change the device comparison logic unexpectedly.
if rc1 != winerror::S_OK {panic! ("cpal: GetId failure: {}", rc1)} if rc1 != winerror::S_OK {
panic!("cpal: GetId failure: {}", rc1)
}
let id1 = IdRAII(id1); let id1 = IdRAII(id1);
let mut id2: LPWSTR = ptr::null_mut(); let mut id2: LPWSTR = ptr::null_mut();
let rc2 = (*other.device).GetId(&mut id2); let rc2 = (*other.device).GetId(&mut id2);
if rc2 != winerror::S_OK {panic! ("cpal: GetId failure: {}", rc1)} if rc2 != winerror::S_OK {
panic!("cpal: GetId failure: {}", rc1)
}
let id2 = IdRAII(id2); let id2 = IdRAII(id2);
// 16-bit null-terminated comparison. // 16-bit null-terminated comparison.
let mut offset = 0; let mut offset = 0;
loop { loop {
let w1: WCHAR = *id1.0.offset(offset); let w1: WCHAR = *id1.0.offset(offset);
let w2: WCHAR = *id2.0.offset(offset); let w2: WCHAR = *id2.0.offset(offset);
if w1 == 0 && w2 == 0 {return true} if w1 == 0 && w2 == 0 {
if w1 != w2 {return false} return true;
}
if w1 != w2 {
return false;
}
offset += 1; offset += 1;
} }
} }
} }
} }
impl Eq for Device { impl Eq for Device {}
}
impl Clone for Device { impl Clone for Device {
#[inline] #[inline]
@ -669,16 +1006,14 @@ impl From<*const IMMDevice> for Endpoint {
fn from(device: *const IMMDevice) -> Self { fn from(device: *const IMMDevice) -> Self {
unsafe { unsafe {
let endpoint = immendpoint_from_immdevice(device); let endpoint = immendpoint_from_immdevice(device);
Endpoint { endpoint: endpoint } Endpoint { endpoint }
} }
} }
} }
impl Endpoint { impl Endpoint {
fn data_flow(&self) -> EDataFlow { fn data_flow(&self) -> EDataFlow {
unsafe { unsafe { data_flow_from_immendpoint(self.endpoint) }
data_flow_from_immendpoint(self.endpoint)
}
} }
} }
@ -709,10 +1044,8 @@ lazy_static! {
/// RAII object around `IMMDeviceEnumerator`. /// RAII object around `IMMDeviceEnumerator`.
struct Enumerator(*mut IMMDeviceEnumerator); struct Enumerator(*mut IMMDeviceEnumerator);
unsafe impl Send for Enumerator { unsafe impl Send for Enumerator {}
} unsafe impl Sync for Enumerator {}
unsafe impl Sync for Enumerator {
}
impl Drop for Enumerator { impl Drop for Enumerator {
#[inline] #[inline]
@ -735,20 +1068,18 @@ impl Devices {
unsafe { unsafe {
let mut collection: *mut IMMDeviceCollection = mem::uninitialized(); let mut collection: *mut IMMDeviceCollection = mem::uninitialized();
// can fail because of wrong parameters (should never happen) or out of memory // can fail because of wrong parameters (should never happen) or out of memory
check_result_backend_specific( check_result_backend_specific((*ENUMERATOR.0).EnumAudioEndpoints(
(*ENUMERATOR.0).EnumAudioEndpoints( eAll,
eAll, DEVICE_STATE_ACTIVE,
DEVICE_STATE_ACTIVE, &mut collection,
&mut collection, ))?;
)
)?;
let mut count = mem::uninitialized(); let count = mem::uninitialized();
// can fail if the parameter is null, which should never happen // can fail if the parameter is null, which should never happen
check_result_backend_specific((*collection).GetCount(&mut count))?; check_result_backend_specific((*collection).GetCount(&count))?;
Ok(Devices { Ok(Devices {
collection: collection, collection,
total_count: count, total_count: count,
next_item: 0, next_item: 0,
}) })
@ -756,10 +1087,8 @@ impl Devices {
} }
} }
unsafe impl Send for Devices { unsafe impl Send for Devices {}
} unsafe impl Sync for Devices {}
unsafe impl Sync for Devices {
}
impl Drop for Devices { impl Drop for Devices {
#[inline] #[inline]
@ -799,8 +1128,7 @@ impl Iterator for Devices {
fn default_device(data_flow: EDataFlow) -> Option<Device> { fn default_device(data_flow: EDataFlow) -> Option<Device> {
unsafe { unsafe {
let mut device = mem::uninitialized(); let mut device = mem::uninitialized();
let hres = (*ENUMERATOR.0) let hres = (*ENUMERATOR.0).GetDefaultAudioEndpoint(data_flow, eConsole, &mut device);
.GetDefaultAudioEndpoint(data_flow, eConsole, &mut device);
if let Err(_err) = check_result(hres) { if let Err(_err) = check_result(hres) {
return None; // TODO: check specifically for `E_NOTFOUND`, and panic otherwise return None; // TODO: check specifically for `E_NOTFOUND`, and panic otherwise
} }
@ -815,3 +1143,57 @@ pub fn default_input_device() -> Option<Device> {
pub fn default_output_device() -> Option<Device> { pub fn default_output_device() -> Option<Device> {
default_device(eRender) default_device(eRender)
} }
// Turns a `Format` into a `WAVEFORMATEXTENSIBLE`.
//
// Returns `None` if the WAVEFORMATEXTENSIBLE does not support the given format.
fn format_to_waveformatextensible(format: &Format) -> Option<mmreg::WAVEFORMATEXTENSIBLE> {
let format_tag = match format.data_type {
SampleFormat::I16 => mmreg::WAVE_FORMAT_PCM,
SampleFormat::F32 => mmreg::WAVE_FORMAT_EXTENSIBLE,
SampleFormat::U16 => return None,
};
let channels = format.channels as WORD;
let sample_rate = format.sample_rate.0 as DWORD;
let sample_bytes = format.data_type.sample_size() as WORD;
let avg_bytes_per_sec = u32::from(channels) * sample_rate * u32::from(sample_bytes);
let block_align = channels * sample_bytes;
let bits_per_sample = 8 * sample_bytes;
let cb_size = match format.data_type {
SampleFormat::I16 => 0,
SampleFormat::F32 => {
let extensible_size = mem::size_of::<mmreg::WAVEFORMATEXTENSIBLE>();
let ex_size = mem::size_of::<mmreg::WAVEFORMATEX>();
(extensible_size - ex_size) as WORD
}
SampleFormat::U16 => return None,
};
let waveformatex = mmreg::WAVEFORMATEX {
wFormatTag: format_tag,
nChannels: channels,
nSamplesPerSec: sample_rate,
nAvgBytesPerSec: avg_bytes_per_sec,
nBlockAlign: block_align,
wBitsPerSample: bits_per_sample,
cbSize: cb_size,
};
// CPAL does not care about speaker positions, so pass audio straight through.
// TODO: This constant should be defined in winapi but is missing.
const KSAUDIO_SPEAKER_DIRECTOUT: DWORD = 0;
let channel_mask = KSAUDIO_SPEAKER_DIRECTOUT;
let sub_format = match format.data_type {
SampleFormat::I16 => ksmedia::KSDATAFORMAT_SUBTYPE_PCM,
SampleFormat::F32 => ksmedia::KSDATAFORMAT_SUBTYPE_IEEE_FLOAT,
SampleFormat::U16 => return None,
};
let waveformatextensible = mmreg::WAVEFORMATEXTENSIBLE {
Format: waveformatex,
Samples: bits_per_sample as WORD,
dwChannelMask: channel_mask,
SubFormat: sub_format,
};
Some(waveformatextensible)
}
View File
@ -1,20 +1,15 @@
extern crate winapi; extern crate winapi;
use BackendSpecificError; pub use self::device::{
use BuildStreamError; default_input_device, default_output_device, Device, Devices, SupportedInputFormats,
use DefaultFormatError; SupportedOutputFormats,
use DeviceNameError; };
use DevicesError; pub use self::stream::Stream;
use Format;
use PlayStreamError;
use PauseStreamError;
use StreamDataResult;
use SupportedFormatsError;
use self::winapi::um::winnt::HRESULT; use self::winapi::um::winnt::HRESULT;
use std::io::Error as IoError; use std::io::Error as IoError;
use traits::{DeviceTrait, EventLoopTrait, HostTrait, StreamIdTrait}; use traits::HostTrait;
pub use self::device::{Device, Devices, SupportedInputFormats, SupportedOutputFormats, default_input_device, default_output_device}; use BackendSpecificError;
pub use self::stream::{EventLoop, StreamId}; use DevicesError;
mod com; mod com;
mod device; mod device;
@ -37,7 +32,6 @@ impl Host {
impl HostTrait for Host { impl HostTrait for Host {
type Devices = Devices; type Devices = Devices;
type Device = Device; type Device = Device;
type EventLoop = EventLoop;
fn is_available() -> bool { fn is_available() -> bool {
// Assume WASAPI is always available on windows. // Assume WASAPI is always available on windows.
@ -55,79 +49,8 @@ impl HostTrait for Host {
fn default_output_device(&self) -> Option<Self::Device> { fn default_output_device(&self) -> Option<Self::Device> {
default_output_device() default_output_device()
} }
fn event_loop(&self) -> Self::EventLoop {
EventLoop::new()
}
} }
impl DeviceTrait for Device {
type SupportedInputFormats = SupportedInputFormats;
type SupportedOutputFormats = SupportedOutputFormats;
fn name(&self) -> Result<String, DeviceNameError> {
Device::name(self)
}
fn supported_input_formats(&self) -> Result<Self::SupportedInputFormats, SupportedFormatsError> {
Device::supported_input_formats(self)
}
fn supported_output_formats(&self) -> Result<Self::SupportedOutputFormats, SupportedFormatsError> {
Device::supported_output_formats(self)
}
fn default_input_format(&self) -> Result<Format, DefaultFormatError> {
Device::default_input_format(self)
}
fn default_output_format(&self) -> Result<Format, DefaultFormatError> {
Device::default_output_format(self)
}
}
impl EventLoopTrait for EventLoop {
type Device = Device;
type StreamId = StreamId;
fn build_input_stream(
&self,
device: &Self::Device,
format: &Format,
) -> Result<Self::StreamId, BuildStreamError> {
EventLoop::build_input_stream(self, device, format)
}
fn build_output_stream(
&self,
device: &Self::Device,
format: &Format,
) -> Result<Self::StreamId, BuildStreamError> {
EventLoop::build_output_stream(self, device, format)
}
fn play_stream(&self, stream: Self::StreamId) -> Result<(), PlayStreamError> {
EventLoop::play_stream(self, stream)
}
fn pause_stream(&self, stream: Self::StreamId) -> Result<(), PauseStreamError> {
EventLoop::pause_stream(self, stream)
}
fn destroy_stream(&self, stream: Self::StreamId) {
EventLoop::destroy_stream(self, stream)
}
fn run<F>(&self, callback: F) -> !
where
F: FnMut(Self::StreamId, StreamDataResult) + Send,
{
EventLoop::run(self, callback)
}
}
impl StreamIdTrait for StreamId {}
#[inline] #[inline]
fn check_result(result: HRESULT) -> Result<(), IoError> { fn check_result(result: HRESULT) -> Result<(), IoError> {
if result < 0 { if result < 0 {
@ -140,9 +63,8 @@ fn check_result(result: HRESULT) -> Result<(), IoError> {
fn check_result_backend_specific(result: HRESULT) -> Result<(), BackendSpecificError> { fn check_result_backend_specific(result: HRESULT) -> Result<(), BackendSpecificError> {
match check_result(result) { match check_result(result) {
Ok(()) => Ok(()), Ok(()) => Ok(()),
Err(err) => { Err(err) => Err(BackendSpecificError {
let description = format!("{}", err); description: format!("{}", err),
return Err(BackendSpecificError { description }); }),
}
} }
} }
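As a rough illustration of the HRESULT-checking idiom behind `check_result` and `check_result_backend_specific` (a standalone sketch, not necessarily the exact cpal implementation): negative HRESULT values signal failure and can be surfaced as `std::io::Error` values.

```rust
use std::io::Error;

// Negative HRESULTs indicate failure; wrap them in an io::Error for the caller.
fn check_result(result: i32) -> Result<(), Error> {
    if result < 0 {
        Err(Error::from_raw_os_error(result))
    } else {
        Ok(())
    }
}

fn main() {
    assert!(check_result(0).is_ok()); // S_OK
    assert!(check_result(-2147024809).is_err()); // E_INVALIDARG, as an example failure code
}
```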
File diff suppressed because it is too large
View File
@ -7,25 +7,21 @@
//! least one [**DefaultHost**](./struct.Host.html) that is guaranteed to be available. //! least one [**DefaultHost**](./struct.Host.html) that is guaranteed to be available.
//! - A [**Device**](./struct.Device.html) is an audio device that may have any number of input and //! - A [**Device**](./struct.Device.html) is an audio device that may have any number of input and
//! output streams. //! output streams.
//! - A stream is an open flow of audio data. Input streams allow you to receive audio data, output //! - A [**Stream**](./trait.Stream.html) is an open flow of audio data. Input streams allow you to
//! streams allow you to play audio data. You must choose which **Device** will run your stream //! receive audio data, output streams allow you to play audio data. You must choose which
//! before you can create one. Often, a default device can be retrieved via the **Host**. //! **Device** will run your stream before you can create one. Often, a default device can be
//! - An [**EventLoop**](./struct.EventLoop.html) is a collection of streams being run by one or //! retrieved via the **Host**.
//! more **Device**s under a single **Host**. Each stream must belong to an **EventLoop**, and
//! all the streams that belong to an **EventLoop** are managed together.
//! //!
//! The first step is to initialise the `Host` (for accessing audio devices) and create an //! The first step is to initialise the `Host`:
//! `EventLoop`:
//! //!
//! ``` //! ```
//! use cpal::traits::HostTrait; //! use cpal::traits::HostTrait;
//! let host = cpal::default_host(); //! let host = cpal::default_host();
//! let event_loop = host.event_loop();
//! ``` //! ```
//! //!
//! Then choose a `Device`. The easiest way is to use the default input or output `Device` via the //! Then choose an available `Device`. The easiest way is to use the default input or output
//! `default_input_device()` or `default_output_device()` functions. Alternatively you can //! `Device` via the `default_input_device()` or `default_output_device()` functions. Alternatively
//! enumerate all the available devices with the `devices()` function. Beware that the //! you can enumerate all the available devices with the `devices()` function. Beware that the
//! `default_*_device()` functions return an `Option` in case no device is available for that //! `default_*_device()` functions return an `Option` in case no device is available for that
//! stream type on the system. //! stream type on the system.
//! //!
@ -56,87 +52,97 @@
//! .with_max_sample_rate(); //! .with_max_sample_rate();
//! ``` //! ```
//! //!
//! Now that we have everything for the stream, we can create it from our event loop: //! Now that we have everything for the stream, we are ready to create it from our selected device:
//! //!
//! ```no_run //! ```no_run
//! use cpal::traits::{DeviceTrait, EventLoopTrait, HostTrait}; //! use cpal::traits::{DeviceTrait, HostTrait, StreamTrait};
//! # let host = cpal::default_host(); //! # let host = cpal::default_host();
//! # let event_loop = host.event_loop();
//! # let device = host.default_output_device().unwrap(); //! # let device = host.default_output_device().unwrap();
//! # let format = device.supported_output_formats().unwrap().next().unwrap().with_max_sample_rate(); //! # let format = device.default_output_format().unwrap();
//! let stream_id = event_loop.build_output_stream(&device, &format).unwrap(); //! let stream = device.build_output_stream(
//! &format,
//! move |data| {
//! // react to stream events and read or write stream data here.
//! },
//! move |err| {
//! // react to errors here.
//! },
//! );
//! ``` //! ```
//! //!
//! The value returned by `build_output_stream()` is of type `StreamId` and is an identifier that //! While the stream is running, the selected audio device will periodically call the data callback
//! will allow you to control the stream. //! that was passed to the function. The callback is passed an instance of type `StreamData` that
//! represents the data that must be read from or written to. The inner `UnknownTypeOutputBuffer`
//! can be one of `I16`, `U16` or `F32` depending on the format that was passed to
//! `build_output_stream`.
//! //!
//! Now we must start the stream. This is done with the `play_stream()` method on the event loop. //! > **Note**: Creating and running a stream will *not* block the thread. On modern platforms, the
//! //! > given callback is called by a dedicated, high-priority thread responsible for delivering
//! ```no_run //! > audio data to the system's audio device in a timely manner. On older platforms that only
//! # use cpal::traits::{EventLoopTrait, HostTrait}; //! > provide a blocking API (e.g. ALSA), CPAL will create a thread in order to consistently
//! # let host = cpal::default_host(); //! > provide non-blocking behaviour (currently this is a thread per stream, but this may change to
//! # let event_loop = host.event_loop(); //! > use a single thread for all streams). *If this is an issue for your platform or design,
//! # let stream_id = unimplemented!(); //! > please share your issue and use-case with the CPAL team on the github issue tracker for
//! event_loop.play_stream(stream_id).expect("failed to play_stream"); //! > consideration.*
//! ```
//!
//! Now everything is ready! We call `run()` on the `event_loop` to begin processing.
//!
//! ```no_run
//! # use cpal::traits::{EventLoopTrait, HostTrait};
//! # let host = cpal::default_host();
//! # let event_loop = host.event_loop();
//! event_loop.run(move |_stream_id, _stream_result| {
//! // react to stream events and read or write stream data here
//! });
//! ```
//!
//! > **Note**: Calling `run()` will block the thread forever, so it's usually best done in a
//! > separate thread.
//!
//! While `run()` is running, the audio device of the user will from time to time call the callback
//! that you passed to this function. The callback gets passed the stream ID and an instance of type
//! `StreamData` that represents the data that must be read from or written to. The inner
//! `UnknownTypeOutputBuffer` can be one of `I16`, `U16` or `F32` depending on the format that was
//! passed to `build_output_stream`.
//! //!
//! In this example, we simply fill the given output buffer with zeroes. //! In this example, we simply fill the given output buffer with zeroes.
//! //!
//! ```no_run //! ```no_run
//! use cpal::{StreamData, UnknownTypeOutputBuffer}; //! use cpal::{StreamData, UnknownTypeOutputBuffer};
//! use cpal::traits::{EventLoopTrait, HostTrait}; //! use cpal::traits::{DeviceTrait, HostTrait, StreamTrait};
//! # let host = cpal::default_host(); //! # let host = cpal::default_host();
//! # let event_loop = host.event_loop(); //! # let device = host.default_output_device().unwrap();
//! event_loop.run(move |stream_id, stream_result| { //! # let format = device.default_output_format().unwrap();
//! let stream_data = match stream_result { //! let stream = device.build_output_stream(
//! Ok(data) => data, //! &format,
//! Err(err) => { //! move |data| {
//! eprintln!("an error occurred on stream {:?}: {}", stream_id, err); //! match data {
//! return; //! StreamData::Output { buffer: UnknownTypeOutputBuffer::U16(mut buffer) } => {
//! for elem in buffer.iter_mut() {
//! *elem = u16::max_value() / 2;
//! }
//! },
//! StreamData::Output { buffer: UnknownTypeOutputBuffer::I16(mut buffer) } => {
//! for elem in buffer.iter_mut() {
//! *elem = 0;
//! }
//! },
//! StreamData::Output { buffer: UnknownTypeOutputBuffer::F32(mut buffer) } => {
//! for elem in buffer.iter_mut() {
//! *elem = 0.0;
//! }
//! },
//! _ => (),
//! } //! }
//! _ => return, //! },
//! }; //! move |err| {
//! //! eprintln!("an error occurred on the output audio stream: {}", err);
//! match stream_data { //! },
//! StreamData::Output { buffer: UnknownTypeOutputBuffer::U16(mut buffer) } => { //! );
//! for elem in buffer.iter_mut() {
//! *elem = u16::max_value() / 2;
//! }
//! },
//! StreamData::Output { buffer: UnknownTypeOutputBuffer::I16(mut buffer) } => {
//! for elem in buffer.iter_mut() {
//! *elem = 0;
//! }
//! },
//! StreamData::Output { buffer: UnknownTypeOutputBuffer::F32(mut buffer) } => {
//! for elem in buffer.iter_mut() {
//! *elem = 0.0;
//! }
//! },
//! _ => (),
//! }
//! });
//! ``` //! ```
//!
//! Not all platforms automatically run the stream upon creation. To ensure the stream has started,
//! we can use `Stream::play`.
//!
//! ```no_run
//! # use cpal::traits::{DeviceTrait, HostTrait, StreamTrait};
//! # let host = cpal::default_host();
//! # let device = host.default_output_device().unwrap();
//! # let format = device.default_output_format().unwrap();
//! # let stream = device.build_output_stream(&format, move |_data| {}, move |_err| {}).unwrap();
//! stream.play().unwrap();
//! ```
//!
//! Some devices support pausing the audio stream. This can be useful for saving energy in moments
//! of silence.
//!
//! ```no_run
//! # use cpal::traits::{DeviceTrait, HostTrait, StreamTrait};
//! # let host = cpal::default_host();
//! # let device = host.default_output_device().unwrap();
//! # let format = device.default_output_format().unwrap();
//! # let stream = device.build_output_stream(&format, move |_data| {}, move |_err| {}).unwrap();
//! stream.pause().unwrap();
//! ```
#![recursion_limit = "512"] #![recursion_limit = "512"]
@ -151,11 +157,10 @@ extern crate thiserror;
pub use error::*; pub use error::*;
pub use platform::{ pub use platform::{
ALL_HOSTS, Device, Devices, EventLoop, Host, HostId, SupportedInputFormats, ALL_HOSTS, available_hosts, default_host, Device, Devices, Host, host_from_id,
SupportedOutputFormats, StreamId, available_hosts, default_host, host_from_id, HostId, Stream, SupportedInputFormats, SupportedOutputFormats,
}; };
pub use samples_formats::{Sample, SampleFormat}; pub use samples_formats::{Sample, SampleFormat};
use std::ops::{Deref, DerefMut}; use std::ops::{Deref, DerefMut};
mod error; mod error;
@ -198,6 +203,7 @@ pub struct SupportedFormat {
} }
/// Stream data passed to the `EventLoop::run` callback. /// Stream data passed to the `EventLoop::run` callback.
#[derive(Debug)]
pub enum StreamData<'a> { pub enum StreamData<'a> {
Input { Input {
buffer: UnknownTypeInputBuffer<'a>, buffer: UnknownTypeInputBuffer<'a>,
@ -207,16 +213,13 @@ pub enum StreamData<'a> {
}, },
} }
/// Stream data passed to the `EventLoop::run` callback, or an error in the case that the device
/// was invalidated or some backend-specific error occurred.
pub type StreamDataResult<'a> = Result<StreamData<'a>, StreamError>;
/// Represents a buffer containing audio data that may be read. /// Represents a buffer containing audio data that may be read.
/// ///
/// This struct implements the `Deref` trait targeting `[T]`. Therefore this buffer can be read the /// This struct implements the `Deref` trait targeting `[T]`. Therefore this buffer can be read the
/// same way as reading from a `Vec` or any other kind of Rust array. /// same way as reading from a `Vec` or any other kind of Rust array.
// TODO: explain audio stuff in general // TODO: explain audio stuff in general
// TODO: remove the wrapper and just use slices in next major version // TODO: remove the wrapper and just use slices in next major version
#[derive(Debug)]
pub struct InputBuffer<'a, T: 'a> pub struct InputBuffer<'a, T: 'a>
where where
T: Sample, T: Sample,
@ -232,6 +235,7 @@ where
// TODO: explain audio stuff in general // TODO: explain audio stuff in general
// TODO: remove the wrapper and just use slices // TODO: remove the wrapper and just use slices
#[must_use] #[must_use]
#[derive(Debug)]
pub struct OutputBuffer<'a, T: 'a> pub struct OutputBuffer<'a, T: 'a>
where where
T: Sample, T: Sample,
@ -242,6 +246,7 @@ where
/// This is the struct that is provided to you by cpal when you want to read samples from a buffer. /// This is the struct that is provided to you by cpal when you want to read samples from a buffer.
/// ///
/// Since the type of data is only known at runtime, you have to read the right buffer. /// Since the type of data is only known at runtime, you have to read the right buffer.
#[derive(Debug)]
pub enum UnknownTypeInputBuffer<'a> { pub enum UnknownTypeInputBuffer<'a> {
/// Samples whose format is `u16`. /// Samples whose format is `u16`.
U16(InputBuffer<'a, u16>), U16(InputBuffer<'a, u16>),
@ -254,6 +259,7 @@ pub enum UnknownTypeInputBuffer<'a> {
/// This is the struct that is provided to you by cpal when you want to write samples to a buffer. /// This is the struct that is provided to you by cpal when you want to write samples to a buffer.
/// ///
/// Since the type of data is only known at runtime, you have to fill the right buffer. /// Since the type of data is only known at runtime, you have to fill the right buffer.
#[derive(Debug)]
pub enum UnknownTypeOutputBuffer<'a> { pub enum UnknownTypeOutputBuffer<'a> {
/// Samples whose format is `u16`. /// Samples whose format is `u16`.
U16(OutputBuffer<'a, u16>), U16(OutputBuffer<'a, u16>),
View File
@ -58,14 +58,14 @@ macro_rules! impl_platform_host {
/// type. /// type.
pub struct Devices(DevicesInner); pub struct Devices(DevicesInner);
/// The **EventLoop** implementation associated with the platform's dynamically dispatched /// The **Stream** implementation associated with the platform's dynamically dispatched
/// **Host** type. /// **Host** type.
pub struct EventLoop(EventLoopInner); // Streams cannot be `Send` or `Sync` if we plan to support Android's AAudio API. This is
// because the stream API is not thread-safe, and the API prohibits calling certain
/// The **StreamId** implementation associated with the platform's dynamically dispatched // functions within the callback.
/// **Host** type. //
#[derive(Clone, Debug, Eq, Hash, PartialEq)] // TODO: Confirm this and add more specific detail and references.
pub struct StreamId(StreamIdInner); pub struct Stream(StreamInner, crate::platform::NotSendSyncAcrossAllPlatforms);
/// The **SupportedInputFormats** iterator associated with the platform's dynamically /// The **SupportedInputFormats** iterator associated with the platform's dynamically
/// dispatched **Host** type. /// dispatched **Host** type.
@ -95,22 +95,15 @@ macro_rules! impl_platform_host {
)* )*
} }
enum EventLoopInner {
$(
$HostVariant(crate::host::$host_mod::EventLoop),
)*
}
enum HostInner { enum HostInner {
$( $(
$HostVariant(crate::host::$host_mod::Host), $HostVariant(crate::host::$host_mod::Host),
)* )*
} }
#[derive(Clone, Debug, Eq, Hash, PartialEq)] enum StreamInner {
enum StreamIdInner {
$( $(
$HostVariant(crate::host::$host_mod::StreamId), $HostVariant(crate::host::$host_mod::Stream),
)* )*
} }
@ -154,7 +147,7 @@ macro_rules! impl_platform_host {
match self.0 { match self.0 {
$( $(
DevicesInner::$HostVariant(ref mut d) => { DevicesInner::$HostVariant(ref mut d) => {
d.next().map(DeviceInner::$HostVariant).map(Device) d.next().map(DeviceInner::$HostVariant).map(Device::from)
} }
)* )*
} }
@ -212,6 +205,7 @@ macro_rules! impl_platform_host {
impl crate::traits::DeviceTrait for Device { impl crate::traits::DeviceTrait for Device {
type SupportedInputFormats = SupportedInputFormats; type SupportedInputFormats = SupportedInputFormats;
type SupportedOutputFormats = SupportedOutputFormats; type SupportedOutputFormats = SupportedOutputFormats;
type Stream = Stream;
fn name(&self) -> Result<String, crate::DeviceNameError> { fn name(&self) -> Result<String, crate::DeviceNameError> {
match self.0 { match self.0 {
@ -260,96 +254,25 @@ macro_rules! impl_platform_host {
)* )*
} }
} }
}
impl crate::traits::EventLoopTrait for EventLoop { fn build_input_stream<D, E>(&self, format: &crate::Format, data_callback: D, error_callback: E) -> Result<Self::Stream, crate::BuildStreamError>
type StreamId = StreamId; where D: FnMut(crate::StreamData) + Send + 'static, E: FnMut(crate::StreamError) + Send + 'static {
type Device = Device;
#[allow(unreachable_patterns)]
fn build_input_stream(
&self,
device: &Self::Device,
format: &crate::Format,
) -> Result<Self::StreamId, crate::BuildStreamError> {
match (&self.0, &device.0) {
$(
(&EventLoopInner::$HostVariant(ref e), &DeviceInner::$HostVariant(ref d)) => {
e.build_input_stream(d, format)
.map(StreamIdInner::$HostVariant)
.map(StreamId)
}
)*
_ => panic!("tried to build a stream with a device from another host"),
}
}
#[allow(unreachable_patterns)]
fn build_output_stream(
&self,
device: &Self::Device,
format: &crate::Format,
) -> Result<Self::StreamId, crate::BuildStreamError> {
match (&self.0, &device.0) {
$(
(&EventLoopInner::$HostVariant(ref e), &DeviceInner::$HostVariant(ref d)) => {
e.build_output_stream(d, format)
.map(StreamIdInner::$HostVariant)
.map(StreamId)
}
)*
_ => panic!("tried to build a stream with a device from another host"),
}
}
#[allow(unreachable_patterns)]
fn play_stream(&self, stream: Self::StreamId) -> Result<(), crate::PlayStreamError> {
match (&self.0, stream.0) {
$(
(&EventLoopInner::$HostVariant(ref e), StreamIdInner::$HostVariant(ref s)) => {
e.play_stream(s.clone())
}
)*
_ => panic!("tried to play a stream with an ID associated with another host"),
}
}
#[allow(unreachable_patterns)]
fn pause_stream(&self, stream: Self::StreamId) -> Result<(), crate::PauseStreamError> {
match (&self.0, stream.0) {
$(
(&EventLoopInner::$HostVariant(ref e), StreamIdInner::$HostVariant(ref s)) => {
e.pause_stream(s.clone())
}
)*
_ => panic!("tried to pause a stream with an ID associated with another host"),
}
}
#[allow(unreachable_patterns)]
fn destroy_stream(&self, stream: Self::StreamId) {
match (&self.0, stream.0) {
$(
(&EventLoopInner::$HostVariant(ref e), StreamIdInner::$HostVariant(ref s)) => {
e.destroy_stream(s.clone())
}
)*
_ => panic!("tried to destroy a stream with an ID associated with another host"),
}
}
fn run<F>(&self, mut callback: F) -> !
where
F: FnMut(Self::StreamId, crate::StreamDataResult) + Send
{
match self.0 { match self.0 {
$( $(
EventLoopInner::$HostVariant(ref e) => { DeviceInner::$HostVariant(ref d) => d.build_input_stream(format, data_callback, error_callback)
e.run(|id, result| { .map(StreamInner::$HostVariant)
let result = result; .map(Stream::from),
callback(StreamId(StreamIdInner::$HostVariant(id)), result); )*
}); }
}, }
fn build_output_stream<D, E>(&self, format: &crate::Format, data_callback: D, error_callback: E) -> Result<Self::Stream, crate::BuildStreamError>
where D: FnMut(crate::StreamData) + Send + 'static, E: FnMut(crate::StreamError) + Send + 'static {
match self.0 {
$(
DeviceInner::$HostVariant(ref d) => d.build_output_stream(format, data_callback, error_callback)
.map(StreamInner::$HostVariant)
.map(Stream::from),
)* )*
} }
} }
@ -358,7 +281,6 @@ macro_rules! impl_platform_host {
impl crate::traits::HostTrait for Host { impl crate::traits::HostTrait for Host {
type Devices = Devices; type Devices = Devices;
type Device = Device; type Device = Device;
type EventLoop = EventLoop;
fn is_available() -> bool { fn is_available() -> bool {
$( crate::host::$host_mod::Host::is_available() ||)* false $( crate::host::$host_mod::Host::is_available() ||)* false
@ -368,7 +290,7 @@ macro_rules! impl_platform_host {
match self.0 { match self.0 {
$( $(
HostInner::$HostVariant(ref h) => { HostInner::$HostVariant(ref h) => {
h.devices().map(DevicesInner::$HostVariant).map(Devices) h.devices().map(DevicesInner::$HostVariant).map(Devices::from)
} }
)* )*
} }
@ -378,7 +300,7 @@ macro_rules! impl_platform_host {
match self.0 { match self.0 {
$( $(
HostInner::$HostVariant(ref h) => { HostInner::$HostVariant(ref h) => {
h.default_input_device().map(DeviceInner::$HostVariant).map(Device) h.default_input_device().map(DeviceInner::$HostVariant).map(Device::from)
} }
)* )*
} }
@ -388,53 +310,81 @@ macro_rules! impl_platform_host {
match self.0 { match self.0 {
$( $(
HostInner::$HostVariant(ref h) => { HostInner::$HostVariant(ref h) => {
h.default_output_device().map(DeviceInner::$HostVariant).map(Device) h.default_output_device().map(DeviceInner::$HostVariant).map(Device::from)
}
)*
}
}
fn event_loop(&self) -> Self::EventLoop {
match self.0 {
$(
HostInner::$HostVariant(ref h) => {
EventLoop(EventLoopInner::$HostVariant(h.event_loop()))
} }
)* )*
} }
} }
} }
impl crate::traits::StreamIdTrait for StreamId {} impl crate::traits::StreamTrait for Stream {
fn play(&self) -> Result<(), crate::PlayStreamError> {
match self.0 {
$(
StreamInner::$HostVariant(ref s) => {
s.play()
}
)*
}
}
fn pause(&self) -> Result<(), crate::PauseStreamError> {
match self.0 {
$(
StreamInner::$HostVariant(ref s) => {
s.pause()
}
)*
}
}
}
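For readers unfamiliar with the macro, here is a heavily simplified, standalone sketch (not taken from this diff) of the dispatch pattern `impl_platform_host!` expands to: each backend's stream type is wrapped in a private enum, and the public `Stream` forwards trait calls by matching on the active variant. `WasapiStream` and `AsioStream` below are stand-ins, not the real backend types.

```rust
// Stand-in trait mirroring the shape of cpal's StreamTrait for this sketch.
trait StreamTrait {
    fn play(&self) -> Result<(), String>;
}

// Stand-ins for per-backend stream types (not the real cpal backends).
struct WasapiStream;
struct AsioStream;

impl StreamTrait for WasapiStream {
    fn play(&self) -> Result<(), String> {
        Ok(())
    }
}

impl StreamTrait for AsioStream {
    fn play(&self) -> Result<(), String> {
        Ok(())
    }
}

// The macro generates one variant per enabled host.
#[allow(dead_code)]
enum StreamInner {
    Wasapi(WasapiStream),
    Asio(AsioStream),
}

// The public wrapper forwards to whichever backend is active.
struct Stream(StreamInner);

impl StreamTrait for Stream {
    fn play(&self) -> Result<(), String> {
        match self.0 {
            StreamInner::Wasapi(ref s) => s.play(),
            StreamInner::Asio(ref s) => s.play(),
        }
    }
}

fn main() {
    let stream = Stream(StreamInner::Wasapi(WasapiStream));
    stream.play().unwrap();
}
```

Wrapping the backends in an enum rather than a `Box<dyn Trait>` keeps dispatch per host and avoids a heap allocation per stream, which is consistent with how the macro already enumerates hosts at compile time.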
impl From<DeviceInner> for Device {
fn from(d: DeviceInner) -> Self {
Device(d)
}
}
impl From<DevicesInner> for Devices {
fn from(d: DevicesInner) -> Self {
Devices(d)
}
}
impl From<HostInner> for Host {
fn from(h: HostInner) -> Self {
Host(h)
}
}
impl From<StreamInner> for Stream {
fn from(s: StreamInner) -> Self {
Stream(s, Default::default())
}
}
$( $(
impl From<crate::host::$host_mod::Device> for Device { impl From<crate::host::$host_mod::Device> for Device {
fn from(h: crate::host::$host_mod::Device) -> Self { fn from(h: crate::host::$host_mod::Device) -> Self {
Device(DeviceInner::$HostVariant(h)) DeviceInner::$HostVariant(h).into()
} }
} }
impl From<crate::host::$host_mod::Devices> for Devices { impl From<crate::host::$host_mod::Devices> for Devices {
fn from(h: crate::host::$host_mod::Devices) -> Self { fn from(h: crate::host::$host_mod::Devices) -> Self {
Devices(DevicesInner::$HostVariant(h)) DevicesInner::$HostVariant(h).into()
}
}
impl From<crate::host::$host_mod::EventLoop> for EventLoop {
fn from(h: crate::host::$host_mod::EventLoop) -> Self {
EventLoop(EventLoopInner::$HostVariant(h))
} }
} }
impl From<crate::host::$host_mod::Host> for Host { impl From<crate::host::$host_mod::Host> for Host {
fn from(h: crate::host::$host_mod::Host) -> Self { fn from(h: crate::host::$host_mod::Host) -> Self {
Host(HostInner::$HostVariant(h)) HostInner::$HostVariant(h).into()
} }
} }
impl From<crate::host::$host_mod::StreamId> for StreamId { impl From<crate::host::$host_mod::Stream> for Stream {
fn from(h: crate::host::$host_mod::StreamId) -> Self { fn from(h: crate::host::$host_mod::Stream) -> Self {
StreamId(StreamIdInner::$HostVariant(h)) StreamInner::$HostVariant(h).into()
} }
} }
)* )*
@ -457,7 +407,7 @@ macro_rules! impl_platform_host {
HostId::$HostVariant => { HostId::$HostVariant => {
crate::host::$host_mod::Host::new() crate::host::$host_mod::Host::new()
.map(HostInner::$HostVariant) .map(HostInner::$HostVariant)
.map(Host) .map(Host::from)
} }
)* )*
} }
@ -471,9 +421,8 @@ mod platform_impl {
pub use crate::host::alsa::{ pub use crate::host::alsa::{
Device as AlsaDevice, Device as AlsaDevice,
Devices as AlsaDevices, Devices as AlsaDevices,
EventLoop as AlsaEventLoop,
Host as AlsaHost, Host as AlsaHost,
StreamId as AlsaStreamId, Stream as AlsaStream,
SupportedInputFormats as AlsaSupportedInputFormats, SupportedInputFormats as AlsaSupportedInputFormats,
SupportedOutputFormats as AlsaSupportedOutputFormats, SupportedOutputFormats as AlsaSupportedOutputFormats,
}; };
@ -494,9 +443,8 @@ mod platform_impl {
pub use crate::host::coreaudio::{ pub use crate::host::coreaudio::{
Device as CoreAudioDevice, Device as CoreAudioDevice,
Devices as CoreAudioDevices, Devices as CoreAudioDevices,
EventLoop as CoreAudioEventLoop,
Host as CoreAudioHost, Host as CoreAudioHost,
StreamId as CoreAudioStreamId, Stream as CoreAudioStream,
SupportedInputFormats as CoreAudioSupportedInputFormats, SupportedInputFormats as CoreAudioSupportedInputFormats,
SupportedOutputFormats as CoreAudioSupportedOutputFormats, SupportedOutputFormats as CoreAudioSupportedOutputFormats,
}; };
@ -516,9 +464,8 @@ mod platform_impl {
pub use crate::host::emscripten::{ pub use crate::host::emscripten::{
Device as EmscriptenDevice, Device as EmscriptenDevice,
Devices as EmscriptenDevices, Devices as EmscriptenDevices,
EventLoop as EmscriptenEventLoop,
Host as EmscriptenHost, Host as EmscriptenHost,
StreamId as EmscriptenStreamId, Stream as EmscriptenStream,
SupportedInputFormats as EmscriptenSupportedInputFormats, SupportedInputFormats as EmscriptenSupportedInputFormats,
SupportedOutputFormats as EmscriptenSupportedOutputFormats, SupportedOutputFormats as EmscriptenSupportedOutputFormats,
}; };
@ -539,18 +486,16 @@ mod platform_impl {
pub use crate::host::asio::{ pub use crate::host::asio::{
Device as AsioDevice, Device as AsioDevice,
Devices as AsioDevices, Devices as AsioDevices,
EventLoop as AsioEventLoop, Stream as AsioStream,
Host as AsioHost, Host as AsioHost,
StreamId as AsioStreamId,
SupportedInputFormats as AsioSupportedInputFormats, SupportedInputFormats as AsioSupportedInputFormats,
SupportedOutputFormats as AsioSupportedOutputFormats, SupportedOutputFormats as AsioSupportedOutputFormats,
}; };
pub use crate::host::wasapi::{ pub use crate::host::wasapi::{
Device as WasapiDevice, Device as WasapiDevice,
Devices as WasapiDevices, Devices as WasapiDevices,
EventLoop as WasapiEventLoop, Stream as WasapiStream,
Host as WasapiHost, Host as WasapiHost,
StreamId as WasapiStreamId,
SupportedInputFormats as WasapiSupportedInputFormats, SupportedInputFormats as WasapiSupportedInputFormats,
SupportedOutputFormats as WasapiSupportedOutputFormats, SupportedOutputFormats as WasapiSupportedOutputFormats,
}; };
@ -591,3 +536,19 @@ mod platform_impl {
.into() .into()
} }
} }
// The following zero-sized types are for applying Send/Sync restrictions to ensure
// consistent behaviour across different platforms. These verbosely named types are used
// (rather than using the markers directly) in the hope of making the compile errors
// slightly more helpful.
//
// TODO: Remove these in favour of using negative trait bounds if they stabilise.
// A marker used to remove the `Send` and `Sync` traits.
struct NotSendSyncAcrossAllPlatforms(std::marker::PhantomData<*mut ()>);
impl Default for NotSendSyncAcrossAllPlatforms {
fn default() -> Self {
NotSendSyncAcrossAllPlatforms(std::marker::PhantomData)
}
}
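As a quick, standalone illustration of the technique (not part of the diff): embedding a `PhantomData<*mut ()>` field is enough to opt the containing type out of the auto-derived `Send` and `Sync` impls, because raw pointers implement neither.

```rust
use std::marker::PhantomData;

// Zero-sized marker built on a raw pointer, mirroring NotSendSyncAcrossAllPlatforms above.
#[allow(dead_code)]
struct NotSendSync(PhantomData<*mut ()>);

// Any type that embeds the marker loses the auto-derived Send/Sync impls as well.
struct MyStream {
    _not_send_sync: NotSendSync,
}

#[allow(dead_code)]
fn assert_send<T: Send>(_: &T) {}

fn main() {
    let stream = MyStream {
        _not_send_sync: NotSendSync(PhantomData),
    };
    // Uncommenting the next line fails to compile, because `*mut ()` is neither Send nor Sync:
    // assert_send(&stream);
    drop(stream);
}
```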
View File
@ -10,7 +10,8 @@ use {
OutputDevices, OutputDevices,
PauseStreamError, PauseStreamError,
PlayStreamError, PlayStreamError,
StreamDataResult, StreamData,
StreamError,
SupportedFormat, SupportedFormat,
SupportedFormatsError, SupportedFormatsError,
}; };
@ -39,8 +40,6 @@ pub trait HostTrait {
type Devices: Iterator<Item = Self::Device>; type Devices: Iterator<Item = Self::Device>;
/// The `Device` type yielded by the host. /// The `Device` type yielded by the host.
type Device: DeviceTrait; type Device: DeviceTrait;
/// The event loop type used by the `Host`
type EventLoop: EventLoopTrait<Device = Self::Device>;
/// Whether or not the host is available on the system. /// Whether or not the host is available on the system.
fn is_available() -> bool; fn is_available() -> bool;
@ -60,9 +59,6 @@ pub trait HostTrait {
/// Returns `None` if no output device is available. /// Returns `None` if no output device is available.
fn default_output_device(&self) -> Option<Self::Device>; fn default_output_device(&self) -> Option<Self::Device>;
/// Initialise the event loop, ready for managing audio streams.
fn event_loop(&self) -> Self::EventLoop;
/// An iterator yielding all `Device`s currently available to the system that support one or more /// An iterator yielding all `Device`s currently available to the system that support one or more
/// input stream formats. /// input stream formats.
/// ///
@ -99,6 +95,8 @@ pub trait DeviceTrait {
type SupportedInputFormats: Iterator<Item = SupportedFormat>; type SupportedInputFormats: Iterator<Item = SupportedFormat>;
/// The iterator type yielding supported output stream formats. /// The iterator type yielding supported output stream formats.
type SupportedOutputFormats: Iterator<Item = SupportedFormat>; type SupportedOutputFormats: Iterator<Item = SupportedFormat>;
/// The stream type created by `build_input_stream` and `build_output_stream`.
type Stream: StreamTrait;
/// The human-readable name of the device. /// The human-readable name of the device.
fn name(&self) -> Result<String, DeviceNameError>; fn name(&self) -> Result<String, DeviceNameError>;
@ -118,81 +116,28 @@ pub trait DeviceTrait {
/// The default output stream format for the device. /// The default output stream format for the device.
fn default_output_format(&self) -> Result<Format, DefaultFormatError>; fn default_output_format(&self) -> Result<Format, DefaultFormatError>;
/// Create an input stream.
fn build_input_stream<D, E>(&self, format: &Format, data_callback: D, error_callback: E) -> Result<Self::Stream, BuildStreamError>
where D: FnMut(StreamData) + Send + 'static, E: FnMut(StreamError) + Send + 'static;
/// Create an output stream.
fn build_output_stream<D, E>(&self, format: &Format, data_callback: D, error_callback: E) -> Result<Self::Stream, BuildStreamError>
where D: FnMut(StreamData) + Send + 'static, E: FnMut(StreamError) + Send + 'static;
} }
/// Collection of streams managed together. /// A stream created from `Device`, with methods to control playback.
/// pub trait StreamTrait {
/// Created with the `Host::event_loop` method. /// Run the stream.
pub trait EventLoopTrait { ///
/// The `Device` type yielded by the host. /// Note: Not all platforms automatically run the stream upon creation, so it is important to
type Device: DeviceTrait; /// call `play` after creation if it is expected that the stream should run immediately.
/// The type used to uniquely distinguish between streams. fn play(&self) -> Result<(), PlayStreamError>;
type StreamId: StreamIdTrait;
/// Creates a new input stream that will run from the given device and with the given format. /// Some devices support pausing the audio stream. This can be useful for saving energy in
/// moments of silence.
/// ///
/// On success, returns an identifier for the stream. /// Note: Not all devices support suspending the stream at the hardware level. This method may
/// /// fail in these cases.
/// Can return an error if the device is no longer valid, or if the input stream format is not fn pause(&self) -> Result<(), PauseStreamError>;
/// supported by the device.
fn build_input_stream(
&self,
device: &Self::Device,
format: &Format,
) -> Result<Self::StreamId, BuildStreamError>;
/// Creates a new output stream that will play on the given device and with the given format.
///
/// On success, returns an identifier for the stream.
///
/// Can return an error if the device is no longer valid, or if the output stream format is not
/// supported by the device.
fn build_output_stream(
&self,
device: &Self::Device,
format: &Format,
) -> Result<Self::StreamId, BuildStreamError>;
/// Instructs the audio device that it should start playing the stream with the given ID.
///
/// Has no effect if the stream was already playing.
///
/// Only call this after you have submitted some data, otherwise you may hear some glitches.
///
/// # Panic
///
/// If the stream does not exist, this function can either panic or be a no-op.
fn play_stream(&self, stream: Self::StreamId) -> Result<(), PlayStreamError>;
/// Instructs the audio device that it should stop playing the stream with the given ID.
///
/// Has no effect if the stream was already paused.
///
/// If you call `play` afterwards, the playback will resume where it was.
///
/// # Panic
///
/// If the stream does not exist, this function can either panic or be a no-op.
fn pause_stream(&self, stream: Self::StreamId) -> Result<(), PauseStreamError>;
/// Destroys an existing stream.
///
/// # Panic
///
/// If the stream does not exist, this function can either panic or be a no-op.
fn destroy_stream(&self, stream: Self::StreamId);
/// Takes control of the current thread and begins the stream processing.
///
/// > **Note**: Since it takes control of the thread, this method is best called on a separate
/// > thread.
///
/// Whenever a stream needs to be fed some data, the closure passed as parameter is called.
/// You can call the other methods of `EventLoop` without getting a deadlock.
fn run<F>(&self, callback: F) -> !
where
F: FnMut(Self::StreamId, StreamDataResult) + Send;
} }
/// The set of required bounds for host `StreamId` types.
pub trait StreamIdTrait: Clone + std::fmt::Debug + std::hash::Hash + PartialEq + Eq {}