Merge pull request #354 from mitchmindtree/no-eventloop-rebased
Removing the `EventLoop` - rebased
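This change replaces the `EventLoop`/`StreamId` API with per-device `Stream`s: each stream is built directly from a `Device` with its own data callback and error callback, and is played, paused and dropped independently. As a rough orientation before the diff, the new usage pattern looks like the following sketch (assembled from the updated `beep` example further below; the sample-writing body is elided):

    use cpal::traits::{DeviceTrait, HostTrait, StreamTrait};

    fn main() -> Result<(), anyhow::Error> {
        let host = cpal::default_host();
        let device = host.default_output_device().expect("failed to find a default output device");
        let format = device.default_output_format()?;
        // Previously: host.event_loop(), event_loop.build_output_stream(&device, &format),
        // event_loop.play_stream(id) and a blocking event_loop.run(...) callback.
        // Now the device builds its own stream from a data callback and an error callback.
        let stream = device.build_output_stream(
            &format,
            move |_data| {
                // Write samples into the `cpal::StreamData::Output` buffer here.
            },
            move |err| eprintln!("an error occurred on stream: {}", err),
        )?;
        stream.play()?;
        std::thread::sleep(std::time::Duration::from_millis(1000));
        Ok(())
    }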
commit 59ac088167
@ -3,3 +3,4 @@
|
|||
.cargo/
|
||||
.DS_Store
|
||||
recorded.wav
|
||||
rls*.log
|
||||
|
|
|
@ -24,6 +24,7 @@ ringbuf = "0.1.6"
|
|||
[target.'cfg(target_os = "windows")'.dependencies]
|
||||
winapi = { version = "0.3", features = ["audiosessiontypes", "audioclient", "coml2api", "combaseapi", "debug", "devpkey", "handleapi", "ksmedia", "mmdeviceapi", "objbase", "std", "synchapi", "winbase", "winuser"] }
|
||||
asio-sys = { version = "0.1", path = "asio-sys", optional = true }
|
||||
parking_lot = "0.9"
|
||||
|
||||
[target.'cfg(any(target_os = "linux", target_os = "dragonfly", target_os = "freebsd", target_os = "openbsd"))'.dependencies]
|
||||
alsa-sys = { version = "0.1", path = "alsa-sys" }
|
||||
|
|
|
@ -7,7 +7,7 @@ use self::errors::{AsioError, AsioErrorWrapper, LoadDriverError};
|
|||
use std::ffi::CStr;
|
||||
use std::ffi::CString;
|
||||
use std::os::raw::{c_char, c_double, c_long, c_void};
|
||||
use std::sync::{Arc, Mutex, Weak};
|
||||
use std::sync::{Arc, Mutex, MutexGuard, Weak};
|
||||
|
||||
// Bindings import
|
||||
use self::asio_import as ai;
|
||||
|
@ -85,7 +85,7 @@ pub struct SampleRate {
|
|||
}
|
||||
|
||||
/// Holds the pointer to the callbacks that come from cpal
|
||||
struct BufferCallback(Box<FnMut(i32) + Send>);
|
||||
struct BufferCallback(Box<dyn FnMut(i32) + Send>);
|
||||
|
||||
/// Input and Output streams.
|
||||
///
|
||||
|
@ -235,6 +235,9 @@ struct BufferSizes {
|
|||
grans: c_long,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
pub struct CallbackId(usize);
|
||||
|
||||
lazy_static! {
|
||||
/// A global way to access all the callbacks.
|
||||
///
|
||||
|
@ -244,7 +247,7 @@ lazy_static! {
|
|||
/// Options are used so that when a callback is removed we don't change the Vec indices.
|
||||
///
|
||||
/// The indices are how we match a callback with a stream.
|
||||
static ref BUFFER_CALLBACK: Mutex<Vec<Option<BufferCallback>>> = Mutex::new(Vec::new());
|
||||
static ref BUFFER_CALLBACK: Mutex<Vec<(CallbackId, BufferCallback)>> = Mutex::new(Vec::new());
|
||||
}
|
||||
|
||||
impl Asio {
|
||||
|
@ -419,6 +422,8 @@ impl Driver {
|
|||
// To pass as ai::ASIOCallbacks
|
||||
let mut callbacks = create_asio_callbacks();
|
||||
|
||||
let mut state = self.inner.lock_state();
|
||||
|
||||
// Retrieve the available buffer sizes.
|
||||
let buffer_sizes = asio_get_buffer_sizes()?;
|
||||
if buffer_sizes.pref <= 0 {
|
||||
|
@ -429,13 +434,12 @@ impl Driver {
|
|||
}
|
||||
|
||||
// Ensure the driver is in the `Initialized` state.
|
||||
if let DriverState::Running = self.inner.state() {
|
||||
self.stop()?;
|
||||
if let DriverState::Running = *state {
|
||||
state.stop()?;
|
||||
}
|
||||
if let DriverState::Prepared = self.inner.state() {
|
||||
self.dispose_buffers()?;
|
||||
if let DriverState::Prepared = *state {
|
||||
state.dispose_buffers()?;
|
||||
}
|
||||
|
||||
unsafe {
|
||||
asio_result!(ai::ASIOCreateBuffers(
|
||||
buffer_infos.as_mut_ptr() as *mut _,
|
||||
|
@ -444,8 +448,8 @@ impl Driver {
|
|||
&mut callbacks as *mut _ as *mut _,
|
||||
))?;
|
||||
}
|
||||
*state = DriverState::Prepared;
|
||||
|
||||
self.inner.set_state(DriverState::Prepared);
|
||||
Ok(buffer_sizes.pref)
|
||||
}
|
||||
|
||||
|
@ -566,13 +570,14 @@ impl Driver {
|
|||
///
|
||||
/// No-op if already `Running`.
|
||||
pub fn start(&self) -> Result<(), AsioError> {
|
||||
if let DriverState::Running = self.inner.state() {
|
||||
let mut state = self.inner.lock_state();
|
||||
if let DriverState::Running = *state {
|
||||
return Ok(());
|
||||
}
|
||||
unsafe {
|
||||
asio_result!(ai::ASIOStart())?;
|
||||
}
|
||||
self.inner.set_state(DriverState::Running);
|
||||
*state = DriverState::Running;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
@ -589,12 +594,26 @@ impl Driver {
|
|||
/// Adds a callback to the list of active callbacks.
|
||||
///
|
||||
/// The given function receives the index of the buffer currently ready for processing.
|
||||
pub fn set_callback<F>(&self, callback: F)
|
||||
///
|
||||
/// Returns an ID uniquely associated with the given callback so that it may be removed later.
|
||||
pub fn add_callback<F>(&self, callback: F) -> CallbackId
|
||||
where
|
||||
F: 'static + FnMut(i32) + Send,
|
||||
{
|
||||
let mut bc = BUFFER_CALLBACK.lock().unwrap();
|
||||
bc.push(Some(BufferCallback(Box::new(callback))));
|
||||
let id = bc
|
||||
.last()
|
||||
.map(|&(id, _)| CallbackId(id.0.checked_add(1).expect("stream ID overflowed")))
|
||||
.unwrap_or(CallbackId(0));
|
||||
let cb = BufferCallback(Box::new(callback));
|
||||
bc.push((id, cb));
|
||||
id
|
||||
}
|
||||
|
||||
/// Remove the callback with the given ID.
|
||||
pub fn remove_callback(&self, rem_id: CallbackId) {
|
||||
let mut bc = BUFFER_CALLBACK.lock().unwrap();
|
||||
bc.retain(|&(id, _)| id != rem_id);
|
||||
}
|
||||
|
||||
/// Consumes and destroys the `Driver`, stopping the streams if they are running and releasing
|
||||
|
@ -618,55 +637,70 @@ impl Driver {
|
|||
}
|
||||
}
|
||||
|
||||
impl DriverInner {
|
||||
fn state(&self) -> DriverState {
|
||||
*self.state.lock().expect("failed to lock `DriverState`")
|
||||
}
|
||||
|
||||
fn set_state(&self, state: DriverState) {
|
||||
*self.state.lock().expect("failed to lock `DriverState`") = state;
|
||||
}
|
||||
|
||||
fn stop_inner(&self) -> Result<(), AsioError> {
|
||||
if let DriverState::Running = self.state() {
|
||||
impl DriverState {
|
||||
fn stop(&mut self) -> Result<(), AsioError> {
|
||||
if let DriverState::Running = *self {
|
||||
unsafe {
|
||||
asio_result!(ai::ASIOStop())?;
|
||||
}
|
||||
self.set_state(DriverState::Prepared);
|
||||
*self = DriverState::Prepared;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn dispose_buffers_inner(&self) -> Result<(), AsioError> {
|
||||
if let DriverState::Initialized = self.state() {
|
||||
fn dispose_buffers(&mut self) -> Result<(), AsioError> {
|
||||
if let DriverState::Initialized = *self {
|
||||
return Ok(());
|
||||
}
|
||||
if let DriverState::Running = self.state() {
|
||||
self.stop_inner()?;
|
||||
if let DriverState::Running = *self {
|
||||
self.stop()?;
|
||||
}
|
||||
unsafe {
|
||||
asio_result!(ai::ASIODisposeBuffers())?;
|
||||
}
|
||||
self.set_state(DriverState::Initialized);
|
||||
*self = DriverState::Initialized;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn destroy_inner(&mut self) -> Result<(), AsioError> {
|
||||
// Drop back through the driver state machine one state at a time.
|
||||
if let DriverState::Running = self.state() {
|
||||
self.stop_inner()?;
|
||||
fn destroy(&mut self) -> Result<(), AsioError> {
|
||||
if let DriverState::Running = *self {
|
||||
self.stop()?;
|
||||
}
|
||||
if let DriverState::Prepared = self.state() {
|
||||
self.dispose_buffers_inner()?;
|
||||
if let DriverState::Prepared = *self {
|
||||
self.dispose_buffers()?;
|
||||
}
|
||||
unsafe {
|
||||
asio_result!(ai::ASIOExit())?;
|
||||
ai::remove_current_driver();
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
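// Illustrative summary of the state machine implemented above (an editor's note, not part
// of the diff): stop() moves Running -> Prepared, dispose_buffers() moves Prepared ->
// Initialized (stopping first if necessary), and destroy() steps through both before
// calling ASIOExit and removing the current driver.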
|
||||
|
||||
// Clear any existing stream callbacks.
|
||||
if let Ok(mut bcs) = BUFFER_CALLBACK.lock() {
|
||||
bcs.clear();
|
||||
impl DriverInner {
|
||||
fn lock_state(&self) -> MutexGuard<DriverState> {
|
||||
self.state.lock().expect("failed to lock `DriverState`")
|
||||
}
|
||||
|
||||
fn stop_inner(&self) -> Result<(), AsioError> {
|
||||
let mut state = self.lock_state();
|
||||
state.stop()
|
||||
}
|
||||
|
||||
fn dispose_buffers_inner(&self) -> Result<(), AsioError> {
|
||||
let mut state = self.lock_state();
|
||||
state.dispose_buffers()
|
||||
}
|
||||
|
||||
fn destroy_inner(&mut self) -> Result<(), AsioError> {
|
||||
{
|
||||
let mut state = self.lock_state();
|
||||
state.destroy()?;
|
||||
|
||||
// Clear any existing stream callbacks.
|
||||
if let Ok(mut bcs) = BUFFER_CALLBACK.lock() {
|
||||
bcs.clear();
|
||||
}
|
||||
}
|
||||
|
||||
// Signal that the driver has been destroyed.
|
||||
|
@ -863,10 +897,8 @@ extern "C" fn buffer_switch_time_info(
|
|||
) -> *mut ai::ASIOTime {
|
||||
// This lock is probably unavoidable, but locks in the audio stream are not great.
|
||||
let mut bcs = BUFFER_CALLBACK.lock().unwrap();
|
||||
for mut bc in bcs.iter_mut() {
|
||||
if let Some(ref mut bc) = bc {
|
||||
bc.run(double_buffer_index);
|
||||
}
|
||||
for &mut (_, ref mut bc) in bcs.iter_mut() {
|
||||
bc.run(double_buffer_index);
|
||||
}
|
||||
time
|
||||
}
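For orientation, the `add_callback`/`remove_callback` pair introduced above replaces the old single `set_callback`. A hypothetical usage sketch (not part of the diff; `driver` is assumed to be a loaded `sys::Driver`):

    // Register a buffer-switch callback; the returned `CallbackId` identifies it.
    let callback_id = driver.add_callback(move |buffer_index| {
        // `buffer_index` names the half of the double buffer that is ready.
        println!("buffer {} ready", buffer_index);
    });

    // Later (for example when the owning `Stream` is dropped), unregister it.
    driver.remove_callback(callback_id);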
|
||||
|
|
|
@ -1,37 +1,26 @@
|
|||
extern crate anyhow;
|
||||
extern crate cpal;
|
||||
|
||||
use cpal::traits::{DeviceTrait, EventLoopTrait, HostTrait};
|
||||
use cpal::traits::{DeviceTrait, StreamTrait, HostTrait};
|
||||
|
||||
fn main() -> Result<(), anyhow::Error> {
|
||||
let host = cpal::default_host();
|
||||
let device = host.default_output_device().expect("failed to find a default output device");
|
||||
let format = device.default_output_format()?;
|
||||
let event_loop = host.event_loop();
|
||||
let stream_id = event_loop.build_output_stream(&device, &format)?;
|
||||
event_loop.play_stream(stream_id.clone())?;
|
||||
|
||||
let sample_rate = format.sample_rate.0 as f32;
|
||||
let channels = format.channels;
|
||||
let mut sample_clock = 0f32;
|
||||
|
||||
// Produce a sinusoid of maximum amplitude.
|
||||
let mut next_value = || {
|
||||
let mut next_value = move || {
|
||||
sample_clock = (sample_clock + 1.0) % sample_rate;
|
||||
(sample_clock * 440.0 * 2.0 * 3.141592 / sample_rate).sin()
|
||||
};
|
||||
|
||||
event_loop.run(move |id, result| {
|
||||
let data = match result {
|
||||
Ok(data) => data,
|
||||
Err(err) => {
|
||||
eprintln!("an error occurred on stream {:?}: {}", id, err);
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
let stream = device.build_output_stream(&format, move |data| {
|
||||
match data {
|
||||
cpal::StreamData::Output { buffer: cpal::UnknownTypeOutputBuffer::U16(mut buffer) } => {
|
||||
for sample in buffer.chunks_mut(format.channels as usize) {
|
||||
for sample in buffer.chunks_mut(channels as usize) {
|
||||
let value = ((next_value() * 0.5 + 0.5) * std::u16::MAX as f32) as u16;
|
||||
for out in sample.iter_mut() {
|
||||
*out = value;
|
||||
|
@ -39,7 +28,7 @@ fn main() -> Result<(), anyhow::Error> {
|
|||
}
|
||||
},
|
||||
cpal::StreamData::Output { buffer: cpal::UnknownTypeOutputBuffer::I16(mut buffer) } => {
|
||||
for sample in buffer.chunks_mut(format.channels as usize) {
|
||||
for sample in buffer.chunks_mut(channels as usize) {
|
||||
let value = (next_value() * std::i16::MAX as f32) as i16;
|
||||
for out in sample.iter_mut() {
|
||||
*out = value;
|
||||
|
@ -47,7 +36,7 @@ fn main() -> Result<(), anyhow::Error> {
|
|||
}
|
||||
},
|
||||
cpal::StreamData::Output { buffer: cpal::UnknownTypeOutputBuffer::F32(mut buffer) } => {
|
||||
for sample in buffer.chunks_mut(format.channels as usize) {
|
||||
for sample in buffer.chunks_mut(channels as usize) {
|
||||
let value = next_value();
|
||||
for out in sample.iter_mut() {
|
||||
*out = value;
|
||||
|
@ -56,5 +45,12 @@ fn main() -> Result<(), anyhow::Error> {
|
|||
},
|
||||
_ => (),
|
||||
}
|
||||
});
|
||||
}, move |err| {
|
||||
eprintln!("an error occurred on stream: {}", err);
|
||||
})?;
|
||||
stream.play()?;
|
||||
|
||||
std::thread::sleep(std::time::Duration::from_millis(1000));
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
|
|
@ -10,18 +10,21 @@ extern crate anyhow;
|
|||
extern crate cpal;
|
||||
extern crate ringbuf;
|
||||
|
||||
use cpal::traits::{DeviceTrait, EventLoopTrait, HostTrait};
|
||||
use cpal::traits::{DeviceTrait, HostTrait, StreamTrait};
|
||||
use ringbuf::RingBuffer;
|
||||
|
||||
const LATENCY_MS: f32 = 150.0;
|
||||
|
||||
fn main() -> Result<(), anyhow::Error> {
|
||||
let host = cpal::default_host();
|
||||
let event_loop = host.event_loop();
|
||||
|
||||
// Default devices.
|
||||
let input_device = host.default_input_device().expect("failed to get default input device");
|
||||
let output_device = host.default_output_device().expect("failed to get default output device");
|
||||
let input_device = host
|
||||
.default_input_device()
|
||||
.expect("failed to get default input device");
|
||||
let output_device = host
|
||||
.default_output_device()
|
||||
.expect("failed to get default output device");
|
||||
println!("Using default input device: \"{}\"", input_device.name()?);
|
||||
println!("Using default output device: \"{}\"", output_device.name()?);
|
||||
|
||||
|
@ -29,12 +32,6 @@ fn main() -> Result<(), anyhow::Error> {
|
|||
let mut format = input_device.default_input_format()?;
|
||||
format.data_type = cpal::SampleFormat::F32;
|
||||
|
||||
// Build streams.
|
||||
println!("Attempting to build both streams with `{:?}`.", format);
|
||||
let input_stream_id = event_loop.build_input_stream(&input_device, &format)?;
|
||||
let output_stream_id = event_loop.build_output_stream(&output_device, &format)?;
|
||||
println!("Successfully built streams.");
|
||||
|
||||
// Create a delay in case the input and output devices aren't synced.
|
||||
let latency_frames = (LATENCY_MS / 1_000.0) * format.sample_rate.0 as f32;
|
||||
let latency_samples = latency_frames as usize * format.channels as usize;
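// Worked example (editor's note, assuming the common default of a 44 100 Hz stereo format):
// latency_frames  = (150.0 / 1_000.0) * 44_100.0 = 6_615.0
// latency_samples = 6_615 * 2 = 13_230 samples of silence pushed into the ring buffer below.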
|
||||
|
@ -50,59 +47,67 @@ fn main() -> Result<(), anyhow::Error> {
|
|||
producer.push(0.0).unwrap();
|
||||
}
|
||||
|
||||
// Play the streams.
|
||||
println!("Starting the input and output streams with `{}` milliseconds of latency.", LATENCY_MS);
|
||||
event_loop.play_stream(input_stream_id.clone())?;
|
||||
event_loop.play_stream(output_stream_id.clone())?;
|
||||
|
||||
// Run the event loop on a separate thread.
|
||||
std::thread::spawn(move || {
|
||||
event_loop.run(move |id, result| {
|
||||
let data = match result {
|
||||
Ok(data) => data,
|
||||
Err(err) => {
|
||||
eprintln!("an error occurred on stream {:?}: {}", id, err);
|
||||
return;
|
||||
// Build streams.
|
||||
println!("Attempting to build both streams with `{:?}`.", format);
|
||||
let input_stream = input_device.build_input_stream(&format, move |data| {
|
||||
match data {
|
||||
cpal::StreamData::Input {
|
||||
buffer: cpal::UnknownTypeInputBuffer::F32(buffer),
|
||||
} => {
|
||||
let mut output_fell_behind = false;
|
||||
for &sample in buffer.iter() {
|
||||
if producer.push(sample).is_err() {
|
||||
output_fell_behind = true;
|
||||
}
|
||||
}
|
||||
};
|
||||
if output_fell_behind {
|
||||
eprintln!("output stream fell behind: try increasing latency");
|
||||
}
|
||||
},
|
||||
_ => panic!("Expected input with f32 data"),
|
||||
}
|
||||
}, move |err| {
|
||||
eprintln!("an error occurred on input stream: {}", err);
|
||||
})?;
|
||||
let output_stream = output_device.build_output_stream(&format, move |data| {
|
||||
match data {
|
||||
cpal::StreamData::Output {
|
||||
buffer: cpal::UnknownTypeOutputBuffer::F32(mut buffer),
|
||||
} => {
|
||||
let mut input_fell_behind = None;
|
||||
for sample in buffer.iter_mut() {
|
||||
*sample = match consumer.pop() {
|
||||
Ok(s) => s,
|
||||
Err(err) => {
|
||||
input_fell_behind = Some(err);
|
||||
0.0
|
||||
},
|
||||
};
|
||||
}
|
||||
if let Some(err) = input_fell_behind {
|
||||
eprintln!("input stream fell behind: {:?}: try increasing latency", err);
|
||||
}
|
||||
},
|
||||
_ => panic!("Expected output with f32 data"),
|
||||
}
|
||||
}, move |err| {
|
||||
eprintln!("an error occurred on output stream: {}", err);
|
||||
})?;
|
||||
println!("Successfully built streams.");
|
||||
|
||||
match data {
|
||||
cpal::StreamData::Input { buffer: cpal::UnknownTypeInputBuffer::F32(buffer) } => {
|
||||
assert_eq!(id, input_stream_id);
|
||||
let mut output_fell_behind = false;
|
||||
for &sample in buffer.iter() {
|
||||
if producer.push(sample).is_err() {
|
||||
output_fell_behind = true;
|
||||
}
|
||||
}
|
||||
if output_fell_behind {
|
||||
eprintln!("output stream fell behind: try increasing latency");
|
||||
}
|
||||
},
|
||||
cpal::StreamData::Output { buffer: cpal::UnknownTypeOutputBuffer::F32(mut buffer) } => {
|
||||
assert_eq!(id, output_stream_id);
|
||||
let mut input_fell_behind = None;
|
||||
for sample in buffer.iter_mut() {
|
||||
*sample = match consumer.pop() {
|
||||
Ok(s) => s,
|
||||
Err(err) => {
|
||||
input_fell_behind = Some(err);
|
||||
0.0
|
||||
},
|
||||
};
|
||||
}
|
||||
if let Some(_) = input_fell_behind {
|
||||
eprintln!("input stream fell behind: try increasing latency");
|
||||
}
|
||||
},
|
||||
_ => panic!("we're expecting f32 data"),
|
||||
}
|
||||
});
|
||||
});
|
||||
// Play the streams.
|
||||
println!(
|
||||
"Starting the input and output streams with `{}` milliseconds of latency.",
|
||||
LATENCY_MS
|
||||
);
|
||||
input_stream.play()?;
|
||||
output_stream.play()?;
|
||||
|
||||
// Run for 3 seconds before closing.
|
||||
println!("Playing for 3 seconds... ");
|
||||
std::thread::sleep(std::time::Duration::from_secs(3));
|
||||
drop(input_stream);
|
||||
drop(output_stream);
|
||||
println!("Done!");
|
||||
Ok(())
|
||||
}
|
||||
|
|
|
@ -6,21 +6,21 @@ extern crate anyhow;
|
|||
extern crate cpal;
|
||||
extern crate hound;
|
||||
|
||||
use cpal::traits::{DeviceTrait, EventLoopTrait, HostTrait};
|
||||
use cpal::traits::{DeviceTrait, HostTrait, StreamTrait};
|
||||
|
||||
fn main() -> Result<(), anyhow::Error> {
|
||||
// Use the default host for working with audio devices.
|
||||
let host = cpal::default_host();
|
||||
|
||||
// Setup the default input device and stream with the default input format.
|
||||
let device = host.default_input_device().expect("Failed to get default input device");
|
||||
let device = host
|
||||
.default_input_device()
|
||||
.expect("Failed to get default input device");
|
||||
println!("Default input device: {}", device.name()?);
|
||||
let format = device.default_input_format().expect("Failed to get default input format");
|
||||
let format = device
|
||||
.default_input_format()
|
||||
.expect("Failed to get default input format");
|
||||
println!("Default input format: {:?}", format);
|
||||
let event_loop = host.event_loop();
|
||||
let stream_id = event_loop.build_input_stream(&device, &format)?;
|
||||
event_loop.play_stream(stream_id)?;
|
||||
|
||||
// The WAV file we're recording to.
|
||||
const PATH: &'static str = concat!(env!("CARGO_MANIFEST_DIR"), "/recorded.wav");
|
||||
let spec = wav_spec_from_format(&format);
|
||||
|
@ -29,63 +29,56 @@ fn main() -> Result<(), anyhow::Error> {
|
|||
|
||||
// A flag to indicate that recording is in progress.
|
||||
println!("Begin recording...");
|
||||
let recording = std::sync::Arc::new(std::sync::atomic::AtomicBool::new(true));
|
||||
|
||||
// Run the input stream on a separate thread.
|
||||
let writer_2 = writer.clone();
|
||||
let recording_2 = recording.clone();
|
||||
std::thread::spawn(move || {
|
||||
event_loop.run(move |id, event| {
|
||||
let data = match event {
|
||||
Ok(data) => data,
|
||||
Err(err) => {
|
||||
eprintln!("an error occurred on stream {:?}: {}", id, err);
|
||||
return;
|
||||
let stream = device.build_input_stream(&format, move |data| {
|
||||
// Otherwise write to the wav writer.
|
||||
match data {
|
||||
cpal::StreamData::Input {
|
||||
buffer: cpal::UnknownTypeInputBuffer::U16(buffer),
|
||||
} => {
|
||||
if let Ok(mut guard) = writer_2.try_lock() {
|
||||
if let Some(writer) = guard.as_mut() {
|
||||
for sample in buffer.iter() {
|
||||
let sample = cpal::Sample::to_i16(sample);
|
||||
writer.write_sample(sample).ok();
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// If we're done recording, return early.
|
||||
if !recording_2.load(std::sync::atomic::Ordering::Relaxed) {
|
||||
return;
|
||||
}
|
||||
// Otherwise write to the wav writer.
|
||||
match data {
|
||||
cpal::StreamData::Input { buffer: cpal::UnknownTypeInputBuffer::U16(buffer) } => {
|
||||
if let Ok(mut guard) = writer_2.try_lock() {
|
||||
if let Some(writer) = guard.as_mut() {
|
||||
for sample in buffer.iter() {
|
||||
let sample = cpal::Sample::to_i16(sample);
|
||||
writer.write_sample(sample).ok();
|
||||
}
|
||||
},
|
||||
cpal::StreamData::Input {
|
||||
buffer: cpal::UnknownTypeInputBuffer::I16(buffer),
|
||||
} => {
|
||||
if let Ok(mut guard) = writer_2.try_lock() {
|
||||
if let Some(writer) = guard.as_mut() {
|
||||
for &sample in buffer.iter() {
|
||||
writer.write_sample(sample).ok();
|
||||
}
|
||||
}
|
||||
},
|
||||
cpal::StreamData::Input { buffer: cpal::UnknownTypeInputBuffer::I16(buffer) } => {
|
||||
if let Ok(mut guard) = writer_2.try_lock() {
|
||||
if let Some(writer) = guard.as_mut() {
|
||||
for &sample in buffer.iter() {
|
||||
writer.write_sample(sample).ok();
|
||||
}
|
||||
}
|
||||
},
|
||||
cpal::StreamData::Input {
|
||||
buffer: cpal::UnknownTypeInputBuffer::F32(buffer),
|
||||
} => {
|
||||
if let Ok(mut guard) = writer_2.try_lock() {
|
||||
if let Some(writer) = guard.as_mut() {
|
||||
for &sample in buffer.iter() {
|
||||
writer.write_sample(sample).ok();
|
||||
}
|
||||
}
|
||||
},
|
||||
cpal::StreamData::Input { buffer: cpal::UnknownTypeInputBuffer::F32(buffer) } => {
|
||||
if let Ok(mut guard) = writer_2.try_lock() {
|
||||
if let Some(writer) = guard.as_mut() {
|
||||
for &sample in buffer.iter() {
|
||||
writer.write_sample(sample).ok();
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
_ => (),
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
},
|
||||
_ => (),
|
||||
}
|
||||
}, move |err| {
|
||||
eprintln!("an error occurred on stream: {}", err);
|
||||
})?;
|
||||
stream.play()?;
|
||||
|
||||
// Let recording go for roughly three seconds.
|
||||
std::thread::sleep(std::time::Duration::from_secs(3));
|
||||
recording.store(false, std::sync::atomic::Ordering::Relaxed);
|
||||
drop(stream);
|
||||
writer.lock().unwrap().take().unwrap().finalize()?;
|
||||
println!("Recording {} complete!", PATH);
|
||||
Ok(())
|
||||
|
|
File diff suppressed because it is too large
|
@ -3,7 +3,7 @@ pub type SupportedInputFormats = std::vec::IntoIter<SupportedFormat>;
|
|||
pub type SupportedOutputFormats = std::vec::IntoIter<SupportedFormat>;
|
||||
|
||||
use std::hash::{Hash, Hasher};
|
||||
use std::sync::Arc;
|
||||
use std::sync::{Arc};
|
||||
use BackendSpecificError;
|
||||
use DefaultFormatError;
|
||||
use DeviceNameError;
|
||||
|
@ -14,12 +14,17 @@ use SampleRate;
|
|||
use SupportedFormat;
|
||||
use SupportedFormatsError;
|
||||
use super::sys;
|
||||
use super::parking_lot::Mutex;
|
||||
|
||||
/// An ASIO Device
|
||||
#[derive(Debug)]
|
||||
pub struct Device {
|
||||
/// The driver represented by this device.
|
||||
pub driver: Arc<sys::Driver>,
|
||||
|
||||
// Input and/or Output stream.
|
||||
// A driver can only have one of each.
|
||||
// They need to be created at the same time.
|
||||
pub asio_streams: Arc<Mutex<sys::AsioStreams>>,
|
||||
}
|
||||
|
||||
/// All available devices.
|
||||
|
@ -148,7 +153,14 @@ impl Iterator for Devices {
|
|||
loop {
|
||||
match self.drivers.next() {
|
||||
Some(name) => match self.asio.load_driver(&name) {
|
||||
Ok(driver) => return Some(Device { driver: Arc::new(driver) }),
|
||||
Ok(driver) => {
|
||||
let driver = Arc::new(driver);
|
||||
let asio_streams = Arc::new(Mutex::new(sys::AsioStreams {
|
||||
input: None,
|
||||
output: None,
|
||||
}));
|
||||
return Some(Device { driver, asio_streams });
|
||||
}
|
||||
Err(_) => continue,
|
||||
}
|
||||
None => return None,
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
extern crate asio_sys as sys;
|
||||
extern crate parking_lot;
|
||||
|
||||
use {
|
||||
BuildStreamError,
|
||||
|
@ -8,18 +9,18 @@ use {
|
|||
Format,
|
||||
PauseStreamError,
|
||||
PlayStreamError,
|
||||
StreamDataResult,
|
||||
SupportedFormatsError,
|
||||
StreamData,
|
||||
StreamError,
|
||||
};
|
||||
use traits::{
|
||||
DeviceTrait,
|
||||
EventLoopTrait,
|
||||
HostTrait,
|
||||
StreamIdTrait,
|
||||
StreamTrait,
|
||||
};
|
||||
|
||||
pub use self::device::{Device, Devices, SupportedInputFormats, SupportedOutputFormats};
|
||||
pub use self::stream::{EventLoop, StreamId};
|
||||
pub use self::stream::Stream;
|
||||
use std::sync::Arc;
|
||||
|
||||
mod device;
|
||||
|
@ -42,7 +43,6 @@ impl Host {
|
|||
impl HostTrait for Host {
|
||||
type Devices = Devices;
|
||||
type Device = Device;
|
||||
type EventLoop = EventLoop;
|
||||
|
||||
fn is_available() -> bool {
|
||||
true
|
||||
|
@ -62,15 +62,12 @@ impl HostTrait for Host {
|
|||
// ASIO has no concept of a default device, so just use the first.
|
||||
self.output_devices().ok().and_then(|mut ds| ds.next())
|
||||
}
|
||||
|
||||
fn event_loop(&self) -> Self::EventLoop {
|
||||
EventLoop::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl DeviceTrait for Device {
|
||||
type SupportedInputFormats = SupportedInputFormats;
|
||||
type SupportedOutputFormats = SupportedOutputFormats;
|
||||
type Stream = Stream;
|
||||
|
||||
fn name(&self) -> Result<String, DeviceNameError> {
|
||||
Device::name(self)
|
||||
|
@ -91,46 +88,28 @@ impl DeviceTrait for Device {
|
|||
fn default_output_format(&self) -> Result<Format, DefaultFormatError> {
|
||||
Device::default_output_format(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl EventLoopTrait for EventLoop {
|
||||
type Device = Device;
|
||||
type StreamId = StreamId;
|
||||
|
||||
fn build_input_stream(
|
||||
&self,
|
||||
device: &Self::Device,
|
||||
format: &Format,
|
||||
) -> Result<Self::StreamId, BuildStreamError> {
|
||||
EventLoop::build_input_stream(self, device, format)
|
||||
}
|
||||
|
||||
fn build_output_stream(
|
||||
&self,
|
||||
device: &Self::Device,
|
||||
format: &Format,
|
||||
) -> Result<Self::StreamId, BuildStreamError> {
|
||||
EventLoop::build_output_stream(self, device, format)
|
||||
}
|
||||
|
||||
fn play_stream(&self, stream: Self::StreamId) -> Result<(), PlayStreamError> {
|
||||
EventLoop::play_stream(self, stream)
|
||||
}
|
||||
|
||||
fn pause_stream(&self, stream: Self::StreamId) -> Result<(), PauseStreamError> {
|
||||
EventLoop::pause_stream(self, stream)
|
||||
}
|
||||
|
||||
fn destroy_stream(&self, stream: Self::StreamId) {
|
||||
EventLoop::destroy_stream(self, stream)
|
||||
}
|
||||
|
||||
fn run<F>(&self, callback: F) -> !
|
||||
fn build_input_stream<D, E>(&self, format: &Format, data_callback: D, error_callback: E) -> Result<Self::Stream, BuildStreamError>
|
||||
where
|
||||
F: FnMut(Self::StreamId, StreamDataResult) + Send,
|
||||
D: FnMut(StreamData) + Send + 'static, E: FnMut(StreamError) + Send + 'static
|
||||
{
|
||||
EventLoop::run(self, callback)
|
||||
Device::build_input_stream(self, format, data_callback, error_callback)
|
||||
}
|
||||
|
||||
fn build_output_stream<D, E>(&self, format: &Format, data_callback: D, error_callback: E) -> Result<Self::Stream, BuildStreamError>
|
||||
where
|
||||
D: FnMut(StreamData) + Send + 'static, E: FnMut(StreamError) + Send + 'static
|
||||
{
|
||||
Device::build_output_stream(self, format, data_callback, error_callback)
|
||||
}
|
||||
}
|
||||
|
||||
impl StreamIdTrait for StreamId {}
|
||||
impl StreamTrait for Stream {
|
||||
fn play(&self) -> Result<(), PlayStreamError> {
|
||||
Stream::play(self)
|
||||
}
|
||||
|
||||
fn pause(&self) -> Result<(), PauseStreamError> {
|
||||
Stream::pause(self)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -4,11 +4,9 @@ extern crate num_traits;
|
|||
use self::num_traits::PrimInt;
|
||||
use super::Device;
|
||||
use std;
|
||||
use std::mem;
|
||||
use std::sync::atomic::{AtomicUsize, Ordering};
|
||||
use std::sync::{Arc, Mutex};
|
||||
use std::thread;
|
||||
use std::time::Duration;
|
||||
use std::sync::atomic::{Ordering, AtomicBool};
|
||||
use std::sync::Arc;
|
||||
use super::parking_lot::Mutex;
|
||||
use BackendSpecificError;
|
||||
use BuildStreamError;
|
||||
use Format;
|
||||
|
@ -16,9 +14,9 @@ use PauseStreamError;
|
|||
use PlayStreamError;
|
||||
use SampleFormat;
|
||||
use StreamData;
|
||||
use StreamDataResult;
|
||||
use UnknownTypeInputBuffer;
|
||||
use UnknownTypeOutputBuffer;
|
||||
use StreamError;
|
||||
|
||||
/// Sample types whose constant silent value is known.
|
||||
trait Silence {
|
||||
|
@ -34,35 +32,6 @@ trait InterleavedSample: Clone + Copy + Silence {
|
|||
/// Constraints on the ASIO sample types.
|
||||
trait AsioSample: Clone + Copy + Silence + std::ops::Add<Self, Output = Self> {}
|
||||
|
||||
/// Controls all streams
|
||||
pub struct EventLoop {
|
||||
/// The input and output ASIO streams
|
||||
asio_streams: Arc<Mutex<sys::AsioStreams>>,
|
||||
/// List of all CPAL streams
|
||||
cpal_streams: Arc<Mutex<Vec<Option<Stream>>>>,
|
||||
/// Total stream count.
|
||||
stream_count: AtomicUsize,
|
||||
/// The CPAL callback that the user gives to fill the buffers.
|
||||
callbacks: Arc<Mutex<Option<&'static mut (FnMut(StreamId, StreamDataResult) + Send)>>>,
|
||||
}
|
||||
|
||||
/// Id for each stream.
|
||||
/// Created according to the order in which streams are created.
/// Starting at one, not zero.
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
|
||||
pub struct StreamId(usize);
|
||||
|
||||
/// CPAL stream.
|
||||
/// This decouples the many cpal streams
|
||||
/// from the single input and single output
|
||||
/// ASIO streams.
|
||||
/// Each stream can be playing or paused.
|
||||
struct Stream {
|
||||
playing: bool,
|
||||
// The driver associated with this stream.
|
||||
driver: Arc<sys::Driver>,
|
||||
}
|
||||
|
||||
// Used to keep track of whether or not the current ASIO stream buffer requires
// silencing before summing audio.
|
||||
#[derive(Default)]
|
||||
|
@ -71,114 +40,38 @@ struct SilenceAsioBuffer {
|
|||
second: bool,
|
||||
}
|
||||
|
||||
impl EventLoop {
|
||||
pub fn new() -> EventLoop {
|
||||
EventLoop {
|
||||
asio_streams: Arc::new(Mutex::new(sys::AsioStreams {
|
||||
input: None,
|
||||
output: None,
|
||||
})),
|
||||
cpal_streams: Arc::new(Mutex::new(Vec::new())),
|
||||
// This is why the IDs count from one, not zero:
// because at this point there are no streams.
|
||||
stream_count: AtomicUsize::new(0),
|
||||
callbacks: Arc::new(Mutex::new(None)),
|
||||
}
|
||||
pub struct Stream {
|
||||
playing: Arc<AtomicBool>,
|
||||
// Ensure the `Driver` does not terminate until the last stream is dropped.
|
||||
driver: Arc<sys::Driver>,
|
||||
asio_streams: Arc<Mutex<sys::AsioStreams>>,
|
||||
callback_id: sys::CallbackId,
|
||||
}
|
||||
|
||||
impl Stream {
|
||||
pub fn play(&self) -> Result<(), PlayStreamError> {
|
||||
self.playing.store(true, Ordering::SeqCst);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Create a new CPAL Input Stream.
|
||||
///
|
||||
/// If there is no existing ASIO Input Stream it will be created.
|
||||
///
|
||||
/// On success, the buffer size of the stream is returned.
|
||||
fn get_or_create_input_stream(
|
||||
&self,
|
||||
driver: &sys::Driver,
|
||||
format: &Format,
|
||||
device: &Device,
|
||||
) -> Result<usize, BuildStreamError> {
|
||||
match device.default_input_format() {
|
||||
Ok(f) => {
|
||||
let num_asio_channels = f.channels;
|
||||
check_format(driver, format, num_asio_channels)
|
||||
},
|
||||
Err(_) => Err(BuildStreamError::FormatNotSupported),
|
||||
}?;
|
||||
let num_channels = format.channels as usize;
|
||||
let ref mut streams = *self.asio_streams.lock().unwrap();
|
||||
// Either create a stream if there's none or hand back the
// size of the current one.
|
||||
match streams.input {
|
||||
Some(ref input) => Ok(input.buffer_size as usize),
|
||||
None => {
|
||||
let output = streams.output.take();
|
||||
driver
|
||||
.prepare_input_stream(output, num_channels)
|
||||
.map(|new_streams| {
|
||||
let bs = match new_streams.input {
|
||||
Some(ref inp) => inp.buffer_size as usize,
|
||||
None => unreachable!(),
|
||||
};
|
||||
*streams = new_streams;
|
||||
bs
|
||||
}).map_err(|ref e| {
|
||||
println!("Error preparing stream: {}", e);
|
||||
BuildStreamError::DeviceNotAvailable
|
||||
})
|
||||
}
|
||||
}
|
||||
pub fn pause(&self) -> Result<(), PauseStreamError> {
|
||||
self.playing.store(false, Ordering::SeqCst);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a new CPAL Output Stream.
|
||||
///
|
||||
/// If there is no existing ASIO Output Stream it will be created.
|
||||
///
|
||||
/// On success, the buffer size of the stream is returned.
|
||||
fn get_or_create_output_stream(
|
||||
impl Device {
|
||||
pub fn build_input_stream<D, E>(
|
||||
&self,
|
||||
driver: &sys::Driver,
|
||||
format: &Format,
|
||||
device: &Device,
|
||||
) -> Result<usize, BuildStreamError> {
|
||||
match device.default_output_format() {
|
||||
Ok(f) => {
|
||||
let num_asio_channels = f.channels;
|
||||
check_format(driver, format, num_asio_channels)
|
||||
},
|
||||
Err(_) => Err(BuildStreamError::FormatNotSupported),
|
||||
}?;
|
||||
let num_channels = format.channels as usize;
|
||||
let ref mut streams = *self.asio_streams.lock().unwrap();
|
||||
// Either create a stream if there's none or return the size of the current one.
|
||||
match streams.output {
|
||||
Some(ref output) => Ok(output.buffer_size as usize),
|
||||
None => {
|
||||
let input = streams.input.take();
|
||||
driver
|
||||
.prepare_output_stream(input, num_channels)
|
||||
.map(|new_streams| {
|
||||
let bs = match new_streams.output {
|
||||
Some(ref out) => out.buffer_size as usize,
|
||||
None => unreachable!(),
|
||||
};
|
||||
*streams = new_streams;
|
||||
bs
|
||||
}).map_err(|ref e| {
|
||||
println!("Error preparing stream: {}", e);
|
||||
BuildStreamError::DeviceNotAvailable
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Builds a new cpal input stream
|
||||
pub fn build_input_stream(
|
||||
&self,
|
||||
device: &Device,
|
||||
format: &Format,
|
||||
) -> Result<StreamId, BuildStreamError> {
|
||||
let Device { driver, .. } = device;
|
||||
let stream_type = driver.input_data_type().map_err(build_stream_err)?;
|
||||
mut data_callback: D,
|
||||
_error_callback: E,
|
||||
) -> Result<Stream, BuildStreamError>
|
||||
where
|
||||
D: FnMut(StreamData) + Send + 'static,
|
||||
E: FnMut(StreamError) + Send + 'static
|
||||
{
|
||||
let stream_type = self.driver.input_data_type().map_err(build_stream_err)?;
|
||||
|
||||
// Ensure that the desired sample type is supported.
|
||||
let data_type = super::device::convert_data_type(&stream_type)
|
||||
|
@ -188,48 +81,36 @@ impl EventLoop {
|
|||
}
|
||||
|
||||
let num_channels = format.channels.clone();
|
||||
let stream_buffer_size = self.get_or_create_input_stream(&driver, format, device)?;
|
||||
let cpal_num_samples = stream_buffer_size * num_channels as usize;
|
||||
let count = self.stream_count.fetch_add(1, Ordering::SeqCst);
|
||||
let asio_streams = self.asio_streams.clone();
|
||||
let cpal_streams = self.cpal_streams.clone();
|
||||
let callbacks = self.callbacks.clone();
|
||||
let buffer_size = self.get_or_create_input_stream(format)?;
|
||||
let cpal_num_samples = buffer_size * num_channels as usize;
|
||||
|
||||
// Create the buffer depending on the size of the data type.
|
||||
let stream_id = StreamId(count);
|
||||
let len_bytes = cpal_num_samples * data_type.sample_size();
|
||||
let mut interleaved = vec![0u8; len_bytes];
|
||||
|
||||
let stream_playing = Arc::new(AtomicBool::new(false));
|
||||
let playing = Arc::clone(&stream_playing);
|
||||
let asio_streams = self.asio_streams.clone();
|
||||
|
||||
// Set the input callback.
|
||||
// This is the most performance-critical part of the ASIO bindings.
|
||||
driver.set_callback(move |buffer_index| unsafe {
|
||||
let callback_id = self.driver.add_callback(move |buffer_index| unsafe {
|
||||
// If not playing return early.
|
||||
// TODO: Don't assume `count` is valid - we should search for the matching `StreamId`.
|
||||
if let Some(s) = cpal_streams.lock().unwrap().get(count) {
|
||||
if let Some(s) = s {
|
||||
if !s.playing {
|
||||
return;
|
||||
}
|
||||
}
|
||||
if !playing.load(Ordering::SeqCst) {
|
||||
return
|
||||
}
|
||||
|
||||
// Acquire the stream and callback.
|
||||
let stream_lock = asio_streams.lock().unwrap();
|
||||
// There is 0% chance of lock contention: the host only locks when recreating streams.
|
||||
let stream_lock = asio_streams.lock();
|
||||
let ref asio_stream = match stream_lock.input {
|
||||
Some(ref asio_stream) => asio_stream,
|
||||
None => return,
|
||||
};
|
||||
let mut callbacks = callbacks.lock().unwrap();
|
||||
let callback = match callbacks.as_mut() {
|
||||
Some(callback) => callback,
|
||||
None => return,
|
||||
};
|
||||
|
||||
/// 1. Write from the ASIO buffer to the interleaved CPAL buffer.
|
||||
/// 2. Deliver the CPAL buffer to the user callback.
|
||||
unsafe fn process_input_callback<A, B, F, G>(
|
||||
stream_id: StreamId,
|
||||
callback: &mut (dyn FnMut(StreamId, StreamDataResult) + Send),
|
||||
unsafe fn process_input_callback<A, B, D, F, G>(
|
||||
callback: &mut D,
|
||||
interleaved: &mut [u8],
|
||||
asio_stream: &sys::AsioStream,
|
||||
buffer_index: usize,
|
||||
|
@ -239,6 +120,7 @@ impl EventLoop {
|
|||
where
|
||||
A: AsioSample,
|
||||
B: InterleavedSample,
|
||||
D: FnMut(StreamData) + Send + 'static,
|
||||
F: Fn(A) -> A,
|
||||
G: Fn(A) -> B,
|
||||
{
|
||||
|
@ -254,16 +136,14 @@ impl EventLoop {
|
|||
|
||||
// 2. Deliver the interleaved buffer to the callback.
|
||||
callback(
|
||||
stream_id,
|
||||
Ok(StreamData::Input { buffer: B::unknown_type_input_buffer(interleaved) }),
|
||||
StreamData::Input { buffer: B::unknown_type_input_buffer(interleaved) },
|
||||
);
|
||||
}
|
||||
|
||||
match (&stream_type, data_type) {
|
||||
(&sys::AsioSampleType::ASIOSTInt16LSB, SampleFormat::I16) => {
|
||||
process_input_callback::<i16, i16, _, _>(
|
||||
stream_id,
|
||||
callback,
|
||||
process_input_callback::<i16, i16, _, _, _>(
|
||||
&mut data_callback,
|
||||
&mut interleaved,
|
||||
asio_stream,
|
||||
buffer_index as usize,
|
||||
|
@ -272,9 +152,8 @@ impl EventLoop {
|
|||
);
|
||||
}
|
||||
(&sys::AsioSampleType::ASIOSTInt16MSB, SampleFormat::I16) => {
|
||||
process_input_callback::<i16, i16, _, _>(
|
||||
stream_id,
|
||||
callback,
|
||||
process_input_callback::<i16, i16, _, _, _>(
|
||||
&mut data_callback,
|
||||
&mut interleaved,
|
||||
asio_stream,
|
||||
buffer_index as usize,
|
||||
|
@ -287,9 +166,8 @@ impl EventLoop {
|
|||
// trait for the `to_le` and `to_be` methods, but this does not support floats.
|
||||
(&sys::AsioSampleType::ASIOSTFloat32LSB, SampleFormat::F32) |
|
||||
(&sys::AsioSampleType::ASIOSTFloat32MSB, SampleFormat::F32) => {
|
||||
process_input_callback::<f32, f32, _, _>(
|
||||
stream_id,
|
||||
callback,
|
||||
process_input_callback::<f32, f32, _, _, _>(
|
||||
&mut data_callback,
|
||||
&mut interleaved,
|
||||
asio_stream,
|
||||
buffer_index as usize,
|
||||
|
@ -302,9 +180,8 @@ impl EventLoop {
|
|||
// `process_output_callback` function above by removing the unnecessary sample
|
||||
// conversion function.
|
||||
(&sys::AsioSampleType::ASIOSTInt32LSB, SampleFormat::I16) => {
|
||||
process_input_callback::<i32, i16, _, _>(
|
||||
stream_id,
|
||||
callback,
|
||||
process_input_callback::<i32, i16, _, _, _>(
|
||||
&mut data_callback,
|
||||
&mut interleaved,
|
||||
asio_stream,
|
||||
buffer_index as usize,
|
||||
|
@ -313,9 +190,8 @@ impl EventLoop {
|
|||
);
|
||||
}
|
||||
(&sys::AsioSampleType::ASIOSTInt32MSB, SampleFormat::I16) => {
|
||||
process_input_callback::<i32, i16, _, _>(
|
||||
stream_id,
|
||||
callback,
|
||||
process_input_callback::<i32, i16, _, _, _>(
|
||||
&mut data_callback,
|
||||
&mut interleaved,
|
||||
asio_stream,
|
||||
buffer_index as usize,
|
||||
|
@ -327,9 +203,8 @@ impl EventLoop {
|
|||
// trait for the `to_le` and `to_be` methods, but this does not support floats.
|
||||
(&sys::AsioSampleType::ASIOSTFloat64LSB, SampleFormat::F32) |
|
||||
(&sys::AsioSampleType::ASIOSTFloat64MSB, SampleFormat::F32) => {
|
||||
process_input_callback::<f64, f32, _, _>(
|
||||
stream_id,
|
||||
callback,
|
||||
process_input_callback::<f64, f32, _, _, _>(
|
||||
&mut data_callback,
|
||||
&mut interleaved,
|
||||
asio_stream,
|
||||
buffer_index as usize,
|
||||
|
@ -345,23 +220,31 @@ impl EventLoop {
|
|||
}
|
||||
});
|
||||
|
||||
// Create stream and set to paused
|
||||
self.cpal_streams
|
||||
.lock()
|
||||
.unwrap()
|
||||
.push(Some(Stream { driver: driver.clone(), playing: false }));
|
||||
let driver = self.driver.clone();
|
||||
let asio_streams = self.asio_streams.clone();
|
||||
|
||||
Ok(StreamId(count))
|
||||
// Immediately start the device?
|
||||
self.driver.start().map_err(build_stream_err)?;
|
||||
|
||||
Ok(Stream {
|
||||
playing: stream_playing,
|
||||
driver,
|
||||
asio_streams,
|
||||
callback_id,
|
||||
})
|
||||
}
|
||||
|
||||
/// Create an output cpal stream.
|
||||
pub fn build_output_stream(
|
||||
pub fn build_output_stream<D, E>(
|
||||
&self,
|
||||
device: &Device,
|
||||
format: &Format,
|
||||
) -> Result<StreamId, BuildStreamError> {
|
||||
let Device { driver, .. } = device;
|
||||
let stream_type = driver.output_data_type().map_err(build_stream_err)?;
|
||||
mut data_callback: D,
|
||||
_error_callback: E,
|
||||
) -> Result<Stream, BuildStreamError>
|
||||
where
|
||||
D: FnMut(StreamData) + Send + 'static,
|
||||
E: FnMut(StreamError) + Send + 'static,
|
||||
{
|
||||
let stream_type = self.driver.output_data_type().map_err(build_stream_err)?;
|
||||
|
||||
// Ensure that the desired sample type is supported.
|
||||
let data_type = super::device::convert_data_type(&stream_type)
|
||||
|
@ -371,38 +254,30 @@ impl EventLoop {
|
|||
}
|
||||
|
||||
let num_channels = format.channels.clone();
|
||||
let stream_buffer_size = self.get_or_create_output_stream(&driver, format, device)?;
|
||||
let cpal_num_samples = stream_buffer_size * num_channels as usize;
|
||||
let count = self.stream_count.fetch_add(1, Ordering::SeqCst);
|
||||
let asio_streams = self.asio_streams.clone();
|
||||
let cpal_streams = self.cpal_streams.clone();
|
||||
let callbacks = self.callbacks.clone();
|
||||
let buffer_size = self.get_or_create_output_stream(format)?;
|
||||
let cpal_num_samples = buffer_size * num_channels as usize;
|
||||
|
||||
// Create buffers depending on data type.
|
||||
let stream_id = StreamId(count);
|
||||
let len_bytes = cpal_num_samples * data_type.sample_size();
|
||||
let mut interleaved = vec![0u8; len_bytes];
|
||||
let mut silence_asio_buffer = SilenceAsioBuffer::default();
|
||||
|
||||
driver.set_callback(move |buffer_index| unsafe {
|
||||
let stream_playing = Arc::new(AtomicBool::new(false));
|
||||
let playing = Arc::clone(&stream_playing);
|
||||
let asio_streams = self.asio_streams.clone();
|
||||
|
||||
let callback_id = self.driver.add_callback(move |buffer_index| unsafe {
|
||||
// If not playing, return early.
|
||||
// TODO: Don't assume `count` is valid - we should search for the matching `StreamId`.
|
||||
if let Some(s) = cpal_streams.lock().unwrap().get(count) {
|
||||
if let Some(s) = s {
|
||||
if !s.playing {
|
||||
return ();
|
||||
}
|
||||
}
|
||||
if !playing.load(Ordering::SeqCst) {
|
||||
return
|
||||
}
|
||||
|
||||
// Acquire the stream and callback.
|
||||
let stream_lock = asio_streams.lock().unwrap();
|
||||
// There is 0% chance of lock contention: the host only locks when recreating streams.
|
||||
let stream_lock = asio_streams.lock();
|
||||
let ref asio_stream = match stream_lock.output {
|
||||
Some(ref asio_stream) => asio_stream,
|
||||
None => return,
|
||||
};
|
||||
let mut callbacks = callbacks.lock().unwrap();
|
||||
let callback = callbacks.as_mut();
|
||||
|
||||
// Silence the ASIO buffer that is about to be used.
|
||||
//
|
||||
|
@ -430,9 +305,8 @@ impl EventLoop {
|
|||
/// 2. If required, silence the ASIO buffer.
|
||||
/// 3. Finally, write the interleaved data to the non-interleaved ASIO buffer,
|
||||
/// performing endianness conversions as necessary.
|
||||
unsafe fn process_output_callback<A, B, F, G>(
|
||||
stream_id: StreamId,
|
||||
callback: Option<&mut &mut (dyn FnMut(StreamId, StreamDataResult) + Send)>,
|
||||
unsafe fn process_output_callback<A, B, D, F, G>(
|
||||
callback: &mut D,
|
||||
interleaved: &mut [u8],
|
||||
silence_asio_buffer: bool,
|
||||
asio_stream: &sys::AsioStream,
|
||||
|
@ -443,18 +317,14 @@ impl EventLoop {
|
|||
where
|
||||
A: InterleavedSample,
|
||||
B: AsioSample,
|
||||
D: FnMut(StreamData) + Send + 'static,
|
||||
F: Fn(A) -> B,
|
||||
G: Fn(B) -> B,
|
||||
{
|
||||
// 1. Render interleaved buffer from callback.
|
||||
let interleaved: &mut [A] = cast_slice_mut(interleaved);
|
||||
match callback {
|
||||
None => interleaved.iter_mut().for_each(|s| *s = A::SILENCE),
|
||||
Some(callback) => {
|
||||
let buffer = A::unknown_type_output_buffer(interleaved);
|
||||
callback(stream_id, Ok(StreamData::Output { buffer }));
|
||||
}
|
||||
}
|
||||
let buffer = A::unknown_type_output_buffer(interleaved);
|
||||
callback(StreamData::Output { buffer });
|
||||
|
||||
// 2. Silence ASIO channels if necessary.
|
||||
let n_channels = interleaved.len() / asio_stream.buffer_size as usize;
|
||||
|
@ -478,9 +348,8 @@ impl EventLoop {
|
|||
|
||||
match (data_type, &stream_type) {
|
||||
(SampleFormat::I16, &sys::AsioSampleType::ASIOSTInt16LSB) => {
|
||||
process_output_callback::<i16, i16, _, _>(
|
||||
stream_id,
|
||||
callback,
|
||||
process_output_callback::<i16, i16, _, _, _>(
|
||||
&mut data_callback,
|
||||
&mut interleaved,
|
||||
silence,
|
||||
asio_stream,
|
||||
|
@ -490,9 +359,8 @@ impl EventLoop {
|
|||
);
|
||||
}
|
||||
(SampleFormat::I16, &sys::AsioSampleType::ASIOSTInt16MSB) => {
|
||||
process_output_callback::<i16, i16, _, _>(
|
||||
stream_id,
|
||||
callback,
|
||||
process_output_callback::<i16, i16, _, _, _>(
|
||||
&mut data_callback,
|
||||
&mut interleaved,
|
||||
silence,
|
||||
asio_stream,
|
||||
|
@ -506,9 +374,8 @@ impl EventLoop {
|
|||
// trait for the `to_le` and `to_be` methods, but this does not support floats.
|
||||
(SampleFormat::F32, &sys::AsioSampleType::ASIOSTFloat32LSB) |
|
||||
(SampleFormat::F32, &sys::AsioSampleType::ASIOSTFloat32MSB) => {
|
||||
process_output_callback::<f32, f32, _, _>(
|
||||
stream_id,
|
||||
callback,
|
||||
process_output_callback::<f32, f32, _, _, _>(
|
||||
&mut data_callback,
|
||||
&mut interleaved,
|
||||
silence,
|
||||
asio_stream,
|
||||
|
@ -522,9 +389,8 @@ impl EventLoop {
|
|||
// `process_output_callback` function above by removing the unnecessary sample
|
||||
// conversion function.
|
||||
(SampleFormat::I16, &sys::AsioSampleType::ASIOSTInt32LSB) => {
|
||||
process_output_callback::<i16, i32, _, _>(
|
||||
stream_id,
|
||||
callback,
|
||||
process_output_callback::<i16, i32, _, _, _>(
|
||||
&mut data_callback,
|
||||
&mut interleaved,
|
||||
silence,
|
||||
asio_stream,
|
||||
|
@ -534,9 +400,8 @@ impl EventLoop {
|
|||
);
|
||||
}
|
||||
(SampleFormat::I16, &sys::AsioSampleType::ASIOSTInt32MSB) => {
|
||||
process_output_callback::<i16, i32, _, _>(
|
||||
stream_id,
|
||||
callback,
|
||||
process_output_callback::<i16, i32, _, _, _>(
|
||||
&mut data_callback,
|
||||
&mut interleaved,
|
||||
silence,
|
||||
asio_stream,
|
||||
|
@ -549,9 +414,8 @@ impl EventLoop {
|
|||
// trait for the `to_le` and `to_be` methods, but this does not support floats.
|
||||
(SampleFormat::F32, &sys::AsioSampleType::ASIOSTFloat64LSB) |
|
||||
(SampleFormat::F32, &sys::AsioSampleType::ASIOSTFloat64MSB) => {
|
||||
process_output_callback::<f32, f64, _, _>(
|
||||
stream_id,
|
||||
callback,
|
||||
process_output_callback::<f32, f64, _, _, _>(
|
||||
&mut data_callback,
|
||||
&mut interleaved,
|
||||
silence,
|
||||
asio_stream,
|
||||
|
@ -568,78 +432,104 @@ impl EventLoop {
|
|||
}
|
||||
});
|
||||
|
||||
// Create the stream paused
|
||||
self.cpal_streams
|
||||
.lock()
|
||||
.unwrap()
|
||||
.push(Some(Stream { driver: driver.clone(), playing: false }));
|
||||
let driver = self.driver.clone();
|
||||
let asio_streams = self.asio_streams.clone();
|
||||
|
||||
// Give the ID based on the stream count
|
||||
Ok(StreamId(count))
|
||||
// Immediately start the device?
|
||||
self.driver.start().map_err(build_stream_err)?;
|
||||
|
||||
Ok(Stream {
|
||||
playing: stream_playing,
|
||||
driver,
|
||||
asio_streams,
|
||||
callback_id,
|
||||
})
|
||||
}
|
||||
|
||||
/// Play the cpal stream for the given ID.
|
||||
pub fn play_stream(&self, stream_id: StreamId) -> Result<(), PlayStreamError> {
|
||||
let mut streams = self.cpal_streams.lock().unwrap();
|
||||
if let Some(s) = streams.get_mut(stream_id.0).expect("Bad play stream index") {
|
||||
s.playing = true;
|
||||
// Calling play when already playing is a no-op
|
||||
s.driver.start().map_err(play_stream_err)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Pause the cpal stream for the given ID.
|
||||
/// Create a new CPAL Input Stream.
|
||||
///
|
||||
/// Pause the ASIO streams if there are no other CPAL streams playing, as ASIO only allows
|
||||
/// stopping the entire driver.
|
||||
pub fn pause_stream(&self, stream_id: StreamId) -> Result<(), PauseStreamError> {
|
||||
let mut streams = self.cpal_streams.lock().unwrap();
|
||||
let streams_playing = streams.iter()
|
||||
.filter(|s| s.as_ref().map(|s| s.playing).unwrap_or(false))
|
||||
.count();
|
||||
if let Some(s) = streams.get_mut(stream_id.0).expect("Bad pause stream index") {
|
||||
if streams_playing <= 1 {
|
||||
s.driver.stop().map_err(pause_stream_err)?;
|
||||
/// If there is no existing ASIO Input Stream it will be created.
|
||||
///
|
||||
/// On success, the buffer size of the stream is returned.
|
||||
fn get_or_create_input_stream(
|
||||
&self,
|
||||
format: &Format,
|
||||
) -> Result<usize, BuildStreamError> {
|
||||
match self.default_input_format() {
|
||||
Ok(f) => {
|
||||
let num_asio_channels = f.channels;
|
||||
check_format(&self.driver, format, num_asio_channels)
|
||||
},
|
||||
Err(_) => Err(BuildStreamError::FormatNotSupported),
|
||||
}?;
|
||||
let num_channels = format.channels as usize;
|
||||
let ref mut streams = *self.asio_streams.lock();
|
||||
// Either create a stream if there's none or hand back the
// size of the current one.
|
||||
match streams.input {
|
||||
Some(ref input) => Ok(input.buffer_size as usize),
|
||||
None => {
|
||||
let output = streams.output.take();
|
||||
self.driver
|
||||
.prepare_input_stream(output, num_channels)
|
||||
.map(|new_streams| {
|
||||
let bs = match new_streams.input {
|
||||
Some(ref inp) => inp.buffer_size as usize,
|
||||
None => unreachable!(),
|
||||
};
|
||||
*streams = new_streams;
|
||||
bs
|
||||
}).map_err(|ref e| {
|
||||
println!("Error preparing stream: {}", e);
|
||||
BuildStreamError::DeviceNotAvailable
|
||||
})
|
||||
}
|
||||
s.playing = false;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Destroy the cpal stream based on the ID.
|
||||
pub fn destroy_stream(&self, stream_id: StreamId) {
|
||||
// TODO: Should we not also remove an ASIO stream here?
|
||||
// Yes, and we should update the logic in the callbacks to search for the stream with
|
||||
// the matching ID, rather than assuming the index associated with the ID is valid.
|
||||
let mut streams = self.cpal_streams.lock().unwrap();
|
||||
streams.get_mut(stream_id.0).take();
|
||||
}
|
||||
|
||||
/// Run the cpal callbacks
|
||||
pub fn run<F>(&self, mut callback: F) -> !
|
||||
where
|
||||
F: FnMut(StreamId, StreamDataResult) + Send,
|
||||
{
|
||||
let callback: &mut (FnMut(StreamId, StreamDataResult) + Send) = &mut callback;
|
||||
// Transmute needed to convince the compiler that the callback has a static lifetime
|
||||
*self.callbacks.lock().unwrap() = Some(unsafe { mem::transmute(callback) });
|
||||
loop {
|
||||
// A sleep here to prevent the loop from being
// optimized away in --release builds.
|
||||
thread::sleep(Duration::new(1u64, 0u32));
|
||||
/// Create a new CPAL Output Stream.
|
||||
///
|
||||
/// If there is no existing ASIO Output Stream it will be created.
|
||||
fn get_or_create_output_stream(
|
||||
&self,
|
||||
format: &Format,
|
||||
) -> Result<usize, BuildStreamError> {
|
||||
match self.default_output_format() {
|
||||
Ok(f) => {
|
||||
let num_asio_channels = f.channels;
|
||||
check_format(&self.driver, format, num_asio_channels)
|
||||
},
|
||||
Err(_) => Err(BuildStreamError::FormatNotSupported),
|
||||
}?;
|
||||
let num_channels = format.channels as usize;
|
||||
let ref mut streams = *self.asio_streams.lock();
|
||||
// Either create a stream if there's none or hand back the
// size of the current one.
|
||||
match streams.output {
|
||||
Some(ref output) => Ok(output.buffer_size as usize),
|
||||
None => {
|
||||
let output = streams.output.take();
|
||||
self.driver
|
||||
.prepare_output_stream(output, num_channels)
|
||||
.map(|new_streams| {
|
||||
let bs = match new_streams.output {
|
||||
Some(ref out) => out.buffer_size as usize,
|
||||
None => unreachable!(),
|
||||
};
|
||||
*streams = new_streams;
|
||||
bs
|
||||
}).map_err(|ref e| {
|
||||
println!("Error preparing stream: {}", e);
|
||||
BuildStreamError::DeviceNotAvailable
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Clean up if event loop is dropped.
|
||||
/// Currently event loop is never dropped.
|
||||
impl Drop for EventLoop {
|
||||
impl Drop for Stream {
|
||||
fn drop(&mut self) {
|
||||
*self.asio_streams.lock().unwrap() = sys::AsioStreams {
|
||||
output: None,
|
||||
input: None,
|
||||
};
|
||||
self.driver.remove_callback(self.callback_id);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -790,25 +680,3 @@ fn build_stream_err(e: sys::AsioError) -> BuildStreamError {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn pause_stream_err(e: sys::AsioError) -> PauseStreamError {
|
||||
match e {
|
||||
sys::AsioError::NoDrivers |
|
||||
sys::AsioError::HardwareMalfunction => PauseStreamError::DeviceNotAvailable,
|
||||
err => {
|
||||
let description = format!("{}", err);
|
||||
BackendSpecificError { description }.into()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn play_stream_err(e: sys::AsioError) -> PlayStreamError {
|
||||
match e {
|
||||
sys::AsioError::NoDrivers |
|
||||
sys::AsioError::HardwareMalfunction => PlayStreamError::DeviceNotAvailable,
|
||||
err => {
|
||||
let description = format!("{}", err);
|
||||
BackendSpecificError { description }.into()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -14,20 +14,18 @@ use SupportedFormatsError;
|
|||
use SampleFormat;
|
||||
use SampleRate;
|
||||
use StreamData;
|
||||
use StreamDataResult;
|
||||
use StreamError;
|
||||
use SupportedFormat;
|
||||
use UnknownTypeInputBuffer;
|
||||
use UnknownTypeOutputBuffer;
|
||||
use traits::{DeviceTrait, EventLoopTrait, HostTrait, StreamIdTrait};
|
||||
use traits::{DeviceTrait, HostTrait, StreamTrait};
|
||||
|
||||
use std::ffi::CStr;
|
||||
use std::fmt;
|
||||
use std::mem;
|
||||
use std::cell::RefCell;
|
||||
use std::os::raw::c_char;
|
||||
use std::ptr::null;
|
||||
use std::sync::{Arc, Condvar, Mutex};
|
||||
use std::thread;
|
||||
use std::time::Duration;
|
||||
use std::slice;
|
||||
|
||||
use self::coreaudio::audio_unit::{AudioUnit, Scope, Element};
|
||||
|
@ -87,7 +85,6 @@ impl Host {
|
|||
impl HostTrait for Host {
|
||||
type Devices = Devices;
|
||||
type Device = Device;
|
||||
type EventLoop = EventLoop;
|
||||
|
||||
fn is_available() -> bool {
|
||||
// Assume coreaudio is always available on macOS and iOS.
|
||||
|
@ -105,15 +102,12 @@ impl HostTrait for Host {
|
|||
fn default_output_device(&self) -> Option<Self::Device> {
|
||||
default_output_device()
|
||||
}
|
||||
|
||||
fn event_loop(&self) -> Self::EventLoop {
|
||||
EventLoop::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl DeviceTrait for Device {
|
||||
type SupportedInputFormats = SupportedInputFormats;
|
||||
type SupportedOutputFormats = SupportedOutputFormats;
|
||||
type Stream = Stream;
|
||||
|
||||
fn name(&self) -> Result<String, DeviceNameError> {
|
||||
Device::name(self)
|
||||
|
@ -134,50 +128,16 @@ impl DeviceTrait for Device {
|
|||
fn default_output_format(&self) -> Result<Format, DefaultFormatError> {
|
||||
Device::default_output_format(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl EventLoopTrait for EventLoop {
    type Device = Device;
    type StreamId = StreamId;

    fn build_input_stream(
        &self,
        device: &Self::Device,
        format: &Format,
    ) -> Result<Self::StreamId, BuildStreamError> {
        EventLoop::build_input_stream(self, device, format)
    fn build_input_stream<D, E>(&self, format: &Format, data_callback: D, error_callback: E) -> Result<Self::Stream, BuildStreamError> where D: FnMut(StreamData) + Send + 'static, E: FnMut(StreamError) + Send + 'static {
        Device::build_input_stream(self, format, data_callback, error_callback)
    }

    fn build_output_stream(
        &self,
        device: &Self::Device,
        format: &Format,
    ) -> Result<Self::StreamId, BuildStreamError> {
        EventLoop::build_output_stream(self, device, format)
    }

    fn play_stream(&self, stream: Self::StreamId) -> Result<(), PlayStreamError> {
        EventLoop::play_stream(self, stream)
    }

    fn pause_stream(&self, stream: Self::StreamId) -> Result<(), PauseStreamError> {
        EventLoop::pause_stream(self, stream)
    }

    fn destroy_stream(&self, stream: Self::StreamId) {
        EventLoop::destroy_stream(self, stream)
    }

    fn run<F>(&self, callback: F) -> !
    where
        F: FnMut(Self::StreamId, StreamDataResult) + Send,
    {
        EventLoop::run(self, callback)
    fn build_output_stream<D, E>(&self, format: &Format, data_callback: D, error_callback: E) -> Result<Self::Stream, BuildStreamError> where D: FnMut(StreamData) + Send + 'static, E: FnMut(StreamError) + Send + 'static {
        Device::build_output_stream(self, format, data_callback, error_callback)
    }
}

impl StreamIdTrait for StreamId {}
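Every backend in this patch follows the same shape: stream creation moves onto `DeviceTrait` and playback control onto `StreamTrait`, with the data and error callbacks supplied per stream instead of through `EventLoop::run`. For context, a minimal downstream sketch of the resulting call pattern, assuming cpal's public `default_host` helper and the re-exported `StreamData`/`UnknownTypeOutputBuffer` types (illustrative only, not part of the patch):

    use cpal::traits::{DeviceTrait, HostTrait, StreamTrait};

    fn main() {
        let host = cpal::default_host();
        let device = host.default_output_device().expect("no output device available");
        let format = device.default_output_format().expect("no default output format");

        let stream = device
            .build_output_stream(
                &format,
                // Data callback: runs for this stream only, no shared event loop.
                move |data| {
                    if let cpal::StreamData::Output {
                        buffer: cpal::UnknownTypeOutputBuffer::F32(mut buffer),
                    } = data
                    {
                        for sample in buffer.iter_mut() {
                            *sample = 0.0; // silence
                        }
                    }
                },
                // Error callback: replaces the old per-event-loop error reporting.
                move |err| eprintln!("stream error: {:?}", err),
            )
            .expect("failed to build output stream");

        stream.play().expect("failed to play stream");
        std::thread::sleep(std::time::Duration::from_secs(1));
        // Dropping `stream` releases the backend resources; there is no `destroy_stream` call.
    }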

#[derive(Clone, PartialEq, Eq)]
pub struct Device {
    audio_device_id: AudioDeviceID,
@ -420,31 +380,6 @@ impl fmt::Debug for Device {
    }
}

// The ID of a stream is its index within the `streams` array of the events loop.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct StreamId(usize);

pub struct EventLoop {
    // This `Arc` is shared with all the callbacks of coreaudio.
    //
    // TODO: Eventually, CPAL's API should be changed to allow for submitting a unique callback per
    // stream to avoid streams blocking one another.
    user_callback: Arc<Mutex<UserCallback>>,
    streams: Mutex<Vec<Option<StreamInner>>>,
    loop_cond: Arc<(Mutex<bool>, Condvar)>,
}

enum UserCallback {
    // When `run` is called with a callback, that callback will be stored here.
    //
    // It is essential for the safety of the program that this callback is removed before `run`
    // returns (not possible with the current CPAL API).
    Active(&'static mut (dyn FnMut(StreamId, StreamDataResult) + Send)),
    // A queue of events that have occurred but that have not yet been emitted to the user as we
    // don't yet have a callback to do so.
    Inactive,
}
|
||||
struct StreamInner {
|
||||
playing: bool,
|
||||
audio_unit: AudioUnit,
|
||||
|
@ -540,75 +475,8 @@ fn audio_unit_from_device(device: &Device, input: bool) -> Result<AudioUnit, cor
|
|||
Ok(audio_unit)
|
||||
}
|
||||
|
||||
impl EventLoop {
|
||||
#[inline]
|
||||
fn new() -> EventLoop {
|
||||
EventLoop {
|
||||
user_callback: Arc::new(Mutex::new(UserCallback::Inactive)),
|
||||
streams: Mutex::new(Vec::new()),
|
||||
loop_cond: Arc::new((Mutex::new(false), Condvar::new())),
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn run<F>(&self, mut callback: F) -> !
|
||||
where F: FnMut(StreamId, StreamDataResult) + Send
|
||||
{
|
||||
{
|
||||
let mut guard = self.user_callback.lock().unwrap();
|
||||
if let UserCallback::Active(_) = *guard {
|
||||
panic!("`EventLoop::run` was called when the event loop was already running");
|
||||
}
|
||||
let callback: &mut (dyn FnMut(StreamId, StreamDataResult) + Send) = &mut callback;
|
||||
*guard = UserCallback::Active(unsafe { mem::transmute(callback) });
|
||||
}
|
||||
|
||||
// Wait on a condvar to notify, which should never happen.
|
||||
let &(ref lock, ref cvar) = &*self.loop_cond;
|
||||
let mut running = lock.lock().unwrap();
|
||||
*running = true;
|
||||
while *running {
|
||||
running = cvar.wait(running).unwrap();
|
||||
}
|
||||
|
||||
unreachable!("current `EventLoop` API requires that `run` may not return");
|
||||
|
||||
// It is critical that we remove the callback before returning (currently not possible).
|
||||
// *self.user_callback.lock().unwrap() = UserCallback::Inactive;
|
||||
}
|
||||
|
||||
fn next_stream_id(&self) -> usize {
|
||||
let streams_lock = self.streams.lock().unwrap();
|
||||
let stream_id = streams_lock
|
||||
.iter()
|
||||
.position(|n| n.is_none())
|
||||
.unwrap_or(streams_lock.len());
|
||||
stream_id
|
||||
}
|
||||
|
||||
// Add the stream to the list of streams within `self`.
|
||||
fn add_stream(&self, stream_id: usize, au: AudioUnit, device_id: AudioDeviceID) {
|
||||
let inner = StreamInner {
|
||||
playing: true,
|
||||
audio_unit: au,
|
||||
device_id: device_id,
|
||||
};
|
||||
|
||||
let mut streams_lock = self.streams.lock().unwrap();
|
||||
if stream_id == streams_lock.len() {
|
||||
streams_lock.push(Some(inner));
|
||||
} else {
|
||||
streams_lock[stream_id] = Some(inner);
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn build_input_stream(
|
||||
&self,
|
||||
device: &Device,
|
||||
format: &Format,
|
||||
) -> Result<StreamId, BuildStreamError>
|
||||
{
|
||||
impl Device {
|
||||
fn build_input_stream<D, E>(&self, format: &Format, mut data_callback: D, _error_callback: E) -> Result<Stream, BuildStreamError> where D: FnMut(StreamData) + Send + 'static, E: FnMut(StreamError) + Send + 'static {
|
||||
// The scope and element for working with a device's input stream.
|
||||
let scope = Scope::Output;
|
||||
let element = Element::Input;
|
||||
|
@ -624,7 +492,7 @@ impl EventLoop {
|
|||
let sample_rate: f64 = 0.0;
|
||||
let data_size = mem::size_of::<f64>() as u32;
|
||||
let status = AudioObjectGetPropertyData(
|
||||
device.audio_device_id,
|
||||
self.audio_device_id,
|
||||
&property_address as *const _,
|
||||
0,
|
||||
null(),
|
||||
|
@ -635,26 +503,11 @@ impl EventLoop {
|
|||
|
||||
// If the requested sample rate is different to the device sample rate, update the device.
|
||||
if sample_rate as u32 != format.sample_rate.0 {
|
||||
|
||||
// In order to avoid breaking existing input streams we return an error if there is
|
||||
// already an active input stream for this device with the actual sample rate.
|
||||
for stream in &*self.streams.lock().unwrap() {
|
||||
if let Some(stream) = stream.as_ref() {
|
||||
if stream.device_id == device.audio_device_id {
|
||||
let description = "cannot change device sample rate for stream as an \
|
||||
existing stream is already running at the current sample rate"
|
||||
.into();
|
||||
let err = BackendSpecificError { description };
|
||||
return Err(err.into());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Get available sample rate ranges.
|
||||
property_address.mSelector = kAudioDevicePropertyAvailableNominalSampleRates;
|
||||
let data_size = 0u32;
|
||||
let status = AudioObjectGetPropertyDataSize(
|
||||
device.audio_device_id,
|
||||
self.audio_device_id,
|
||||
&property_address as *const _,
|
||||
0,
|
||||
null(),
|
||||
|
@ -665,7 +518,7 @@ impl EventLoop {
|
|||
let mut ranges: Vec<u8> = vec![];
|
||||
ranges.reserve_exact(data_size as usize);
|
||||
let status = AudioObjectGetPropertyData(
|
||||
device.audio_device_id,
|
||||
self.audio_device_id,
|
||||
&property_address as *const _,
|
||||
0,
|
||||
null(),
|
||||
|
@ -719,7 +572,7 @@ impl EventLoop {
|
|||
// Add our sample rate change listener callback.
|
||||
let reported_rate: f64 = 0.0;
|
||||
let status = AudioObjectAddPropertyListener(
|
||||
device.audio_device_id,
|
||||
self.audio_device_id,
|
||||
&property_address as *const _,
|
||||
Some(rate_listener),
|
||||
&reported_rate as *const _ as *mut _,
|
||||
|
@ -729,7 +582,7 @@ impl EventLoop {
|
|||
// Finally, set the sample rate.
|
||||
let sample_rate = sample_rate as f64;
|
||||
let status = AudioObjectSetPropertyData(
|
||||
device.audio_device_id,
|
||||
self.audio_device_id,
|
||||
&property_address as *const _,
|
||||
0,
|
||||
null(),
|
||||
|
@ -753,7 +606,7 @@ impl EventLoop {
|
|||
|
||||
// Remove the `rate_listener` callback.
|
||||
let status = AudioObjectRemovePropertyListener(
|
||||
device.audio_device_id,
|
||||
self.audio_device_id,
|
||||
&property_address as *const _,
|
||||
Some(rate_listener),
|
||||
&reported_rate as *const _ as *mut _,
|
||||
|
@ -762,18 +615,14 @@ impl EventLoop {
|
|||
}
|
||||
}
|
||||
|
||||
let mut audio_unit = audio_unit_from_device(device, true)?;
|
||||
let mut audio_unit = audio_unit_from_device(self, true)?;
|
||||
|
||||
// Set the stream in interleaved mode.
|
||||
let asbd = asbd_from_format(format);
|
||||
audio_unit.set_property(kAudioUnitProperty_StreamFormat, scope, element, Some(&asbd))?;
|
||||
|
||||
// Determine the future ID of the stream.
|
||||
let stream_id = self.next_stream_id();
|
||||
|
||||
// Register the callback that is being called by coreaudio whenever it needs data to be
|
||||
// fed to the audio buffer.
|
||||
let user_callback = self.user_callback.clone();
|
||||
let sample_format = format.data_type;
|
||||
let bytes_per_channel = format.data_type.sample_size();
|
||||
type Args = render_callback::Args<data::Raw>;
|
||||
|
@ -789,20 +638,14 @@ impl EventLoop {
|
|||
mData: data
|
||||
} = buffers[0];
|
||||
|
||||
let mut user_callback = user_callback.lock().unwrap();
|
||||
|
||||
// A small macro to simplify handling the callback for different sample types.
|
||||
macro_rules! try_callback {
|
||||
($SampleFormat:ident, $SampleType:ty) => {{
|
||||
let data_len = (data_byte_size as usize / bytes_per_channel) as usize;
|
||||
let data_slice = slice::from_raw_parts(data as *const $SampleType, data_len);
|
||||
let callback = match *user_callback {
|
||||
UserCallback::Active(ref mut cb) => cb,
|
||||
UserCallback::Inactive => return Ok(()),
|
||||
};
|
||||
let unknown_type_buffer = UnknownTypeInputBuffer::$SampleFormat(::InputBuffer { buffer: data_slice });
|
||||
let stream_data = StreamData::Input { buffer: unknown_type_buffer };
|
||||
callback(StreamId(stream_id), Ok(stream_data));
|
||||
data_callback(stream_data);
|
||||
}};
|
||||
}
|
||||
|
||||
|
@ -815,23 +658,17 @@ impl EventLoop {
|
|||
Ok(())
|
||||
})?;
|
||||
|
||||
// TODO: start playing now? is that consistent with the other backends?
|
||||
audio_unit.start()?;
|
||||
|
||||
// Add the stream to the list of streams within `self`.
|
||||
self.add_stream(stream_id, audio_unit, device.audio_device_id);
|
||||
|
||||
Ok(StreamId(stream_id))
|
||||
Ok(Stream::new(StreamInner {
|
||||
playing: true,
|
||||
audio_unit,
|
||||
device_id: self.audio_device_id,
|
||||
}))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn build_output_stream(
|
||||
&self,
|
||||
device: &Device,
|
||||
format: &Format,
|
||||
) -> Result<StreamId, BuildStreamError>
|
||||
{
|
||||
let mut audio_unit = audio_unit_from_device(device, false)?;
|
||||
fn build_output_stream<D, E>(&self, format: &Format, mut data_callback: D, _error_callback: E) -> Result<Stream, BuildStreamError> where D: FnMut(StreamData) + Send + 'static, E: FnMut(StreamError) + Send + 'static {
|
||||
let mut audio_unit = audio_unit_from_device(self, false)?;
|
||||
|
||||
// The scope and element for working with a device's output stream.
|
||||
let scope = Scope::Input;
|
||||
|
@ -841,12 +678,8 @@ impl EventLoop {
|
|||
let asbd = asbd_from_format(format);
|
||||
audio_unit.set_property(kAudioUnitProperty_StreamFormat, scope, element, Some(&asbd))?;
|
||||
|
||||
// Determine the future ID of the stream.
|
||||
let stream_id = self.next_stream_id();
|
||||
|
||||
// Register the callback that is being called by coreaudio whenever it needs data to be
|
||||
// fed to the audio buffer.
|
||||
let user_callback = self.user_callback.clone();
|
||||
let sample_format = format.data_type;
|
||||
let bytes_per_channel = format.data_type.sample_size();
|
||||
type Args = render_callback::Args<data::Raw>;
|
||||
|
@ -860,25 +693,14 @@ impl EventLoop {
|
|||
mData: data
|
||||
} = (*args.data.data).mBuffers[0];
|
||||
|
||||
let mut user_callback = user_callback.lock().unwrap();
|
||||
|
||||
// A small macro to simplify handling the callback for different sample types.
|
||||
macro_rules! try_callback {
|
||||
($SampleFormat:ident, $SampleType:ty, $equilibrium:expr) => {{
|
||||
let data_len = (data_byte_size as usize / bytes_per_channel) as usize;
|
||||
let data_slice = slice::from_raw_parts_mut(data as *mut $SampleType, data_len);
|
||||
let callback = match *user_callback {
|
||||
UserCallback::Active(ref mut cb) => cb,
|
||||
UserCallback::Inactive => {
|
||||
for sample in data_slice.iter_mut() {
|
||||
*sample = $equilibrium;
|
||||
}
|
||||
return Ok(());
|
||||
}
|
||||
};
|
||||
let unknown_type_buffer = UnknownTypeOutputBuffer::$SampleFormat(::OutputBuffer { buffer: data_slice });
|
||||
let stream_data = StreamData::Output { buffer: unknown_type_buffer };
|
||||
callback(StreamId(stream_id), Ok(stream_data));
|
||||
data_callback(stream_data);
|
||||
}};
|
||||
}
|
||||
|
||||
|
@ -891,25 +713,31 @@ impl EventLoop {
|
|||
Ok(())
|
||||
})?;
|
||||
|
||||
// TODO: start playing now? is that consistent with the other backends?
|
||||
audio_unit.start()?;
|
||||
|
||||
// Add the stream to the list of streams within `self`.
|
||||
self.add_stream(stream_id, audio_unit, device.audio_device_id);
|
||||
|
||||
Ok(StreamId(stream_id))
|
||||
Ok(Stream::new(StreamInner {
|
||||
playing: true,
|
||||
audio_unit,
|
||||
device_id: self.audio_device_id,
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
    fn destroy_stream(&self, stream_id: StreamId) {
        {
            let mut streams = self.streams.lock().unwrap();
            streams[stream_id.0] = None;
pub struct Stream {
    inner: RefCell<StreamInner>,
}

impl Stream {
    fn new(inner: StreamInner) -> Self {
        Self {
            inner: RefCell::new(inner),
        }
    }
}

    fn play_stream(&self, stream_id: StreamId) -> Result<(), PlayStreamError> {
        let mut streams = self.streams.lock().unwrap();
        let stream = streams[stream_id.0].as_mut().unwrap();
impl StreamTrait for Stream {
    fn play(&self) -> Result<(), PlayStreamError> {
        let mut stream = self.inner.borrow_mut();

        if !stream.playing {
            if let Err(e) = stream.audio_unit.start() {
@ -922,9 +750,8 @@ impl EventLoop {
        Ok(())
    }

    fn pause_stream(&self, stream_id: StreamId) -> Result<(), PauseStreamError> {
        let mut streams = self.streams.lock().unwrap();
        let stream = streams[stream_id.0].as_mut().unwrap();
    fn pause(&self) -> Result<(), PauseStreamError> {
        let mut stream = self.inner.borrow_mut();

        if stream.playing {
            if let Err(e) = stream.audio_unit.stop() {

@ -1,7 +1,6 @@
use std::mem;
use std::os::raw::c_void;
use std::slice::from_raw_parts;
use std::sync::Mutex;
use stdweb;
use stdweb::Reference;
use stdweb::unstable::TryInto;
@ -17,25 +16,102 @@ use PauseStreamError;
use PlayStreamError;
use SupportedFormatsError;
use StreamData;
use StreamDataResult;
use StreamError;
use SupportedFormat;
use UnknownTypeOutputBuffer;
use traits::{DeviceTrait, EventLoopTrait, HostTrait, StreamIdTrait};
use traits::{DeviceTrait, HostTrait, StreamTrait};

// The emscripten backend currently works by instantiating an `AudioContext` object per `Stream`.
// Creating a stream creates a new `AudioContext`. Destroying a stream destroys it. Creation of a
// `Host` instance initializes the `stdweb` context.

/// The default emscripten host type.
#[derive(Debug)]
pub struct Host;

/// Content is false if the iterator is empty.
pub struct Devices(bool);

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Device;

pub struct Stream {
    // A reference to an `AudioContext` object.
    audio_ctxt_ref: Reference,
}

// Index within the `streams` array of the events loop.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct StreamId(usize);

pub type SupportedInputFormats = ::std::vec::IntoIter<SupportedFormat>;
pub type SupportedOutputFormats = ::std::vec::IntoIter<SupportedFormat>;

impl Host {
|
||||
pub fn new() -> Result<Self, crate::HostUnavailable> {
|
||||
stdweb::initialize();
|
||||
Ok(Host)
|
||||
}
|
||||
}
|
||||
|
||||
impl Devices {
|
||||
fn new() -> Result<Self, DevicesError> {
|
||||
Ok(Self::default())
|
||||
}
|
||||
}
|
||||
|
||||
impl Device {
|
||||
#[inline]
|
||||
fn name(&self) -> Result<String, DeviceNameError> {
|
||||
Ok("Default Device".to_owned())
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn supported_input_formats(&self) -> Result<SupportedInputFormats, SupportedFormatsError> {
|
||||
unimplemented!();
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn supported_output_formats(&self) -> Result<SupportedOutputFormats, SupportedFormatsError> {
|
||||
// TODO: right now cpal's API doesn't allow flexibility here.
// "44100" and "2" (channels) have also been hard-coded in the rest of the code; if
// this ever becomes more flexible, don't forget to change that.
// According to https://developer.mozilla.org/en-US/docs/Web/API/BaseAudioContext/createBuffer
// browsers must support at least 1 to 32 channels and 8,000 Hz to 96,000 Hz.
//
// UPDATE: We can do this now. Might be best to use `crate::COMMON_SAMPLE_RATES` and
// filter out those that lie outside the range specified above.
Ok(
|
||||
vec![
|
||||
SupportedFormat {
|
||||
channels: 2,
|
||||
min_sample_rate: ::SampleRate(44100),
|
||||
max_sample_rate: ::SampleRate(44100),
|
||||
data_type: ::SampleFormat::F32,
|
||||
},
|
||||
].into_iter(),
|
||||
)
|
||||
}
|
||||
|
||||
fn default_input_format(&self) -> Result<Format, DefaultFormatError> {
|
||||
unimplemented!();
|
||||
}
|
||||
|
||||
fn default_output_format(&self) -> Result<Format, DefaultFormatError> {
|
||||
// TODO: because it is hard coded, see supported_output_formats.
|
||||
Ok(
|
||||
Format {
|
||||
channels: 2,
|
||||
sample_rate: ::SampleRate(44100),
|
||||
data_type: ::SampleFormat::F32,
|
||||
},
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl HostTrait for Host {
|
||||
type Devices = Devices;
|
||||
type Device = Device;
|
||||
type EventLoop = EventLoop;
|
||||
|
||||
fn is_available() -> bool {
|
||||
// Assume this host is always available on emscripten.
|
||||
|
@ -53,15 +129,12 @@ impl HostTrait for Host {
|
|||
fn default_output_device(&self) -> Option<Self::Device> {
|
||||
default_output_device()
|
||||
}
|
||||
|
||||
fn event_loop(&self) -> Self::EventLoop {
|
||||
EventLoop::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl DeviceTrait for Device {
|
||||
type SupportedInputFormats = SupportedInputFormats;
|
||||
type SupportedOutputFormats = SupportedOutputFormats;
|
||||
type Stream = Stream;
|
||||
|
||||
fn name(&self) -> Result<String, DeviceNameError> {
|
||||
Device::name(self)
|
||||
|
@ -82,224 +155,124 @@ impl DeviceTrait for Device {
|
|||
fn default_output_format(&self) -> Result<Format, DefaultFormatError> {
|
||||
Device::default_output_format(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl EventLoopTrait for EventLoop {
|
||||
type Device = Device;
|
||||
type StreamId = StreamId;
|
||||
|
||||
fn build_input_stream(
|
||||
fn build_input_stream<D, E>(
|
||||
&self,
|
||||
device: &Self::Device,
|
||||
format: &Format,
|
||||
) -> Result<Self::StreamId, BuildStreamError> {
|
||||
EventLoop::build_input_stream(self, device, format)
|
||||
}
|
||||
|
||||
fn build_output_stream(
|
||||
&self,
|
||||
device: &Self::Device,
|
||||
format: &Format,
|
||||
) -> Result<Self::StreamId, BuildStreamError> {
|
||||
EventLoop::build_output_stream(self, device, format)
|
||||
}
|
||||
|
||||
fn play_stream(&self, stream: Self::StreamId) -> Result<(), PlayStreamError> {
|
||||
EventLoop::play_stream(self, stream)
|
||||
}
|
||||
|
||||
fn pause_stream(&self, stream: Self::StreamId) -> Result<(), PauseStreamError> {
|
||||
EventLoop::pause_stream(self, stream)
|
||||
}
|
||||
|
||||
fn destroy_stream(&self, stream: Self::StreamId) {
|
||||
EventLoop::destroy_stream(self, stream)
|
||||
}
|
||||
|
||||
fn run<F>(&self, callback: F) -> !
|
||||
_format: &Format,
|
||||
_data_callback: D,
|
||||
_error_callback: E,
|
||||
) -> Result<Self::Stream, BuildStreamError>
|
||||
where
|
||||
F: FnMut(Self::StreamId, StreamDataResult) + Send,
|
||||
D: FnMut(StreamData) + Send + 'static,
|
||||
E: FnMut(StreamError) + Send + 'static,
|
||||
{
|
||||
EventLoop::run(self, callback)
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
fn build_output_stream<D, E>(
|
||||
&self,
|
||||
_format: &Format,
|
||||
data_callback: D,
|
||||
error_callback: E,
|
||||
) -> Result<Self::Stream, BuildStreamError>
|
||||
where
|
||||
D: FnMut(StreamData) + Send + 'static,
|
||||
E: FnMut(StreamError) + Send + 'static,
|
||||
{
|
||||
// Create the stream.
|
||||
let audio_ctxt_ref = js!(return new AudioContext()).into_reference().unwrap();
|
||||
let stream = Stream { audio_ctxt_ref };
|
||||
|
||||
// Specify the callback.
|
||||
let mut user_data = (self, data_callback, error_callback);
|
||||
let user_data_ptr = &mut user_data as *mut (_, _, _);
|
||||
|
||||
// Use `set_timeout` to invoke a Rust callback repeatedly.
|
||||
//
|
||||
// The job of this callback is to fill the content of the audio buffers.
|
||||
//
|
||||
// See also: The call to `set_timeout` at the end of the `audio_callback_fn` which creates
|
||||
// the loop.
|
||||
set_timeout(|| audio_callback_fn::<D, E>(user_data_ptr as *mut c_void), 10);
|
||||
|
||||
Ok(stream)
|
||||
}
|
||||
}
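The comments above describe how the emscripten backend drives audio without a dedicated thread: a timer callback fills the buffers and re-arms itself. A generic sketch of that self-rescheduling pattern, with a stub `set_timeout` standing in for the `stdweb` binding used by the real code:

    // Stub standing in for the JS timer binding used by the real backend.
    fn set_timeout<F: FnOnce() + 'static>(_callback: F, _timeout_ms: u32) {}

    // Each tick does one unit of work and then re-arms itself, forming the loop
    // that `audio_callback_fn` creates with its trailing `set_timeout` call.
    fn schedule_tick(mut fill_buffers: impl FnMut() + 'static) {
        fill_buffers();
        set_timeout(move || schedule_tick(fill_buffers), 10);
    }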
|
||||
|
||||
impl StreamIdTrait for StreamId {}
|
||||
|
||||
// The emscripten backend works by having a global variable named `_cpal_audio_contexts`, which
|
||||
// is an array of `AudioContext` objects. A stream ID corresponds to an entry in this array.
|
||||
//
|
||||
// Creating a stream creates a new `AudioContext`. Destroying a stream destroys it.
|
||||
|
||||
// TODO: handle latency better ; right now we just use setInterval with the amount of sound data
|
||||
// that is in each buffer ; this is obviously bad, and also the schedule is too tight and there may
|
||||
// be underflows
|
||||
|
||||
pub struct EventLoop {
|
||||
streams: Mutex<Vec<Option<Reference>>>,
|
||||
}
|
||||
|
||||
impl EventLoop {
|
||||
#[inline]
|
||||
pub fn new() -> EventLoop {
|
||||
stdweb::initialize();
|
||||
EventLoop {
|
||||
streams: Mutex::new(Vec::new()),
|
||||
}
|
||||
impl StreamTrait for Stream {
    fn play(&self) -> Result<(), PlayStreamError> {
        let audio_ctxt = &self.audio_ctxt_ref;
        js!(@{audio_ctxt}.resume());
        Ok(())
    }

    #[inline]
    fn run<F>(&self, callback: F) -> !
        where F: FnMut(StreamId, StreamDataResult),
    {
        // The `run` function uses `set_timeout` to invoke a Rust callback repeatedly. The job
        // of this callback is to fill the content of the audio buffers.
    fn pause(&self) -> Result<(), PauseStreamError> {
        let audio_ctxt = &self.audio_ctxt_ref;
        js!(@{audio_ctxt}.suspend());
        Ok(())
    }
}

// The first argument of the callback function (a `void*`) is a cast pointer to `self`
// and to the `callback` parameter that was passed to `run`.
// The first argument of the callback function (a `void*`) is a cast pointer to `self`
// and to the `callback` parameter that was passed to `run`.
fn audio_callback_fn<D, E>(user_data_ptr: *mut c_void)
where
    D: FnMut(StreamData) + Send + 'static,
    E: FnMut(StreamError) + Send + 'static,
{
    unsafe {
        let user_data_ptr2 = user_data_ptr as *mut (&Stream, D, E);
        let user_data = &mut *user_data_ptr2;
        let (ref stream, ref mut data_cb, ref mut _err_cb) = user_data;
        let audio_ctxt = &stream.audio_ctxt_ref;

        // TODO: We should be re-using a buffer.
        let mut temporary_buffer = vec![0.0; 44100 * 2 / 3];

fn callback_fn<F>(user_data_ptr: *mut c_void)
|
||||
where F: FnMut(StreamId, StreamDataResult)
|
||||
{
|
||||
unsafe {
|
||||
let user_data_ptr2 = user_data_ptr as *mut (&EventLoop, F);
|
||||
let user_data = &mut *user_data_ptr2;
|
||||
let user_cb = &mut user_data.1;
|
||||
|
||||
let streams = user_data.0.streams.lock().unwrap().clone();
|
||||
for (stream_id, stream) in streams.iter().enumerate() {
|
||||
let stream = match stream.as_ref() {
|
||||
Some(v) => v,
|
||||
None => continue,
|
||||
};
|
||||
|
||||
let mut temporary_buffer = vec![0.0; 44100 * 2 / 3];
|
||||
|
||||
{
|
||||
let buffer = UnknownTypeOutputBuffer::F32(::OutputBuffer { buffer: &mut temporary_buffer });
|
||||
let data = StreamData::Output { buffer: buffer };
|
||||
user_cb(StreamId(stream_id), Ok(data));
|
||||
// TODO: directly use a TypedArray<f32> once this is supported by stdweb
|
||||
}
|
||||
|
||||
let typed_array = {
|
||||
let f32_slice = temporary_buffer.as_slice();
|
||||
let u8_slice: &[u8] = from_raw_parts(
|
||||
f32_slice.as_ptr() as *const _,
|
||||
f32_slice.len() * mem::size_of::<f32>(),
|
||||
);
|
||||
let typed_array: TypedArray<u8> = u8_slice.into();
|
||||
typed_array
|
||||
};
|
||||
|
||||
let num_channels = 2u32; // TODO: correct value
|
||||
debug_assert_eq!(temporary_buffer.len() % num_channels as usize, 0);
|
||||
|
||||
js!(
|
||||
var src_buffer = new Float32Array(@{typed_array}.buffer);
|
||||
var context = @{stream};
|
||||
var buf_len = @{temporary_buffer.len() as u32};
|
||||
var num_channels = @{num_channels};
|
||||
|
||||
var buffer = context.createBuffer(num_channels, buf_len / num_channels, 44100);
|
||||
for (var channel = 0; channel < num_channels; ++channel) {
|
||||
var buffer_content = buffer.getChannelData(channel);
|
||||
for (var i = 0; i < buf_len / num_channels; ++i) {
|
||||
buffer_content[i] = src_buffer[i * num_channels + channel];
|
||||
}
|
||||
}
|
||||
|
||||
var node = context.createBufferSource();
|
||||
node.buffer = buffer;
|
||||
node.connect(context.destination);
|
||||
node.start();
|
||||
);
|
||||
}
|
||||
|
||||
set_timeout(|| callback_fn::<F>(user_data_ptr), 330);
|
||||
}
|
||||
let buffer = UnknownTypeOutputBuffer::F32(::OutputBuffer { buffer: &mut temporary_buffer });
|
||||
let data = StreamData::Output { buffer: buffer };
|
||||
data_cb(data);
|
||||
}
|
||||
|
||||
let mut user_data = (self, callback);
|
||||
let user_data_ptr = &mut user_data as *mut (_, _);
|
||||
|
||||
set_timeout(|| callback_fn::<F>(user_data_ptr as *mut _), 10);
|
||||
|
||||
stdweb::event_loop();
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn build_input_stream(&self, _: &Device, _format: &Format) -> Result<StreamId, BuildStreamError> {
|
||||
unimplemented!();
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn build_output_stream(&self, _: &Device, _format: &Format) -> Result<StreamId, BuildStreamError> {
|
||||
let stream = js!(return new AudioContext()).into_reference().unwrap();
|
||||
|
||||
let mut streams = self.streams.lock().unwrap();
|
||||
let stream_id = if let Some(pos) = streams.iter().position(|v| v.is_none()) {
|
||||
streams[pos] = Some(stream);
|
||||
pos
|
||||
} else {
|
||||
let l = streams.len();
|
||||
streams.push(Some(stream));
|
||||
l
|
||||
// TODO: directly use a TypedArray<f32> once this is supported by stdweb
|
||||
let typed_array = {
|
||||
let f32_slice = temporary_buffer.as_slice();
|
||||
let u8_slice: &[u8] = from_raw_parts(
|
||||
f32_slice.as_ptr() as *const _,
|
||||
f32_slice.len() * mem::size_of::<f32>(),
|
||||
);
|
||||
let typed_array: TypedArray<u8> = u8_slice.into();
|
||||
typed_array
|
||||
};
|
||||
|
||||
Ok(StreamId(stream_id))
|
||||
}
|
||||
let num_channels = 2u32; // TODO: correct value
|
||||
debug_assert_eq!(temporary_buffer.len() % num_channels as usize, 0);
|
||||
|
||||
#[inline]
|
||||
fn destroy_stream(&self, stream_id: StreamId) {
|
||||
self.streams.lock().unwrap()[stream_id.0] = None;
|
||||
}
|
||||
js!(
|
||||
var src_buffer = new Float32Array(@{typed_array}.buffer);
|
||||
var context = @{audio_ctxt};
|
||||
var buf_len = @{temporary_buffer.len() as u32};
|
||||
var num_channels = @{num_channels};
|
||||
|
||||
#[inline]
|
||||
fn play_stream(&self, stream_id: StreamId) -> Result<(), PlayStreamError> {
|
||||
let streams = self.streams.lock().unwrap();
|
||||
let stream = streams
|
||||
.get(stream_id.0)
|
||||
.and_then(|v| v.as_ref())
|
||||
.expect("invalid stream ID");
|
||||
js!(@{stream}.resume());
|
||||
Ok(())
|
||||
}
|
||||
var buffer = context.createBuffer(num_channels, buf_len / num_channels, 44100);
|
||||
for (var channel = 0; channel < num_channels; ++channel) {
|
||||
var buffer_content = buffer.getChannelData(channel);
|
||||
for (var i = 0; i < buf_len / num_channels; ++i) {
|
||||
buffer_content[i] = src_buffer[i * num_channels + channel];
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn pause_stream(&self, stream_id: StreamId) -> Result<(), PauseStreamError> {
|
||||
let streams = self.streams.lock().unwrap();
|
||||
let stream = streams
|
||||
.get(stream_id.0)
|
||||
.and_then(|v| v.as_ref())
|
||||
.expect("invalid stream ID");
|
||||
js!(@{stream}.suspend());
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
var node = context.createBufferSource();
|
||||
node.buffer = buffer;
|
||||
node.connect(context.destination);
|
||||
node.start();
|
||||
);
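The `js!` block above copies cpal's interleaved sample buffer into one Web Audio channel buffer per channel. The same index arithmetic in plain Rust, as a standalone illustration (buffer contents and sizes hypothetical):

    fn deinterleave(interleaved: &[f32], num_channels: usize) -> Vec<Vec<f32>> {
        let frames = interleaved.len() / num_channels;
        (0..num_channels)
            .map(|channel| {
                (0..frames)
                    .map(|i| interleaved[i * num_channels + channel])
                    .collect()
            })
            .collect()
    }

    // E.g. two interleaved stereo frames [L0, R0, L1, R1] become [[L0, L1], [R0, R1]],
    // matching `buffer_content[i] = src_buffer[i * num_channels + channel]` above.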
|
||||
|
||||
// Index within the `streams` array of the events loop.
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct StreamId(usize);
|
||||
|
||||
// Detects whether the `AudioContext` global variable is available.
|
||||
fn is_webaudio_available() -> bool {
|
||||
stdweb::initialize();
|
||||
|
||||
js!(if (!AudioContext) {
|
||||
return false;
|
||||
} else {
|
||||
return true;
|
||||
}).try_into()
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
// Content is false if the iterator is empty.
|
||||
pub struct Devices(bool);
|
||||
|
||||
impl Devices {
|
||||
fn new() -> Result<Self, DevicesError> {
|
||||
Ok(Self::default())
|
||||
// TODO: handle latency better ; right now we just use setInterval with the amount of sound
|
||||
// data that is in each buffer ; this is obviously bad, and also the schedule is too tight
|
||||
// and there may be underflows
|
||||
set_timeout(|| audio_callback_fn::<D, E>(user_data_ptr), 330);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -336,54 +309,13 @@ fn default_output_device() -> Option<Device> {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct Device;
|
||||
|
||||
impl Device {
|
||||
#[inline]
|
||||
fn name(&self) -> Result<String, DeviceNameError> {
|
||||
Ok("Default Device".to_owned())
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn supported_input_formats(&self) -> Result<SupportedInputFormats, SupportedFormatsError> {
|
||||
unimplemented!();
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn supported_output_formats(&self) -> Result<SupportedOutputFormats, SupportedFormatsError> {
|
||||
// TODO: right now cpal's API doesn't allow flexibility here.
// "44100" and "2" (channels) have also been hard-coded in the rest of the code; if
// this ever becomes more flexible, don't forget to change that.
// According to https://developer.mozilla.org/en-US/docs/Web/API/BaseAudioContext/createBuffer
// browsers must support at least 1 to 32 channels and 8,000 Hz to 96,000 Hz.
Ok(
|
||||
vec![
|
||||
SupportedFormat {
|
||||
channels: 2,
|
||||
min_sample_rate: ::SampleRate(44100),
|
||||
max_sample_rate: ::SampleRate(44100),
|
||||
data_type: ::SampleFormat::F32,
|
||||
},
|
||||
].into_iter(),
|
||||
)
|
||||
}
|
||||
|
||||
fn default_input_format(&self) -> Result<Format, DefaultFormatError> {
|
||||
unimplemented!();
|
||||
}
|
||||
|
||||
fn default_output_format(&self) -> Result<Format, DefaultFormatError> {
|
||||
// TODO: because it is hard coded, see supported_output_formats.
|
||||
Ok(
|
||||
Format {
|
||||
channels: 2,
|
||||
sample_rate: ::SampleRate(44100),
|
||||
data_type: ::SampleFormat::F32,
|
||||
},
|
||||
)
|
||||
}
|
||||
// Detects whether the `AudioContext` global variable is available.
|
||||
fn is_webaudio_available() -> bool {
|
||||
stdweb::initialize();
|
||||
js!(if (!AudioContext) {
|
||||
return false;
|
||||
} else {
|
||||
return true;
|
||||
}).try_into()
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
pub type SupportedInputFormats = ::std::vec::IntoIter<SupportedFormat>;
|
||||
pub type SupportedOutputFormats = ::std::vec::IntoIter<SupportedFormat>;
|
||||
|
|
|
@ -1,5 +1,3 @@
|
|||
#![allow(dead_code)]
|
||||
|
||||
use BuildStreamError;
|
||||
use DefaultFormatError;
|
||||
use DevicesError;
|
||||
|
@ -7,10 +5,11 @@ use DeviceNameError;
|
|||
use Format;
|
||||
use PauseStreamError;
|
||||
use PlayStreamError;
|
||||
use StreamDataResult;
|
||||
use StreamData;
|
||||
use StreamError;
|
||||
use SupportedFormatsError;
|
||||
use SupportedFormat;
|
||||
use traits::{DeviceTrait, EventLoopTrait, HostTrait, StreamIdTrait};
|
||||
use traits::{DeviceTrait, HostTrait, StreamTrait};
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct Devices;
|
||||
|
@ -18,17 +17,16 @@ pub struct Devices;
|
|||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct Device;
|
||||
|
||||
pub struct EventLoop;
|
||||
|
||||
pub struct Host;
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct StreamId;
|
||||
pub struct Stream;
|
||||
|
||||
pub struct SupportedInputFormats;
|
||||
pub struct SupportedOutputFormats;
|
||||
|
||||
impl Host {
|
||||
#[allow(dead_code)]
|
||||
pub fn new() -> Result<Self, crate::HostUnavailable> {
|
||||
Ok(Host)
|
||||
}
|
||||
|
@ -40,15 +38,10 @@ impl Devices {
|
|||
}
|
||||
}
|
||||
|
||||
impl EventLoop {
|
||||
pub fn new() -> EventLoop {
|
||||
EventLoop
|
||||
}
|
||||
}
|
||||
|
||||
impl DeviceTrait for Device {
|
||||
type SupportedInputFormats = SupportedInputFormats;
|
||||
type SupportedOutputFormats = SupportedOutputFormats;
|
||||
type Stream = Stream;
|
||||
|
||||
#[inline]
|
||||
fn name(&self) -> Result<String, DeviceNameError> {
|
||||
|
@ -74,49 +67,22 @@ impl DeviceTrait for Device {
|
|||
fn default_output_format(&self) -> Result<Format, DefaultFormatError> {
|
||||
unimplemented!()
|
||||
}
|
||||
}
|
||||
|
||||
impl EventLoopTrait for EventLoop {
|
||||
type Device = Device;
|
||||
type StreamId = StreamId;
|
||||
|
||||
#[inline]
|
||||
fn run<F>(&self, _callback: F) -> !
|
||||
where F: FnMut(StreamId, StreamDataResult)
|
||||
{
|
||||
loop { /* TODO: don't spin */ }
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn build_input_stream(&self, _: &Device, _: &Format) -> Result<StreamId, BuildStreamError> {
|
||||
Err(BuildStreamError::DeviceNotAvailable)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn build_output_stream(&self, _: &Device, _: &Format) -> Result<StreamId, BuildStreamError> {
|
||||
Err(BuildStreamError::DeviceNotAvailable)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn destroy_stream(&self, _: StreamId) {
|
||||
fn build_input_stream<D, E>(&self, _format: &Format, _data_callback: D, _error_callback: E) -> Result<Self::Stream, BuildStreamError>
|
||||
where D: FnMut(StreamData) + Send + 'static, E: FnMut(StreamError) + Send + 'static {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn play_stream(&self, _: StreamId) -> Result<(), PlayStreamError> {
|
||||
panic!()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn pause_stream(&self, _: StreamId) -> Result<(), PauseStreamError> {
|
||||
panic!()
|
||||
/// Create an output stream.
|
||||
fn build_output_stream<D, E>(&self, _format: &Format, _data_callback: D, _error_callback: E) -> Result<Self::Stream, BuildStreamError>
|
||||
where D: FnMut(StreamData) + Send + 'static, E: FnMut(StreamError) + Send + 'static{
|
||||
unimplemented!()
|
||||
}
|
||||
}
|
||||
|
||||
impl HostTrait for Host {
|
||||
type Device = Device;
|
||||
type Devices = Devices;
|
||||
type EventLoop = EventLoop;
|
||||
|
||||
fn is_available() -> bool {
|
||||
false
|
||||
|
@ -133,13 +99,17 @@ impl HostTrait for Host {
|
|||
fn default_output_device(&self) -> Option<Device> {
|
||||
None
|
||||
}
|
||||
|
||||
fn event_loop(&self) -> Self::EventLoop {
|
||||
EventLoop::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl StreamIdTrait for StreamId {}
|
||||
impl StreamTrait for Stream {
|
||||
fn play(&self) -> Result<(), PlayStreamError> {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
fn pause(&self) -> Result<(), PauseStreamError> {
|
||||
unimplemented!()
|
||||
}
|
||||
}
|
||||
|
||||
impl Iterator for Devices {
|
||||
type Item = Device;
|
||||
|
|
|
@ -3,8 +3,8 @@
|
|||
use super::check_result;
|
||||
use std::ptr;
|
||||
|
||||
use super::winapi::um::objbase::{COINIT_MULTITHREADED};
|
||||
use super::winapi::um::combaseapi::{CoInitializeEx, CoUninitialize};
|
||||
use super::winapi::um::objbase::COINIT_MULTITHREADED;
|
||||
|
||||
thread_local!(static COM_INITIALIZED: ComInitialized = {
|
||||
unsafe {
|
||||
|
|
|
@ -7,66 +7,54 @@ use std::ops::{Deref, DerefMut};
|
|||
use std::os::windows::ffi::OsStringExt;
|
||||
use std::ptr;
|
||||
use std::slice;
|
||||
use std::sync::{Arc, Mutex, MutexGuard};
|
||||
use std::sync::{Arc, Mutex, MutexGuard, atomic::Ordering};
|
||||
|
||||
use BackendSpecificError;
|
||||
use DefaultFormatError;
|
||||
use DeviceNameError;
|
||||
use DevicesError;
|
||||
use Format;
|
||||
use SupportedFormatsError;
|
||||
use SampleFormat;
|
||||
use SampleRate;
|
||||
use SupportedFormat;
|
||||
use SupportedFormatsError;
|
||||
use COMMON_SAMPLE_RATES;
|
||||
|
||||
use super::check_result;
|
||||
use super::check_result_backend_specific;
|
||||
use super::com;
|
||||
use super::winapi::Interface;
|
||||
use super::winapi::ctypes::c_void;
|
||||
use super::winapi::shared::devpkey;
|
||||
use super::winapi::shared::guiddef::GUID;
|
||||
use super::winapi::shared::ksmedia;
|
||||
use super::winapi::shared::guiddef::{
|
||||
GUID,
|
||||
};
|
||||
use super::winapi::shared::winerror;
|
||||
use super::winapi::shared::minwindef::{
|
||||
DWORD,
|
||||
};
|
||||
use super::winapi::shared::minwindef::{DWORD, WORD};
|
||||
use super::winapi::shared::mmreg;
|
||||
use super::winapi::shared::winerror;
|
||||
use super::winapi::shared::wtypes;
|
||||
use super::winapi::Interface;
|
||||
// https://msdn.microsoft.com/en-us/library/cc230355.aspx
|
||||
use super::winapi::um::winnt::LPWSTR;
|
||||
use super::winapi::um::winnt::WCHAR;
|
||||
use super::winapi::um::coml2api;
|
||||
use super::winapi::um::audioclient::{
|
||||
IAudioClient,
|
||||
IID_IAudioClient,
|
||||
AUDCLNT_E_DEVICE_INVALIDATED,
|
||||
self, IAudioClient, IID_IAudioClient, AUDCLNT_E_DEVICE_INVALIDATED,
|
||||
};
|
||||
use super::winapi::um::audiosessiontypes::{
|
||||
AUDCLNT_SHAREMODE_SHARED,
|
||||
AUDCLNT_SHAREMODE_SHARED, AUDCLNT_STREAMFLAGS_EVENTCALLBACK,
|
||||
};
|
||||
use super::winapi::um::combaseapi::{
|
||||
CoCreateInstance,
|
||||
CoTaskMemFree,
|
||||
CLSCTX_ALL,
|
||||
PropVariantClear,
|
||||
CoCreateInstance, CoTaskMemFree, PropVariantClear, CLSCTX_ALL,
|
||||
};
|
||||
use super::winapi::um::coml2api;
|
||||
use super::winapi::um::mmdeviceapi::{
|
||||
eAll,
|
||||
eCapture,
|
||||
eConsole,
|
||||
eRender,
|
||||
CLSID_MMDeviceEnumerator,
|
||||
DEVICE_STATE_ACTIVE,
|
||||
EDataFlow,
|
||||
IMMDevice,
|
||||
IMMDeviceCollection,
|
||||
IMMDeviceEnumerator,
|
||||
IMMEndpoint,
|
||||
eAll, eCapture, eConsole, eRender, CLSID_MMDeviceEnumerator, EDataFlow, IMMDevice,
|
||||
IMMDeviceCollection, IMMDeviceEnumerator, IMMEndpoint, DEVICE_STATE_ACTIVE,
|
||||
};
|
||||
use super::winapi::um::winnt::LPWSTR;
|
||||
use super::winapi::um::winnt::WCHAR;
|
||||
|
||||
use super::{
|
||||
stream::{AudioClientFlow, Stream, StreamInner},
|
||||
winapi::um::synchapi,
|
||||
};
|
||||
use crate::{traits::DeviceTrait, BuildStreamError, StreamData, StreamError};
|
||||
|
||||
pub type SupportedInputFormats = std::vec::IntoIter<SupportedFormat>;
|
||||
pub type SupportedOutputFormats = std::vec::IntoIter<SupportedFormat>;
|
||||
|
@ -74,10 +62,8 @@ pub type SupportedOutputFormats = std::vec::IntoIter<SupportedFormat>;
|
|||
/// Wrapper because of that stupid decision to remove `Send` and `Sync` from raw pointers.
|
||||
#[derive(Copy, Clone)]
|
||||
struct IAudioClientWrapper(*mut IAudioClient);
|
||||
unsafe impl Send for IAudioClientWrapper {
|
||||
}
|
||||
unsafe impl Sync for IAudioClientWrapper {
|
||||
}
|
||||
unsafe impl Send for IAudioClientWrapper {}
|
||||
unsafe impl Sync for IAudioClientWrapper {}
|
||||
|
||||
/// An opaque type that identifies an end point.
|
||||
pub struct Device {
|
||||
|
@ -87,6 +73,70 @@ pub struct Device {
|
|||
future_audio_client: Arc<Mutex<Option<IAudioClientWrapper>>>, // TODO: add NonZero around the ptr
|
||||
}
|
||||
|
||||
impl DeviceTrait for Device {
|
||||
type SupportedInputFormats = SupportedInputFormats;
|
||||
type SupportedOutputFormats = SupportedOutputFormats;
|
||||
type Stream = Stream;
|
||||
|
||||
fn name(&self) -> Result<String, DeviceNameError> {
|
||||
Device::name(self)
|
||||
}
|
||||
|
||||
fn supported_input_formats(
|
||||
&self,
|
||||
) -> Result<Self::SupportedInputFormats, SupportedFormatsError> {
|
||||
Device::supported_input_formats(self)
|
||||
}
|
||||
|
||||
fn supported_output_formats(
|
||||
&self,
|
||||
) -> Result<Self::SupportedOutputFormats, SupportedFormatsError> {
|
||||
Device::supported_output_formats(self)
|
||||
}
|
||||
|
||||
fn default_input_format(&self) -> Result<Format, DefaultFormatError> {
|
||||
Device::default_input_format(self)
|
||||
}
|
||||
|
||||
fn default_output_format(&self) -> Result<Format, DefaultFormatError> {
|
||||
Device::default_output_format(self)
|
||||
}
|
||||
|
||||
fn build_input_stream<D, E>(
|
||||
&self,
|
||||
format: &Format,
|
||||
data_callback: D,
|
||||
error_callback: E,
|
||||
) -> Result<Self::Stream, BuildStreamError>
|
||||
where
|
||||
D: FnMut(StreamData) + Send + 'static,
|
||||
E: FnMut(StreamError) + Send + 'static,
|
||||
{
|
||||
Ok(Stream::new(
|
||||
self.build_input_stream_inner(format)?,
|
||||
data_callback,
|
||||
error_callback,
|
||||
))
|
||||
}
|
||||
|
||||
fn build_output_stream<D, E>(
|
||||
&self,
|
||||
format: &Format,
|
||||
data_callback: D,
|
||||
error_callback: E,
|
||||
) -> Result<Self::Stream, BuildStreamError>
|
||||
where
|
||||
D: FnMut(StreamData) + Send + 'static,
|
||||
E: FnMut(StreamError) + Send + 'static,
|
||||
{
|
||||
Ok(Stream::new(
|
||||
self.build_output_stream_inner(format)?,
|
||||
data_callback,
|
||||
error_callback,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
struct Endpoint {
|
||||
endpoint: *mut IMMEndpoint,
|
||||
}
|
||||
|
@ -107,7 +157,6 @@ impl Drop for WaveFormatExPtr {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
impl WaveFormat {
|
||||
// Given a pointer to some format, returns a valid copy of the format.
|
||||
pub fn copy_from_waveformatex_ptr(ptr: *const mmreg::WAVEFORMATEX) -> Option<Self> {
|
||||
|
@ -115,11 +164,11 @@ impl WaveFormat {
|
|||
match (*ptr).wFormatTag {
|
||||
mmreg::WAVE_FORMAT_PCM | mmreg::WAVE_FORMAT_IEEE_FLOAT => {
|
||||
Some(WaveFormat::Ex(*ptr))
|
||||
},
|
||||
}
|
||||
mmreg::WAVE_FORMAT_EXTENSIBLE => {
|
||||
let extensible_ptr = ptr as *const mmreg::WAVEFORMATEXTENSIBLE;
|
||||
Some(WaveFormat::Extensible(*extensible_ptr))
|
||||
},
|
||||
}
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
@ -150,11 +199,12 @@ impl DerefMut for WaveFormat {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
unsafe fn immendpoint_from_immdevice(device: *const IMMDevice) -> *mut IMMEndpoint {
|
||||
let mut endpoint: *mut IMMEndpoint = mem::uninitialized();
|
||||
check_result((*device).QueryInterface(&IMMEndpoint::uuidof(), &mut endpoint as *mut _ as *mut _))
|
||||
.expect("could not query IMMDevice interface for IMMEndpoint");
|
||||
check_result(
|
||||
(*device).QueryInterface(&IMMEndpoint::uuidof(), &mut endpoint as *mut _ as *mut _),
|
||||
)
|
||||
.expect("could not query IMMDevice interface for IMMEndpoint");
|
||||
endpoint
|
||||
}
|
||||
|
||||
|
@ -169,10 +219,7 @@ unsafe fn data_flow_from_immendpoint(endpoint: *const IMMEndpoint) -> EDataFlow
|
|||
pub unsafe fn is_format_supported(
|
||||
client: *const IAudioClient,
|
||||
waveformatex_ptr: *const mmreg::WAVEFORMATEX,
|
||||
) -> Result<bool, SupportedFormatsError>
|
||||
{
|
||||
|
||||
|
||||
) -> Result<bool, SupportedFormatsError> {
|
||||
/*
|
||||
// `IsFormatSupported` checks whether the format is supported and fills
|
||||
// a `WAVEFORMATEX`
|
||||
|
@ -205,7 +252,6 @@ pub unsafe fn is_format_supported(
|
|||
};
|
||||
*/
|
||||
|
||||
|
||||
// Check if the given format is supported.
|
||||
let is_supported = |waveformatex_ptr, mut closest_waveformatex_ptr| {
|
||||
let result = (*client).IsFormatSupported(
|
||||
|
@ -217,17 +263,11 @@ pub unsafe fn is_format_supported(
|
|||
// has been found, but not an exact match) so we also treat this as unsupported.
|
||||
match (result, check_result(result)) {
|
||||
(_, Err(ref e)) if e.raw_os_error() == Some(AUDCLNT_E_DEVICE_INVALIDATED) => {
|
||||
return Err(SupportedFormatsError::DeviceNotAvailable);
|
||||
},
|
||||
(_, Err(_)) => {
|
||||
Ok(false)
|
||||
},
|
||||
(winerror::S_FALSE, _) => {
|
||||
Ok(false)
|
||||
},
|
||||
(_, Ok(())) => {
|
||||
Ok(true)
|
||||
},
|
||||
Err(SupportedFormatsError::DeviceNotAvailable)
|
||||
}
|
||||
(_, Err(_)) => Ok(false),
|
||||
(winerror::S_FALSE, _) => Ok(false),
|
||||
(_, Ok(())) => Ok(true),
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -240,34 +280,30 @@ pub unsafe fn is_format_supported(
|
|||
let mut closest_waveformatex = *waveformatex_ptr;
|
||||
let closest_waveformatex_ptr = &mut closest_waveformatex as *mut _;
|
||||
is_supported(waveformatex_ptr, closest_waveformatex_ptr)
|
||||
},
|
||||
}
|
||||
mmreg::WAVE_FORMAT_EXTENSIBLE => {
|
||||
let waveformatextensible_ptr =
|
||||
waveformatex_ptr as *const mmreg::WAVEFORMATEXTENSIBLE;
|
||||
let waveformatextensible_ptr = waveformatex_ptr as *const mmreg::WAVEFORMATEXTENSIBLE;
|
||||
let mut closest_waveformatextensible = *waveformatextensible_ptr;
|
||||
let closest_waveformatextensible_ptr =
|
||||
&mut closest_waveformatextensible as *mut _;
|
||||
let closest_waveformatextensible_ptr = &mut closest_waveformatextensible as *mut _;
|
||||
let closest_waveformatex_ptr =
|
||||
closest_waveformatextensible_ptr as *mut mmreg::WAVEFORMATEX;
|
||||
is_supported(waveformatex_ptr, closest_waveformatex_ptr)
|
||||
},
|
||||
}
|
||||
_ => Ok(false),
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Get a cpal Format from a WAVEFORMATEX.
|
||||
unsafe fn format_from_waveformatex_ptr(
|
||||
waveformatex_ptr: *const mmreg::WAVEFORMATEX,
|
||||
) -> Option<Format>
|
||||
{
|
||||
) -> Option<Format> {
|
||||
fn cmp_guid(a: &GUID, b: &GUID) -> bool {
|
||||
a.Data1 == b.Data1
|
||||
&& a.Data2 == b.Data2
|
||||
&& a.Data3 == b.Data3
|
||||
&& a.Data4 == b.Data4
|
||||
a.Data1 == b.Data1 && a.Data2 == b.Data2 && a.Data3 == b.Data3 && a.Data4 == b.Data4
|
||||
}
|
||||
let data_type = match ((*waveformatex_ptr).wBitsPerSample, (*waveformatex_ptr).wFormatTag) {
|
||||
let data_type = match (
|
||||
(*waveformatex_ptr).wBitsPerSample,
|
||||
(*waveformatex_ptr).wFormatTag,
|
||||
) {
|
||||
(16, mmreg::WAVE_FORMAT_PCM) => SampleFormat::I16,
|
||||
(32, mmreg::WAVE_FORMAT_IEEE_FLOAT) => SampleFormat::F32,
|
||||
(n_bits, mmreg::WAVE_FORMAT_EXTENSIBLE) => {
|
||||
|
@ -280,22 +316,20 @@ unsafe fn format_from_waveformatex_ptr(
|
|||
} else {
|
||||
return None;
|
||||
}
|
||||
},
|
||||
}
|
||||
// Unknown data format returned by GetMixFormat.
|
||||
_ => return None,
|
||||
};
|
||||
let format = Format {
|
||||
channels: (*waveformatex_ptr).nChannels as _,
|
||||
sample_rate: SampleRate((*waveformatex_ptr).nSamplesPerSec),
|
||||
data_type: data_type,
|
||||
data_type,
|
||||
};
|
||||
Some(format)
|
||||
}
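For reference, the `(wBitsPerSample, wFormatTag)` mapping used above, restated in isolation. The constants are the standard `mmreg` values; the `WAVE_FORMAT_EXTENSIBLE` case, which additionally inspects the sub-format GUID, is left out of this sketch:

    #[derive(Debug, PartialEq)]
    enum SampleFormat {
        I16,
        F32,
    }

    fn sample_format(bits_per_sample: u16, format_tag: u16) -> Option<SampleFormat> {
        const WAVE_FORMAT_PCM: u16 = 0x0001;
        const WAVE_FORMAT_IEEE_FLOAT: u16 = 0x0003;
        match (bits_per_sample, format_tag) {
            (16, WAVE_FORMAT_PCM) => Some(SampleFormat::I16),
            (32, WAVE_FORMAT_IEEE_FLOAT) => Some(SampleFormat::F32),
            _ => None, // WAVE_FORMAT_EXTENSIBLE is resolved via its sub-format GUID instead.
        }
    }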
|
||||
|
||||
unsafe impl Send for Device {
|
||||
}
|
||||
unsafe impl Sync for Device {
|
||||
}
|
||||
unsafe impl Send for Device {}
|
||||
unsafe impl Sync for Device {}
|
||||
|
||||
impl Device {
|
||||
pub fn name(&self) -> Result<String, DeviceNameError> {
|
||||
|
@ -306,12 +340,10 @@ impl Device {
|
|||
|
||||
// Get the endpoint's friendly-name property.
|
||||
let mut property_value = mem::zeroed();
|
||||
if let Err(err) = check_result(
|
||||
(*property_store).GetValue(
|
||||
&devpkey::DEVPKEY_Device_FriendlyName as *const _ as *const _,
|
||||
&mut property_value
|
||||
)
|
||||
) {
|
||||
if let Err(err) = check_result((*property_store).GetValue(
|
||||
&devpkey::DEVPKEY_Device_FriendlyName as *const _ as *const _,
|
||||
&mut property_value,
|
||||
)) {
|
||||
let description = format!("failed to retrieve name from property store: {}", err);
|
||||
let err = BackendSpecificError { description };
|
||||
return Err(err.into());
|
||||
|
@ -319,13 +351,14 @@ impl Device {
|
|||
|
||||
// Read the friendly-name from the union data field, expecting a *const u16.
|
||||
if property_value.vt != wtypes::VT_LPWSTR as _ {
|
||||
let description =
|
||||
format!("property store produced invalid data: {:?}", property_value.vt);
|
||||
let description = format!(
|
||||
"property store produced invalid data: {:?}",
|
||||
property_value.vt
|
||||
);
|
||||
let err = BackendSpecificError { description };
|
||||
return Err(err.into());
|
||||
}
|
||||
let ptr_usize: usize = *(&property_value.data as *const _ as *const usize);
|
||||
let ptr_utf16 = ptr_usize as *const u16;
|
||||
let ptr_utf16 = *(&property_value.data as *const _ as *const (*const u16));
|
||||
|
||||
// Find the length of the friendly name.
|
||||
let mut len = 0;
|
||||
|
@ -351,14 +384,15 @@ impl Device {
|
|||
#[inline]
|
||||
fn from_immdevice(device: *mut IMMDevice) -> Self {
|
||||
Device {
|
||||
device: device,
|
||||
device,
|
||||
future_audio_client: Arc::new(Mutex::new(None)),
|
||||
}
|
||||
}
|
||||
|
||||
/// Ensures that `future_audio_client` contains a `Some` and returns a locked mutex to it.
|
||||
fn ensure_future_audio_client(&self)
|
||||
-> Result<MutexGuard<Option<IAudioClientWrapper>>, IoError> {
|
||||
fn ensure_future_audio_client(
|
||||
&self,
|
||||
) -> Result<MutexGuard<Option<IAudioClientWrapper>>, IoError> {
|
||||
let mut lock = self.future_audio_client.lock().unwrap();
|
||||
if lock.is_some() {
|
||||
return Ok(lock);
|
||||
|
@ -366,10 +400,12 @@ impl Device {
|
|||
|
||||
let audio_client: *mut IAudioClient = unsafe {
|
||||
let mut audio_client = mem::uninitialized();
|
||||
let hresult = (*self.device).Activate(&IID_IAudioClient,
|
||||
CLSCTX_ALL,
|
||||
ptr::null_mut(),
|
||||
&mut audio_client);
|
||||
let hresult = (*self.device).Activate(
|
||||
&IID_IAudioClient,
|
||||
CLSCTX_ALL,
|
||||
ptr::null_mut(),
|
||||
&mut audio_client,
|
||||
);
|
||||
|
||||
// can fail if the device has been disconnected since we enumerated it, or if
|
||||
// the device doesn't support playback for some reason
|
||||
|
@ -416,7 +452,7 @@ impl Device {
|
|||
let description = format!("{}", e);
|
||||
let err = BackendSpecificError { description };
|
||||
return Err(err.into());
|
||||
},
|
||||
}
|
||||
};
|
||||
let client = lock.unwrap().0;
|
||||
|
||||
|
@ -427,16 +463,19 @@ impl Device {
|
|||
Ok(()) => (),
|
||||
Err(ref e) if e.raw_os_error() == Some(AUDCLNT_E_DEVICE_INVALIDATED) => {
|
||||
return Err(SupportedFormatsError::DeviceNotAvailable);
|
||||
},
|
||||
}
|
||||
Err(e) => {
|
||||
let description = format!("{}", e);
|
||||
let err = BackendSpecificError { description };
|
||||
return Err(err.into());
|
||||
},
|
||||
}
|
||||
};
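The three-way match above (device invalidated, other error, success) recurs for nearly every WASAPI call in this file. A hypothetical helper capturing that shape, purely for illustration; the patch keeps the matches inline, presumably so that per-call cleanup such as `Release()` can run before returning:

    use std::io;

    // `result` would come from `check_result(hresult)`; `invalidated_code` would be
    // AUDCLNT_E_DEVICE_INVALIDATED in the real code.
    fn map_hresult<T, E>(
        result: Result<(), io::Error>,
        invalidated_code: i32,
        ok: T,
        device_not_available: impl FnOnce() -> E,
        backend_specific: impl FnOnce(String) -> E,
    ) -> Result<T, E> {
        match result {
            Ok(()) => Ok(ok),
            Err(ref e) if e.raw_os_error() == Some(invalidated_code) => Err(device_not_available()),
            Err(e) => Err(backend_specific(format!("{}", e))),
        }
    }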
|
||||
|
||||
// If the default format can't succeed we have no hope of finding other formats.
|
||||
assert_eq!(try!(is_format_supported(client, default_waveformatex_ptr.0)), true);
|
||||
assert_eq!(
|
||||
is_format_supported(client, default_waveformatex_ptr.0)?,
|
||||
true
|
||||
);
|
||||
|
||||
// Copy the format to use as a test format (as to avoid mutating the original format).
|
||||
let mut test_format = {
|
||||
|
@ -457,8 +496,8 @@ impl Device {
|
|||
let rate = rate.0 as DWORD;
|
||||
test_format.nSamplesPerSec = rate;
|
||||
test_format.nAvgBytesPerSec =
|
||||
rate * (*default_waveformatex_ptr.0).nBlockAlign as DWORD;
|
||||
if try!(is_format_supported(client, test_format.as_ptr())) {
|
||||
rate * u32::from((*default_waveformatex_ptr.0).nBlockAlign);
|
||||
if is_format_supported(client, test_format.as_ptr())? {
|
||||
supported_sample_rates.push(rate);
|
||||
}
|
||||
}
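The loop above simply filters `COMMON_SAMPLE_RATES` through the device's format check. The same logic in isolation, with a `supports` closure standing in for the unsafe `is_format_supported` call on a copy of the mix format:

    fn probe_supported_rates(common_rates: &[u32], supports: impl Fn(u32) -> bool) -> Vec<u32> {
        common_rates
            .iter()
            .copied()
            .filter(|&rate| supports(rate))
            .collect()
    }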
|
||||
|
@ -503,7 +542,9 @@ impl Device {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn supported_output_formats(&self) -> Result<SupportedOutputFormats, SupportedFormatsError> {
|
||||
pub fn supported_output_formats(
|
||||
&self,
|
||||
) -> Result<SupportedOutputFormats, SupportedFormatsError> {
|
||||
if self.data_flow() == eRender {
|
||||
self.supported_formats()
|
||||
// If it's an input device, assume no output formats.
|
||||
|
@ -538,12 +579,12 @@ impl Device {
|
|||
match check_result((*client).GetMixFormat(&mut format_ptr.0)) {
|
||||
Err(ref e) if e.raw_os_error() == Some(AUDCLNT_E_DEVICE_INVALIDATED) => {
|
||||
return Err(DefaultFormatError::DeviceNotAvailable);
|
||||
},
|
||||
}
|
||||
Err(e) => {
|
||||
let description = format!("{}", e);
|
||||
let err = BackendSpecificError { description };
|
||||
return Err(err.into());
|
||||
},
|
||||
}
|
||||
Ok(()) => (),
|
||||
};
|
||||
|
||||
|
@ -573,6 +614,295 @@ impl Device {
|
|||
Err(DefaultFormatError::StreamTypeNotSupported)
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn build_input_stream_inner(
|
||||
&self,
|
||||
format: &Format,
|
||||
) -> Result<StreamInner, BuildStreamError> {
|
||||
unsafe {
|
||||
// Making sure that COM is initialized.
|
||||
// It isn't certain that this is required, but better safe than sorry.
|
||||
com::com_initialized();
|
||||
|
||||
// Obtaining a `IAudioClient`.
|
||||
let audio_client = match self.build_audioclient() {
|
||||
Ok(client) => client,
|
||||
Err(ref e) if e.raw_os_error() == Some(AUDCLNT_E_DEVICE_INVALIDATED) => {
|
||||
return Err(BuildStreamError::DeviceNotAvailable)
|
||||
}
|
||||
Err(e) => {
|
||||
let description = format!("{}", e);
|
||||
let err = BackendSpecificError { description };
|
||||
return Err(err.into());
|
||||
}
|
||||
};
|
||||
|
||||
// Computing the format and initializing the device.
|
||||
let waveformatex = {
|
||||
let format_attempt = format_to_waveformatextensible(format)
|
||||
.ok_or(BuildStreamError::FormatNotSupported)?;
|
||||
let share_mode = AUDCLNT_SHAREMODE_SHARED;
|
||||
|
||||
// Ensure the format is supported.
|
||||
match super::device::is_format_supported(audio_client, &format_attempt.Format) {
|
||||
Ok(false) => return Err(BuildStreamError::FormatNotSupported),
|
||||
Err(_) => return Err(BuildStreamError::DeviceNotAvailable),
|
||||
_ => (),
|
||||
}
|
||||
|
||||
// finally initializing the audio client
|
||||
let hresult = (*audio_client).Initialize(
|
||||
share_mode,
|
||||
AUDCLNT_STREAMFLAGS_EVENTCALLBACK,
|
||||
0,
|
||||
0,
|
||||
&format_attempt.Format,
|
||||
ptr::null(),
|
||||
);
|
||||
match check_result(hresult) {
|
||||
Err(ref e) if e.raw_os_error() == Some(AUDCLNT_E_DEVICE_INVALIDATED) => {
|
||||
(*audio_client).Release();
|
||||
return Err(BuildStreamError::DeviceNotAvailable);
|
||||
}
|
||||
Err(e) => {
|
||||
(*audio_client).Release();
|
||||
let description = format!("{}", e);
|
||||
let err = BackendSpecificError { description };
|
||||
return Err(err.into());
|
||||
}
|
||||
Ok(()) => (),
|
||||
};
|
||||
|
||||
format_attempt.Format
|
||||
};
|
||||
|
||||
// obtaining the size of the samples buffer in number of frames
|
||||
let max_frames_in_buffer = {
|
||||
let mut max_frames_in_buffer = mem::uninitialized();
|
||||
let hresult = (*audio_client).GetBufferSize(&mut max_frames_in_buffer);
|
||||
|
||||
match check_result(hresult) {
|
||||
Err(ref e) if e.raw_os_error() == Some(AUDCLNT_E_DEVICE_INVALIDATED) => {
|
||||
(*audio_client).Release();
|
||||
return Err(BuildStreamError::DeviceNotAvailable);
|
||||
}
|
||||
Err(e) => {
|
||||
(*audio_client).Release();
|
||||
let description = format!("{}", e);
|
||||
let err = BackendSpecificError { description };
|
||||
return Err(err.into());
|
||||
}
|
||||
Ok(()) => (),
|
||||
};
|
||||
|
||||
max_frames_in_buffer
|
||||
};
|
||||
|
||||
// Creating the event that will be signalled whenever we need to submit some samples.
|
||||
let event = {
|
||||
let event = synchapi::CreateEventA(ptr::null_mut(), 0, 0, ptr::null());
|
||||
if event.is_null() {
|
||||
(*audio_client).Release();
|
||||
let description = "failed to create event".to_string();
|
||||
let err = BackendSpecificError { description };
|
||||
return Err(err.into());
|
||||
}
|
||||
|
||||
if let Err(e) = check_result((*audio_client).SetEventHandle(event)) {
|
||||
(*audio_client).Release();
|
||||
let description = format!("failed to call SetEventHandle: {}", e);
|
||||
let err = BackendSpecificError { description };
|
||||
return Err(err.into());
|
||||
}
|
||||
|
||||
event
|
||||
};
|
||||
|
||||
// Building a `IAudioCaptureClient` that will be used to read captured samples.
|
||||
let capture_client = {
|
||||
let mut capture_client: *mut audioclient::IAudioCaptureClient =
|
||||
mem::uninitialized();
|
||||
let hresult = (*audio_client).GetService(
|
||||
&audioclient::IID_IAudioCaptureClient,
|
||||
&mut capture_client as *mut *mut audioclient::IAudioCaptureClient as *mut _,
|
||||
);
|
||||
|
||||
match check_result(hresult) {
|
||||
Err(ref e) if e.raw_os_error() == Some(AUDCLNT_E_DEVICE_INVALIDATED) => {
|
||||
(*audio_client).Release();
|
||||
return Err(BuildStreamError::DeviceNotAvailable);
|
||||
}
|
||||
Err(e) => {
|
||||
(*audio_client).Release();
|
||||
let description = format!("failed to build capture client: {}", e);
|
||||
let err = BackendSpecificError { description };
|
||||
return Err(err.into());
|
||||
}
|
||||
Ok(()) => (),
|
||||
};
|
||||
|
||||
&mut *capture_client
|
||||
};
|
||||
|
||||
// Once we built the `StreamInner`, we add a command that will be picked up by the
|
||||
// `run()` method and added to the `RunContext`.
|
||||
let client_flow = AudioClientFlow::Capture { capture_client };
|
||||
|
||||
Ok(StreamInner {
|
||||
audio_client,
|
||||
client_flow,
|
||||
event,
|
||||
playing: false,
|
||||
max_frames_in_buffer,
|
||||
bytes_per_frame: waveformatex.nBlockAlign,
|
||||
sample_format: format.data_type,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn build_output_stream_inner(
|
||||
&self,
|
||||
format: &Format,
|
||||
) -> Result<StreamInner, BuildStreamError> {
|
||||
unsafe {
|
||||
// Making sure that COM is initialized.
|
||||
// It is not certain that this is required, but when in doubt, do it.
|
||||
com::com_initialized();
|
||||
|
||||
// Obtaining a `IAudioClient`.
|
||||
let audio_client = match self.build_audioclient() {
|
||||
Ok(client) => client,
|
||||
Err(ref e) if e.raw_os_error() == Some(AUDCLNT_E_DEVICE_INVALIDATED) => {
|
||||
return Err(BuildStreamError::DeviceNotAvailable)
|
||||
}
|
||||
Err(e) => {
|
||||
let description = format!("{}", e);
|
||||
let err = BackendSpecificError { description };
|
||||
return Err(err.into());
|
||||
}
|
||||
};
|
||||
|
||||
// Computing the format and initializing the device.
|
||||
let waveformatex = {
|
||||
let format_attempt = format_to_waveformatextensible(format)
|
||||
.ok_or(BuildStreamError::FormatNotSupported)?;
|
||||
let share_mode = AUDCLNT_SHAREMODE_SHARED;
|
||||
|
||||
// Ensure the format is supported.
|
||||
match super::device::is_format_supported(audio_client, &format_attempt.Format) {
|
||||
Ok(false) => return Err(BuildStreamError::FormatNotSupported),
|
||||
Err(_) => return Err(BuildStreamError::DeviceNotAvailable),
|
||||
_ => (),
|
||||
}
|
||||
|
||||
// finally initializing the audio client
|
||||
let hresult = (*audio_client).Initialize(
|
||||
share_mode,
|
||||
AUDCLNT_STREAMFLAGS_EVENTCALLBACK,
|
||||
0,
|
||||
0,
|
||||
&format_attempt.Format,
|
||||
ptr::null(),
|
||||
);
|
||||
match check_result(hresult) {
|
||||
Err(ref e) if e.raw_os_error() == Some(AUDCLNT_E_DEVICE_INVALIDATED) => {
|
||||
(*audio_client).Release();
|
||||
return Err(BuildStreamError::DeviceNotAvailable);
|
||||
}
|
||||
Err(e) => {
|
||||
(*audio_client).Release();
|
||||
let description = format!("{}", e);
|
||||
let err = BackendSpecificError { description };
|
||||
return Err(err.into());
|
||||
}
|
||||
Ok(()) => (),
|
||||
};
|
||||
|
||||
format_attempt.Format
|
||||
};
|
||||
|
||||
// Creating the event that will be signalled whenever we need to submit some samples.
|
||||
let event = {
|
||||
let event = synchapi::CreateEventA(ptr::null_mut(), 0, 0, ptr::null());
|
||||
if event.is_null() {
|
||||
(*audio_client).Release();
|
||||
let description = "failed to create event".to_string();
|
||||
let err = BackendSpecificError { description };
|
||||
return Err(err.into());
|
||||
}
|
||||
|
||||
if let Err(e) = check_result((*audio_client).SetEventHandle(event)) {
|
||||
(*audio_client).Release();
|
||||
let description = format!("failed to call SetEventHandle: {}", e);
|
||||
let err = BackendSpecificError { description };
|
||||
return Err(err.into());
|
||||
};
|
||||
|
||||
event
|
||||
};
|
||||
|
||||
// obtaining the size of the samples buffer in number of frames
|
||||
let max_frames_in_buffer = {
|
||||
let mut max_frames_in_buffer = mem::uninitialized();
|
||||
let hresult = (*audio_client).GetBufferSize(&mut max_frames_in_buffer);
|
||||
|
||||
match check_result(hresult) {
|
||||
Err(ref e) if e.raw_os_error() == Some(AUDCLNT_E_DEVICE_INVALIDATED) => {
|
||||
(*audio_client).Release();
|
||||
return Err(BuildStreamError::DeviceNotAvailable);
|
||||
}
|
||||
Err(e) => {
|
||||
(*audio_client).Release();
|
||||
let description = format!("failed to obtain buffer size: {}", e);
|
||||
let err = BackendSpecificError { description };
|
||||
return Err(err.into());
|
||||
}
|
||||
Ok(()) => (),
|
||||
};
|
||||
|
||||
max_frames_in_buffer
|
||||
};
|
||||
|
||||
// Building a `IAudioRenderClient` that will be used to fill the samples buffer.
|
||||
let render_client = {
|
||||
let mut render_client: *mut audioclient::IAudioRenderClient = mem::uninitialized();
|
||||
let hresult = (*audio_client).GetService(
|
||||
&audioclient::IID_IAudioRenderClient,
|
||||
&mut render_client as *mut *mut audioclient::IAudioRenderClient as *mut _,
|
||||
);
|
||||
|
||||
match check_result(hresult) {
|
||||
Err(ref e) if e.raw_os_error() == Some(AUDCLNT_E_DEVICE_INVALIDATED) => {
|
||||
(*audio_client).Release();
|
||||
return Err(BuildStreamError::DeviceNotAvailable);
|
||||
}
|
||||
Err(e) => {
|
||||
(*audio_client).Release();
|
||||
let description = format!("failed to build render client: {}", e);
|
||||
let err = BackendSpecificError { description };
|
||||
return Err(err.into());
|
||||
}
|
||||
Ok(()) => (),
|
||||
};
|
||||
|
||||
&mut *render_client
|
||||
};
|
||||
|
||||
// Once we built the `StreamInner`, we add a command that will be picked up by the
|
||||
// `run()` method and added to the `RunContext`.
|
||||
let client_flow = AudioClientFlow::Render { render_client };
|
||||
|
||||
Ok(StreamInner {
|
||||
audio_client,
|
||||
client_flow,
|
||||
event,
|
||||
playing: false,
|
||||
max_frames_in_buffer,
|
||||
bytes_per_frame: waveformatex.nBlockAlign,
|
||||
sample_format: format.data_type,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
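Both stream builders above repeat the same HRESULT handling: check the result, release the `IAudioClient` on failure, map `AUDCLNT_E_DEVICE_INVALIDATED` to `DeviceNotAvailable`, and wrap anything else in a backend-specific error. The sketch below shows how that pattern could be factored into a helper; it is not part of this diff, and the `HRESULT` alias, error enum and `check_result` here are simplified stand-ins for the crate-internal items (the constant is the standard `AUDCLNT_E_DEVICE_INVALIDATED` value, `0x88890004`).

```rust
use std::io::Error as IoError;

// Simplified stand-ins for the crate-internal types used in the diff above.
type HRESULT = i32; // stand-in for winapi::um::winnt::HRESULT
const AUDCLNT_E_DEVICE_INVALIDATED: HRESULT = 0x8889_0004_u32 as i32;

#[derive(Debug)]
enum BuildStreamError {
    DeviceNotAvailable,
    BackendSpecific { description: String },
}

fn check_result(result: HRESULT) -> Result<(), IoError> {
    if result < 0 {
        Err(IoError::from_raw_os_error(result))
    } else {
        Ok(())
    }
}

/// Check an HRESULT, running `cleanup` (e.g. releasing the audio client) on
/// any failure before mapping the error.
fn map_hresult(result: HRESULT, cleanup: impl FnOnce()) -> Result<(), BuildStreamError> {
    match check_result(result) {
        Ok(()) => Ok(()),
        Err(ref e) if e.raw_os_error() == Some(AUDCLNT_E_DEVICE_INVALIDATED) => {
            cleanup();
            Err(BuildStreamError::DeviceNotAvailable)
        }
        Err(e) => {
            cleanup();
            Err(BuildStreamError::BackendSpecific {
                description: e.to_string(),
            })
        }
    }
}

fn main() {
    // Simulate an invalidated device; the closure stands in for `Release()`.
    let res = map_hresult(AUDCLNT_E_DEVICE_INVALIDATED, || println!("client released"));
    println!("{:?}", res);
}
```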
|
||||
|
||||
impl PartialEq for Device {
|
||||
|
@ -586,38 +916,45 @@ impl PartialEq for Device {
|
|||
// In this code section we're trying to use the GetId method for the device comparison, cf.
|
||||
// https://docs.microsoft.com/en-us/windows/desktop/api/mmdeviceapi/nf-mmdeviceapi-immdevice-getid
|
||||
unsafe {
|
||||
struct IdRAII (LPWSTR);
|
||||
struct IdRAII(LPWSTR);
|
||||
/// RAII for device IDs.
|
||||
impl Drop for IdRAII {
|
||||
fn drop(&mut self) {
|
||||
unsafe {CoTaskMemFree(self.0 as *mut c_void)}
|
||||
unsafe { CoTaskMemFree(self.0 as *mut c_void) }
|
||||
}
|
||||
}
|
||||
let mut id1: LPWSTR = ptr::null_mut();
|
||||
let rc1 = (*self.device).GetId(&mut id1);
|
||||
// GetId only fails with E_OUTOFMEMORY and if it does, we're probably dead already.
|
||||
// Plus it won't do to change the device comparison logic unexpectedly.
|
||||
if rc1 != winerror::S_OK {panic! ("cpal: GetId failure: {}", rc1)}
|
||||
if rc1 != winerror::S_OK {
|
||||
panic!("cpal: GetId failure: {}", rc1)
|
||||
}
|
||||
let id1 = IdRAII(id1);
|
||||
let mut id2: LPWSTR = ptr::null_mut();
|
||||
let rc2 = (*other.device).GetId(&mut id2);
|
||||
if rc2 != winerror::S_OK {panic! ("cpal: GetId failure: {}", rc1)}
|
||||
if rc2 != winerror::S_OK {
|
||||
panic!("cpal: GetId failure: {}", rc1)
|
||||
}
|
||||
let id2 = IdRAII(id2);
|
||||
// 16-bit null-terminated comparison.
|
||||
let mut offset = 0;
|
||||
loop {
|
||||
let w1: WCHAR = *id1.0.offset(offset);
|
||||
let w2: WCHAR = *id2.0.offset(offset);
|
||||
if w1 == 0 && w2 == 0 {return true}
|
||||
if w1 != w2 {return false}
|
||||
if w1 == 0 && w2 == 0 {
|
||||
return true;
|
||||
}
|
||||
if w1 != w2 {
|
||||
return false;
|
||||
}
|
||||
offset += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
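The comparison above walks two raw, null-terminated UTF-16 device IDs one `WCHAR` at a time. As an illustration only (not part of this diff), the same logic over safe slices, assuming the IDs have already been copied out of the `LPWSTR` buffers with their terminator included, looks like this:

```rust
// Compare two null-terminated wide-string IDs up to (but not including) the terminator.
fn wide_ids_equal(id1: &[u16], id2: &[u16]) -> bool {
    let end1 = id1.iter().position(|&w| w == 0).unwrap_or(id1.len());
    let end2 = id2.iter().position(|&w| w == 0).unwrap_or(id2.len());
    id1[..end1] == id2[..end2]
}

fn main() {
    let a: Vec<u16> = "device-1\0".encode_utf16().collect();
    let b: Vec<u16> = "device-1\0".encode_utf16().collect();
    let c: Vec<u16> = "device-2\0".encode_utf16().collect();
    assert!(wide_ids_equal(&a, &b));
    assert!(!wide_ids_equal(&a, &c));
}
```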
|
||||
|
||||
impl Eq for Device {
|
||||
}
|
||||
impl Eq for Device {}
|
||||
|
||||
impl Clone for Device {
|
||||
#[inline]
|
||||
|
@ -669,16 +1006,14 @@ impl From<*const IMMDevice> for Endpoint {
|
|||
fn from(device: *const IMMDevice) -> Self {
|
||||
unsafe {
|
||||
let endpoint = immendpoint_from_immdevice(device);
|
||||
Endpoint { endpoint: endpoint }
|
||||
Endpoint { endpoint }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Endpoint {
|
||||
fn data_flow(&self) -> EDataFlow {
|
||||
unsafe {
|
||||
data_flow_from_immendpoint(self.endpoint)
|
||||
}
|
||||
unsafe { data_flow_from_immendpoint(self.endpoint) }
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -709,10 +1044,8 @@ lazy_static! {
|
|||
/// RAII object around `IMMDeviceEnumerator`.
|
||||
struct Enumerator(*mut IMMDeviceEnumerator);
|
||||
|
||||
unsafe impl Send for Enumerator {
|
||||
}
|
||||
unsafe impl Sync for Enumerator {
|
||||
}
|
||||
unsafe impl Send for Enumerator {}
|
||||
unsafe impl Sync for Enumerator {}
|
||||
|
||||
impl Drop for Enumerator {
|
||||
#[inline]
|
||||
|
@ -735,20 +1068,18 @@ impl Devices {
|
|||
unsafe {
|
||||
let mut collection: *mut IMMDeviceCollection = mem::uninitialized();
|
||||
// can fail because of wrong parameters (should never happen) or out of memory
|
||||
check_result_backend_specific(
|
||||
(*ENUMERATOR.0).EnumAudioEndpoints(
|
||||
eAll,
|
||||
DEVICE_STATE_ACTIVE,
|
||||
&mut collection,
|
||||
)
|
||||
)?;
|
||||
check_result_backend_specific((*ENUMERATOR.0).EnumAudioEndpoints(
|
||||
eAll,
|
||||
DEVICE_STATE_ACTIVE,
|
||||
&mut collection,
|
||||
))?;
|
||||
|
||||
let mut count = mem::uninitialized();
|
||||
let count = mem::uninitialized();
|
||||
// can fail if the parameter is null, which should never happen
|
||||
check_result_backend_specific((*collection).GetCount(&mut count))?;
|
||||
check_result_backend_specific((*collection).GetCount(&count))?;
|
||||
|
||||
Ok(Devices {
|
||||
collection: collection,
|
||||
collection,
|
||||
total_count: count,
|
||||
next_item: 0,
|
||||
})
|
||||
|
@ -756,10 +1087,8 @@ impl Devices {
|
|||
}
|
||||
}
|
||||
|
||||
unsafe impl Send for Devices {
|
||||
}
|
||||
unsafe impl Sync for Devices {
|
||||
}
|
||||
unsafe impl Send for Devices {}
|
||||
unsafe impl Sync for Devices {}
|
||||
|
||||
impl Drop for Devices {
|
||||
#[inline]
|
||||
|
@ -799,8 +1128,7 @@ impl Iterator for Devices {
|
|||
fn default_device(data_flow: EDataFlow) -> Option<Device> {
|
||||
unsafe {
|
||||
let mut device = mem::uninitialized();
|
||||
let hres = (*ENUMERATOR.0)
|
||||
.GetDefaultAudioEndpoint(data_flow, eConsole, &mut device);
|
||||
let hres = (*ENUMERATOR.0).GetDefaultAudioEndpoint(data_flow, eConsole, &mut device);
|
||||
if let Err(_err) = check_result(hres) {
|
||||
return None; // TODO: check specifically for `E_NOTFOUND`, and panic otherwise
|
||||
}
|
||||
|
@ -815,3 +1143,57 @@ pub fn default_input_device() -> Option<Device> {
|
|||
pub fn default_output_device() -> Option<Device> {
|
||||
default_device(eRender)
|
||||
}
|
||||
|
||||
// Turns a `Format` into a `WAVEFORMATEXTENSIBLE`.
|
||||
//
|
||||
// Returns `None` if the WAVEFORMATEXTENSIBLE does not support the given format.
|
||||
fn format_to_waveformatextensible(format: &Format) -> Option<mmreg::WAVEFORMATEXTENSIBLE> {
|
||||
let format_tag = match format.data_type {
|
||||
SampleFormat::I16 => mmreg::WAVE_FORMAT_PCM,
|
||||
SampleFormat::F32 => mmreg::WAVE_FORMAT_EXTENSIBLE,
|
||||
SampleFormat::U16 => return None,
|
||||
};
|
||||
let channels = format.channels as WORD;
|
||||
let sample_rate = format.sample_rate.0 as DWORD;
|
||||
let sample_bytes = format.data_type.sample_size() as WORD;
|
||||
let avg_bytes_per_sec = u32::from(channels) * sample_rate * u32::from(sample_bytes);
|
||||
let block_align = channels * sample_bytes;
|
||||
let bits_per_sample = 8 * sample_bytes;
|
||||
let cb_size = match format.data_type {
|
||||
SampleFormat::I16 => 0,
|
||||
SampleFormat::F32 => {
|
||||
let extensible_size = mem::size_of::<mmreg::WAVEFORMATEXTENSIBLE>();
|
||||
let ex_size = mem::size_of::<mmreg::WAVEFORMATEX>();
|
||||
(extensible_size - ex_size) as WORD
|
||||
}
|
||||
SampleFormat::U16 => return None,
|
||||
};
|
||||
let waveformatex = mmreg::WAVEFORMATEX {
|
||||
wFormatTag: format_tag,
|
||||
nChannels: channels,
|
||||
nSamplesPerSec: sample_rate,
|
||||
nAvgBytesPerSec: avg_bytes_per_sec,
|
||||
nBlockAlign: block_align,
|
||||
wBitsPerSample: bits_per_sample,
|
||||
cbSize: cb_size,
|
||||
};
|
||||
|
||||
// CPAL does not care about speaker positions, so pass audio straight through.
|
||||
// TODO: This constant should be defined in winapi but is missing.
|
||||
const KSAUDIO_SPEAKER_DIRECTOUT: DWORD = 0;
|
||||
let channel_mask = KSAUDIO_SPEAKER_DIRECTOUT;
|
||||
|
||||
let sub_format = match format.data_type {
|
||||
SampleFormat::I16 => ksmedia::KSDATAFORMAT_SUBTYPE_PCM,
|
||||
SampleFormat::F32 => ksmedia::KSDATAFORMAT_SUBTYPE_IEEE_FLOAT,
|
||||
SampleFormat::U16 => return None,
|
||||
};
|
||||
let waveformatextensible = mmreg::WAVEFORMATEXTENSIBLE {
|
||||
Format: waveformatex,
|
||||
Samples: bits_per_sample as WORD,
|
||||
dwChannelMask: channel_mask,
|
||||
SubFormat: sub_format,
|
||||
};
|
||||
|
||||
Some(waveformatextensible)
|
||||
}
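For a concrete sense of the field arithmetic in `format_to_waveformatextensible`, take an assumed stereo `F32` stream at 48 kHz (values chosen purely for illustration): `nBlockAlign` is the size of one frame in bytes, `nAvgBytesPerSec` the byte rate, and `wBitsPerSample` the per-sample bit depth.

```rust
fn main() {
    let channels: u16 = 2;
    let sample_rate: u32 = 48_000;
    let sample_bytes: u16 = 4; // size of an f32 sample in bytes

    let block_align = channels * sample_bytes; // bytes per frame
    let avg_bytes_per_sec = u32::from(channels) * sample_rate * u32::from(sample_bytes);
    let bits_per_sample = 8 * sample_bytes;

    assert_eq!(block_align, 8);
    assert_eq!(avg_bytes_per_sec, 384_000);
    assert_eq!(bits_per_sample, 32);
    println!(
        "nBlockAlign = {}, nAvgBytesPerSec = {}, wBitsPerSample = {}",
        block_align, avg_bytes_per_sec, bits_per_sample
    );
}
```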
|
||||
|
|
|
@ -1,20 +1,15 @@
|
|||
extern crate winapi;
|
||||
|
||||
use BackendSpecificError;
|
||||
use BuildStreamError;
|
||||
use DefaultFormatError;
|
||||
use DeviceNameError;
|
||||
use DevicesError;
|
||||
use Format;
|
||||
use PlayStreamError;
|
||||
use PauseStreamError;
|
||||
use StreamDataResult;
|
||||
use SupportedFormatsError;
|
||||
pub use self::device::{
|
||||
default_input_device, default_output_device, Device, Devices, SupportedInputFormats,
|
||||
SupportedOutputFormats,
|
||||
};
|
||||
pub use self::stream::Stream;
|
||||
use self::winapi::um::winnt::HRESULT;
|
||||
use std::io::Error as IoError;
|
||||
use traits::{DeviceTrait, EventLoopTrait, HostTrait, StreamIdTrait};
|
||||
pub use self::device::{Device, Devices, SupportedInputFormats, SupportedOutputFormats, default_input_device, default_output_device};
|
||||
pub use self::stream::{EventLoop, StreamId};
|
||||
use traits::HostTrait;
|
||||
use BackendSpecificError;
|
||||
use DevicesError;
|
||||
|
||||
mod com;
|
||||
mod device;
|
||||
|
@ -37,7 +32,6 @@ impl Host {
|
|||
impl HostTrait for Host {
|
||||
type Devices = Devices;
|
||||
type Device = Device;
|
||||
type EventLoop = EventLoop;
|
||||
|
||||
fn is_available() -> bool {
|
||||
// Assume WASAPI is always available on windows.
|
||||
|
@ -55,79 +49,8 @@ impl HostTrait for Host {
|
|||
fn default_output_device(&self) -> Option<Self::Device> {
|
||||
default_output_device()
|
||||
}
|
||||
|
||||
fn event_loop(&self) -> Self::EventLoop {
|
||||
EventLoop::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl DeviceTrait for Device {
|
||||
type SupportedInputFormats = SupportedInputFormats;
|
||||
type SupportedOutputFormats = SupportedOutputFormats;
|
||||
|
||||
fn name(&self) -> Result<String, DeviceNameError> {
|
||||
Device::name(self)
|
||||
}
|
||||
|
||||
fn supported_input_formats(&self) -> Result<Self::SupportedInputFormats, SupportedFormatsError> {
|
||||
Device::supported_input_formats(self)
|
||||
}
|
||||
|
||||
fn supported_output_formats(&self) -> Result<Self::SupportedOutputFormats, SupportedFormatsError> {
|
||||
Device::supported_output_formats(self)
|
||||
}
|
||||
|
||||
fn default_input_format(&self) -> Result<Format, DefaultFormatError> {
|
||||
Device::default_input_format(self)
|
||||
}
|
||||
|
||||
fn default_output_format(&self) -> Result<Format, DefaultFormatError> {
|
||||
Device::default_output_format(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl EventLoopTrait for EventLoop {
|
||||
type Device = Device;
|
||||
type StreamId = StreamId;
|
||||
|
||||
fn build_input_stream(
|
||||
&self,
|
||||
device: &Self::Device,
|
||||
format: &Format,
|
||||
) -> Result<Self::StreamId, BuildStreamError> {
|
||||
EventLoop::build_input_stream(self, device, format)
|
||||
}
|
||||
|
||||
fn build_output_stream(
|
||||
&self,
|
||||
device: &Self::Device,
|
||||
format: &Format,
|
||||
) -> Result<Self::StreamId, BuildStreamError> {
|
||||
EventLoop::build_output_stream(self, device, format)
|
||||
}
|
||||
|
||||
fn play_stream(&self, stream: Self::StreamId) -> Result<(), PlayStreamError> {
|
||||
EventLoop::play_stream(self, stream)
|
||||
}
|
||||
|
||||
fn pause_stream(&self, stream: Self::StreamId) -> Result<(), PauseStreamError> {
|
||||
EventLoop::pause_stream(self, stream)
|
||||
}
|
||||
|
||||
fn destroy_stream(&self, stream: Self::StreamId) {
|
||||
EventLoop::destroy_stream(self, stream)
|
||||
}
|
||||
|
||||
fn run<F>(&self, callback: F) -> !
|
||||
where
|
||||
F: FnMut(Self::StreamId, StreamDataResult) + Send,
|
||||
{
|
||||
EventLoop::run(self, callback)
|
||||
}
|
||||
}
|
||||
|
||||
impl StreamIdTrait for StreamId {}
|
||||
|
||||
#[inline]
|
||||
fn check_result(result: HRESULT) -> Result<(), IoError> {
|
||||
if result < 0 {
|
||||
|
@ -140,9 +63,8 @@ fn check_result(result: HRESULT) -> Result<(), IoError> {
|
|||
fn check_result_backend_specific(result: HRESULT) -> Result<(), BackendSpecificError> {
|
||||
match check_result(result) {
|
||||
Ok(()) => Ok(()),
|
||||
Err(err) => {
|
||||
let description = format!("{}", err);
|
||||
return Err(BackendSpecificError { description });
|
||||
}
|
||||
Err(err) => Err(BackendSpecificError {
|
||||
description: format!("{}", err),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
|
File diff suppressed because it is too large
176 src/lib.rs
|
@ -7,25 +7,21 @@
|
|||
//! least one [**DefaultHost**](./struct.Host.html) that is guaranteed to be available.
|
||||
//! - A [**Device**](./struct.Device.html) is an audio device that may have any number of input and
|
||||
//! output streams.
|
||||
//! - A stream is an open flow of audio data. Input streams allow you to receive audio data, output
|
||||
//! streams allow you to play audio data. You must choose which **Device** will run your stream
|
||||
//! before you can create one. Often, a default device can be retrieved via the **Host**.
|
||||
//! - An [**EventLoop**](./struct.EventLoop.html) is a collection of streams being run by one or
|
||||
//! more **Device**s under a single **Host**. Each stream must belong to an **EventLoop**, and
|
||||
//! all the streams that belong to an **EventLoop** are managed together.
|
||||
//! - A [**Stream**](./trait.Stream.html) is an open flow of audio data. Input streams allow you to
|
||||
//! receive audio data, output streams allow you to play audio data. You must choose which
|
||||
//! **Device** will run your stream before you can create one. Often, a default device can be
|
||||
//! retrieved via the **Host**.
|
||||
//!
|
||||
//! The first step is to initialise the `Host` (for accessing audio devices) and create an
|
||||
//! `EventLoop`:
|
||||
//! The first step is to initialise the `Host`:
|
||||
//!
|
||||
//! ```
|
||||
//! use cpal::traits::HostTrait;
|
||||
//! let host = cpal::default_host();
|
||||
//! let event_loop = host.event_loop();
|
||||
//! ```
|
||||
//!
|
||||
//! Then choose a `Device`. The easiest way is to use the default input or output `Device` via the
|
||||
//! `default_input_device()` or `default_output_device()` functions. Alternatively you can
|
||||
//! enumerate all the available devices with the `devices()` function. Beware that the
|
||||
//! Then choose an available `Device`. The easiest way is to use the default input or output
|
||||
//! `Device` via the `default_input_device()` or `default_output_device()` functions. Alternatively
|
||||
//! you can enumerate all the available devices with the `devices()` function. Beware that the
|
||||
//! `default_*_device()` functions return an `Option` in case no device is available for that
|
||||
//! stream type on the system.
|
||||
//!
|
||||
|
@ -56,87 +52,97 @@
|
|||
//! .with_max_sample_rate();
|
||||
//! ```
|
||||
//!
|
||||
//! Now that we have everything for the stream, we can create it from our event loop:
|
||||
//! Now that we have everything for the stream, we are ready to create it from our selected device:
|
||||
//!
|
||||
//! ```no_run
|
||||
//! use cpal::traits::{DeviceTrait, EventLoopTrait, HostTrait};
|
||||
//! use cpal::traits::{DeviceTrait, HostTrait, StreamTrait};
|
||||
//! # let host = cpal::default_host();
|
||||
//! # let event_loop = host.event_loop();
|
||||
//! # let device = host.default_output_device().unwrap();
|
||||
//! # let format = device.supported_output_formats().unwrap().next().unwrap().with_max_sample_rate();
|
||||
//! let stream_id = event_loop.build_output_stream(&device, &format).unwrap();
|
||||
//! # let format = device.default_output_format().unwrap();
|
||||
//! let stream = device.build_output_stream(
|
||||
//! &format,
|
||||
//! move |data| {
|
||||
//! // react to stream events and read or write stream data here.
|
||||
//! },
|
||||
//! move |err| {
|
||||
//! // react to errors here.
|
||||
//! },
|
||||
//! );
|
||||
//! ```
|
||||
//!
|
||||
//! The value returned by `build_output_stream()` is of type `StreamId` and is an identifier that
|
||||
//! will allow you to control the stream.
|
||||
//! While the stream is running, the selected audio device will periodically call the data callback
|
||||
//! that was passed to the function. The callback is passed an instance of type `StreamData` that
|
||||
//! represents the data that must be read from or written to. The inner `UnknownTypeOutputBuffer`
|
||||
//! can be one of `I16`, `U16` or `F32` depending on the format that was passed to
|
||||
//! `build_output_stream`.
|
||||
//!
|
||||
//! Now we must start the stream. This is done with the `play_stream()` method on the event loop.
|
||||
//!
|
||||
//! ```no_run
|
||||
//! # use cpal::traits::{EventLoopTrait, HostTrait};
|
||||
//! # let host = cpal::default_host();
|
||||
//! # let event_loop = host.event_loop();
|
||||
//! # let stream_id = unimplemented!();
|
||||
//! event_loop.play_stream(stream_id).expect("failed to play_stream");
|
||||
//! ```
|
||||
//!
|
||||
//! Now everything is ready! We call `run()` on the `event_loop` to begin processing.
|
||||
//!
|
||||
//! ```no_run
|
||||
//! # use cpal::traits::{EventLoopTrait, HostTrait};
|
||||
//! # let host = cpal::default_host();
|
||||
//! # let event_loop = host.event_loop();
|
||||
//! event_loop.run(move |_stream_id, _stream_result| {
|
||||
//! // react to stream events and read or write stream data here
|
||||
//! });
|
||||
//! ```
|
||||
//!
|
||||
//! > **Note**: Calling `run()` will block the thread forever, so it's usually best done in a
|
||||
//! > separate thread.
|
||||
//!
|
||||
//! While `run()` is running, the audio device of the user will from time to time call the callback
|
||||
//! that you passed to this function. The callback gets passed the stream ID and an instance of type
|
||||
//! `StreamData` that represents the data that must be read from or written to. The inner
|
||||
//! `UnknownTypeOutputBuffer` can be one of `I16`, `U16` or `F32` depending on the format that was
|
||||
//! passed to `build_output_stream`.
|
||||
//! > **Note**: Creating and running a stream will *not* block the thread. On modern platforms, the
|
||||
//! > given callback is called by a dedicated, high-priority thread responsible for delivering
|
||||
//! > audio data to the system's audio device in a timely manner. On older platforms that only
|
||||
//! > provide a blocking API (e.g. ALSA), CPAL will create a thread in order to consistently
|
||||
//! > provide non-blocking behaviour (currently this is a thread per stream, but this may change to
|
||||
//! > use a single thread for all streams). *If this is an issue for your platform or design,
|
||||
//! > please share your issue and use-case with the CPAL team on the github issue tracker for
|
||||
//! > consideration.*
|
||||
//!
|
||||
//! In this example, we simply fill the given output buffer with zeroes.
|
||||
//!
|
||||
//! ```no_run
|
||||
//! use cpal::{StreamData, UnknownTypeOutputBuffer};
|
||||
//! use cpal::traits::{EventLoopTrait, HostTrait};
|
||||
//! use cpal::traits::{DeviceTrait, HostTrait, StreamTrait};
|
||||
//! # let host = cpal::default_host();
|
||||
//! # let event_loop = host.event_loop();
|
||||
//! event_loop.run(move |stream_id, stream_result| {
|
||||
//! let stream_data = match stream_result {
|
||||
//! Ok(data) => data,
|
||||
//! Err(err) => {
|
||||
//! eprintln!("an error occurred on stream {:?}: {}", stream_id, err);
|
||||
//! return;
|
||||
//! # let device = host.default_output_device().unwrap();
|
||||
//! # let format = device.default_output_format().unwrap();
|
||||
//! let stream = device.build_output_stream(
|
||||
//! &format,
|
||||
//! move |data| {
|
||||
//! match data {
|
||||
//! StreamData::Output { buffer: UnknownTypeOutputBuffer::U16(mut buffer) } => {
|
||||
//! for elem in buffer.iter_mut() {
|
||||
//! *elem = u16::max_value() / 2;
|
||||
//! }
|
||||
//! },
|
||||
//! StreamData::Output { buffer: UnknownTypeOutputBuffer::I16(mut buffer) } => {
|
||||
//! for elem in buffer.iter_mut() {
|
||||
//! *elem = 0;
|
||||
//! }
|
||||
//! },
|
||||
//! StreamData::Output { buffer: UnknownTypeOutputBuffer::F32(mut buffer) } => {
|
||||
//! for elem in buffer.iter_mut() {
|
||||
//! *elem = 0.0;
|
||||
//! }
|
||||
//! },
|
||||
//! _ => (),
|
||||
//! }
|
||||
//! _ => return,
|
||||
//! };
|
||||
//!
|
||||
//! match stream_data {
|
||||
//! StreamData::Output { buffer: UnknownTypeOutputBuffer::U16(mut buffer) } => {
|
||||
//! for elem in buffer.iter_mut() {
|
||||
//! *elem = u16::max_value() / 2;
|
||||
//! }
|
||||
//! },
|
||||
//! StreamData::Output { buffer: UnknownTypeOutputBuffer::I16(mut buffer) } => {
|
||||
//! for elem in buffer.iter_mut() {
|
||||
//! *elem = 0;
|
||||
//! }
|
||||
//! },
|
||||
//! StreamData::Output { buffer: UnknownTypeOutputBuffer::F32(mut buffer) } => {
|
||||
//! for elem in buffer.iter_mut() {
|
||||
//! *elem = 0.0;
|
||||
//! }
|
||||
//! },
|
||||
//! _ => (),
|
||||
//! }
|
||||
//! });
|
||||
//! },
|
||||
//! move |err| {
|
||||
//! eprintln!("an error occurred on the output audio stream: {}", err);
|
||||
//! },
|
||||
//! );
|
||||
//! ```
|
||||
//!
|
||||
//! Not all platforms automatically run the stream upon creation. To ensure the stream has started,
|
||||
//! we can use `Stream::play`.
|
||||
//!
|
||||
//! ```no_run
|
||||
//! # use cpal::traits::{DeviceTrait, HostTrait, StreamTrait};
|
||||
//! # let host = cpal::default_host();
|
||||
//! # let device = host.default_output_device().unwrap();
|
||||
//! # let format = device.default_output_format().unwrap();
|
||||
//! # let stream = device.build_output_stream(&format, move |_data| {}, move |_err| {}).unwrap();
|
||||
//! stream.play().unwrap();
|
||||
//! ```
|
||||
//!
|
||||
//! Some devices support pausing the audio stream. This can be useful for saving energy in moments
|
||||
//! of silence.
|
||||
//!
|
||||
//! ```no_run
|
||||
//! # use cpal::traits::{DeviceTrait, HostTrait, StreamTrait};
|
||||
//! # let host = cpal::default_host();
|
||||
//! # let device = host.default_output_device().unwrap();
|
||||
//! # let format = device.default_output_format().unwrap();
|
||||
//! # let stream = device.build_output_stream(&format, move |_data| {}, move |_err| {}).unwrap();
|
||||
//! stream.pause().unwrap();
|
||||
|
||||
#![recursion_limit = "512"]
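Putting the documented steps together, an end-to-end use of the new API reads roughly as follows. This is a sketch built only from the calls shown in the documentation above; the silence-filling callback, the one-second sleep, and the reliance on `default_output_format` are illustrative choices, and error handling is kept minimal.

```rust
use cpal::traits::{DeviceTrait, HostTrait, StreamTrait};
use cpal::{StreamData, UnknownTypeOutputBuffer};

fn main() {
    let host = cpal::default_host();
    let device = host.default_output_device().expect("no output device available");
    let format = device.default_output_format().expect("no default output format");

    let stream = device
        .build_output_stream(
            &format,
            move |data| {
                // Fill whichever buffer type the device hands us with silence.
                match data {
                    StreamData::Output { buffer: UnknownTypeOutputBuffer::U16(mut buffer) } => {
                        for elem in buffer.iter_mut() {
                            *elem = u16::max_value() / 2;
                        }
                    }
                    StreamData::Output { buffer: UnknownTypeOutputBuffer::I16(mut buffer) } => {
                        for elem in buffer.iter_mut() {
                            *elem = 0;
                        }
                    }
                    StreamData::Output { buffer: UnknownTypeOutputBuffer::F32(mut buffer) } => {
                        for elem in buffer.iter_mut() {
                            *elem = 0.0;
                        }
                    }
                    _ => (),
                }
            },
            move |err| eprintln!("an error occurred on the output stream: {}", err),
        )
        .expect("failed to build output stream");

    // Not all platforms start the stream upon creation.
    stream.play().expect("failed to play stream");

    std::thread::sleep(std::time::Duration::from_secs(1));

    // Pausing may be unsupported on some backends; ignore failure here.
    let _ = stream.pause();
}
```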
|
||||
|
||||
|
@ -151,11 +157,10 @@ extern crate thiserror;
|
|||
|
||||
pub use error::*;
|
||||
pub use platform::{
|
||||
ALL_HOSTS, Device, Devices, EventLoop, Host, HostId, SupportedInputFormats,
|
||||
SupportedOutputFormats, StreamId, available_hosts, default_host, host_from_id,
|
||||
ALL_HOSTS, available_hosts, default_host, Device, Devices, Host, host_from_id,
|
||||
HostId, Stream, SupportedInputFormats, SupportedOutputFormats,
|
||||
};
|
||||
pub use samples_formats::{Sample, SampleFormat};
|
||||
|
||||
use std::ops::{Deref, DerefMut};
|
||||
|
||||
mod error;
|
||||
|
@ -198,6 +203,7 @@ pub struct SupportedFormat {
|
|||
}
|
||||
|
||||
/// Stream data passed to the `EventLoop::run` callback.
|
||||
#[derive(Debug)]
|
||||
pub enum StreamData<'a> {
|
||||
Input {
|
||||
buffer: UnknownTypeInputBuffer<'a>,
|
||||
|
@ -207,16 +213,13 @@ pub enum StreamData<'a> {
|
|||
},
|
||||
}
|
||||
|
||||
/// Stream data passed to the `EventLoop::run` callback, or an error in the case that the device
|
||||
/// was invalidated or some backend-specific error occurred.
|
||||
pub type StreamDataResult<'a> = Result<StreamData<'a>, StreamError>;
|
||||
|
||||
/// Represents a buffer containing audio data that may be read.
|
||||
///
|
||||
/// This struct implements the `Deref` trait targeting `[T]`. Therefore this buffer can be read the
|
||||
/// same way as reading from a `Vec` or any other kind of Rust array.
|
||||
// TODO: explain audio stuff in general
|
||||
// TODO: remove the wrapper and just use slices in next major version
|
||||
#[derive(Debug)]
|
||||
pub struct InputBuffer<'a, T: 'a>
|
||||
where
|
||||
T: Sample,
|
||||
|
@ -232,6 +235,7 @@ where
|
|||
// TODO: explain audio stuff in general
|
||||
// TODO: remove the wrapper and just use slices
|
||||
#[must_use]
|
||||
#[derive(Debug)]
|
||||
pub struct OutputBuffer<'a, T: 'a>
|
||||
where
|
||||
T: Sample,
|
||||
|
@ -242,6 +246,7 @@ where
|
|||
/// This is the struct that is provided to you by cpal when you want to read samples from a buffer.
|
||||
///
|
||||
/// Since the type of data is only known at runtime, you have to read the right buffer.
|
||||
#[derive(Debug)]
|
||||
pub enum UnknownTypeInputBuffer<'a> {
|
||||
/// Samples whose format is `u16`.
|
||||
U16(InputBuffer<'a, u16>),
|
||||
|
@ -254,6 +259,7 @@ pub enum UnknownTypeInputBuffer<'a> {
|
|||
/// This is the struct that is provided to you by cpal when you want to write samples to a buffer.
|
||||
///
|
||||
/// Since the type of data is only known at runtime, you have to fill the right buffer.
|
||||
#[derive(Debug)]
|
||||
pub enum UnknownTypeOutputBuffer<'a> {
|
||||
/// Samples whose format is `u16`.
|
||||
U16(OutputBuffer<'a, u16>),
|
||||
|
|
|
@ -58,14 +58,14 @@ macro_rules! impl_platform_host {
|
|||
/// type.
|
||||
pub struct Devices(DevicesInner);
|
||||
|
||||
/// The **EventLoop** implementation associated with the platform's dynamically dispatched
|
||||
/// The **Stream** implementation associated with the platform's dynamically dispatched
|
||||
/// **Host** type.
|
||||
pub struct EventLoop(EventLoopInner);
|
||||
|
||||
/// The **StreamId** implementation associated with the platform's dynamically dispatched
|
||||
/// **Host** type.
|
||||
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
|
||||
pub struct StreamId(StreamIdInner);
|
||||
// Streams cannot be `Send` or `Sync` if we plan to support Android's AAudio API. This is
|
||||
// because the stream API is not thread-safe, and the API prohibits calling certain
|
||||
// functions within the callback.
|
||||
//
|
||||
// TODO: Confirm this and add more specific detail and references.
|
||||
pub struct Stream(StreamInner, crate::platform::NotSendSyncAcrossAllPlatforms);
|
||||
|
||||
/// The **SupportedInputFormats** iterator associated with the platform's dynamically
|
||||
/// dispatched **Host** type.
|
||||
|
@ -95,22 +95,15 @@ macro_rules! impl_platform_host {
|
|||
)*
|
||||
}
|
||||
|
||||
enum EventLoopInner {
|
||||
$(
|
||||
$HostVariant(crate::host::$host_mod::EventLoop),
|
||||
)*
|
||||
}
|
||||
|
||||
enum HostInner {
|
||||
$(
|
||||
$HostVariant(crate::host::$host_mod::Host),
|
||||
)*
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
|
||||
enum StreamIdInner {
|
||||
enum StreamInner {
|
||||
$(
|
||||
$HostVariant(crate::host::$host_mod::StreamId),
|
||||
$HostVariant(crate::host::$host_mod::Stream),
|
||||
)*
|
||||
}
|
||||
|
||||
|
@ -154,7 +147,7 @@ macro_rules! impl_platform_host {
|
|||
match self.0 {
|
||||
$(
|
||||
DevicesInner::$HostVariant(ref mut d) => {
|
||||
d.next().map(DeviceInner::$HostVariant).map(Device)
|
||||
d.next().map(DeviceInner::$HostVariant).map(Device::from)
|
||||
}
|
||||
)*
|
||||
}
|
||||
|
@ -212,6 +205,7 @@ macro_rules! impl_platform_host {
|
|||
impl crate::traits::DeviceTrait for Device {
|
||||
type SupportedInputFormats = SupportedInputFormats;
|
||||
type SupportedOutputFormats = SupportedOutputFormats;
|
||||
type Stream = Stream;
|
||||
|
||||
fn name(&self) -> Result<String, crate::DeviceNameError> {
|
||||
match self.0 {
|
||||
|
@ -260,96 +254,25 @@ macro_rules! impl_platform_host {
|
|||
)*
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl crate::traits::EventLoopTrait for EventLoop {
|
||||
type StreamId = StreamId;
|
||||
type Device = Device;
|
||||
|
||||
#[allow(unreachable_patterns)]
|
||||
fn build_input_stream(
|
||||
&self,
|
||||
device: &Self::Device,
|
||||
format: &crate::Format,
|
||||
) -> Result<Self::StreamId, crate::BuildStreamError> {
|
||||
match (&self.0, &device.0) {
|
||||
$(
|
||||
(&EventLoopInner::$HostVariant(ref e), &DeviceInner::$HostVariant(ref d)) => {
|
||||
e.build_input_stream(d, format)
|
||||
.map(StreamIdInner::$HostVariant)
|
||||
.map(StreamId)
|
||||
}
|
||||
)*
|
||||
_ => panic!("tried to build a stream with a device from another host"),
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(unreachable_patterns)]
|
||||
fn build_output_stream(
|
||||
&self,
|
||||
device: &Self::Device,
|
||||
format: &crate::Format,
|
||||
) -> Result<Self::StreamId, crate::BuildStreamError> {
|
||||
match (&self.0, &device.0) {
|
||||
$(
|
||||
(&EventLoopInner::$HostVariant(ref e), &DeviceInner::$HostVariant(ref d)) => {
|
||||
e.build_output_stream(d, format)
|
||||
.map(StreamIdInner::$HostVariant)
|
||||
.map(StreamId)
|
||||
}
|
||||
)*
|
||||
_ => panic!("tried to build a stream with a device from another host"),
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(unreachable_patterns)]
|
||||
fn play_stream(&self, stream: Self::StreamId) -> Result<(), crate::PlayStreamError> {
|
||||
match (&self.0, stream.0) {
|
||||
$(
|
||||
(&EventLoopInner::$HostVariant(ref e), StreamIdInner::$HostVariant(ref s)) => {
|
||||
e.play_stream(s.clone())
|
||||
}
|
||||
)*
|
||||
_ => panic!("tried to play a stream with an ID associated with another host"),
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(unreachable_patterns)]
|
||||
fn pause_stream(&self, stream: Self::StreamId) -> Result<(), crate::PauseStreamError> {
|
||||
match (&self.0, stream.0) {
|
||||
$(
|
||||
(&EventLoopInner::$HostVariant(ref e), StreamIdInner::$HostVariant(ref s)) => {
|
||||
e.pause_stream(s.clone())
|
||||
}
|
||||
)*
|
||||
_ => panic!("tried to pause a stream with an ID associated with another host"),
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(unreachable_patterns)]
|
||||
fn destroy_stream(&self, stream: Self::StreamId) {
|
||||
match (&self.0, stream.0) {
|
||||
$(
|
||||
(&EventLoopInner::$HostVariant(ref e), StreamIdInner::$HostVariant(ref s)) => {
|
||||
e.destroy_stream(s.clone())
|
||||
}
|
||||
)*
|
||||
_ => panic!("tried to destroy a stream with an ID associated with another host"),
|
||||
}
|
||||
}
|
||||
|
||||
fn run<F>(&self, mut callback: F) -> !
|
||||
where
|
||||
F: FnMut(Self::StreamId, crate::StreamDataResult) + Send
|
||||
{
|
||||
fn build_input_stream<D, E>(&self, format: &crate::Format, data_callback: D, error_callback: E) -> Result<Self::Stream, crate::BuildStreamError>
|
||||
where D: FnMut(crate::StreamData) + Send + 'static, E: FnMut(crate::StreamError) + Send + 'static {
|
||||
match self.0 {
|
||||
$(
|
||||
EventLoopInner::$HostVariant(ref e) => {
|
||||
e.run(|id, result| {
|
||||
let result = result;
|
||||
callback(StreamId(StreamIdInner::$HostVariant(id)), result);
|
||||
});
|
||||
},
|
||||
DeviceInner::$HostVariant(ref d) => d.build_input_stream(format, data_callback, error_callback)
|
||||
.map(StreamInner::$HostVariant)
|
||||
.map(Stream::from),
|
||||
)*
|
||||
}
|
||||
}
|
||||
|
||||
fn build_output_stream<D, E>(&self, format: &crate::Format, data_callback: D, error_callback: E) -> Result<Self::Stream, crate::BuildStreamError>
|
||||
where D: FnMut(crate::StreamData) + Send + 'static, E: FnMut(crate::StreamError) + Send + 'static {
|
||||
match self.0 {
|
||||
$(
|
||||
DeviceInner::$HostVariant(ref d) => d.build_output_stream(format, data_callback, error_callback)
|
||||
.map(StreamInner::$HostVariant)
|
||||
.map(Stream::from),
|
||||
)*
|
||||
}
|
||||
}
|
||||
|
@ -358,7 +281,6 @@ macro_rules! impl_platform_host {
|
|||
impl crate::traits::HostTrait for Host {
|
||||
type Devices = Devices;
|
||||
type Device = Device;
|
||||
type EventLoop = EventLoop;
|
||||
|
||||
fn is_available() -> bool {
|
||||
$( crate::host::$host_mod::Host::is_available() ||)* false
|
||||
|
@ -368,7 +290,7 @@ macro_rules! impl_platform_host {
|
|||
match self.0 {
|
||||
$(
|
||||
HostInner::$HostVariant(ref h) => {
|
||||
h.devices().map(DevicesInner::$HostVariant).map(Devices)
|
||||
h.devices().map(DevicesInner::$HostVariant).map(Devices::from)
|
||||
}
|
||||
)*
|
||||
}
|
||||
|
@ -378,7 +300,7 @@ macro_rules! impl_platform_host {
|
|||
match self.0 {
|
||||
$(
|
||||
HostInner::$HostVariant(ref h) => {
|
||||
h.default_input_device().map(DeviceInner::$HostVariant).map(Device)
|
||||
h.default_input_device().map(DeviceInner::$HostVariant).map(Device::from)
|
||||
}
|
||||
)*
|
||||
}
|
||||
|
@ -388,53 +310,81 @@ macro_rules! impl_platform_host {
|
|||
match self.0 {
|
||||
$(
|
||||
HostInner::$HostVariant(ref h) => {
|
||||
h.default_output_device().map(DeviceInner::$HostVariant).map(Device)
|
||||
}
|
||||
)*
|
||||
}
|
||||
}
|
||||
|
||||
fn event_loop(&self) -> Self::EventLoop {
|
||||
match self.0 {
|
||||
$(
|
||||
HostInner::$HostVariant(ref h) => {
|
||||
EventLoop(EventLoopInner::$HostVariant(h.event_loop()))
|
||||
h.default_output_device().map(DeviceInner::$HostVariant).map(Device::from)
|
||||
}
|
||||
)*
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl crate::traits::StreamIdTrait for StreamId {}
|
||||
impl crate::traits::StreamTrait for Stream {
|
||||
fn play(&self) -> Result<(), crate::PlayStreamError> {
|
||||
match self.0 {
|
||||
$(
|
||||
StreamInner::$HostVariant(ref s) => {
|
||||
s.play()
|
||||
}
|
||||
)*
|
||||
}
|
||||
}
|
||||
|
||||
fn pause(&self) -> Result<(), crate::PauseStreamError> {
|
||||
match self.0 {
|
||||
$(
|
||||
StreamInner::$HostVariant(ref s) => {
|
||||
s.pause()
|
||||
}
|
||||
)*
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<DeviceInner> for Device {
|
||||
fn from(d: DeviceInner) -> Self {
|
||||
Device(d)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<DevicesInner> for Devices {
|
||||
fn from(d: DevicesInner) -> Self {
|
||||
Devices(d)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<HostInner> for Host {
|
||||
fn from(h: HostInner) -> Self {
|
||||
Host(h)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<StreamInner> for Stream {
|
||||
fn from(s: StreamInner) -> Self {
|
||||
Stream(s, Default::default())
|
||||
}
|
||||
}
|
||||
|
||||
$(
|
||||
impl From<crate::host::$host_mod::Device> for Device {
|
||||
fn from(h: crate::host::$host_mod::Device) -> Self {
|
||||
Device(DeviceInner::$HostVariant(h))
|
||||
DeviceInner::$HostVariant(h).into()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<crate::host::$host_mod::Devices> for Devices {
|
||||
fn from(h: crate::host::$host_mod::Devices) -> Self {
|
||||
Devices(DevicesInner::$HostVariant(h))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<crate::host::$host_mod::EventLoop> for EventLoop {
|
||||
fn from(h: crate::host::$host_mod::EventLoop) -> Self {
|
||||
EventLoop(EventLoopInner::$HostVariant(h))
|
||||
DevicesInner::$HostVariant(h).into()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<crate::host::$host_mod::Host> for Host {
|
||||
fn from(h: crate::host::$host_mod::Host) -> Self {
|
||||
Host(HostInner::$HostVariant(h))
|
||||
HostInner::$HostVariant(h).into()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<crate::host::$host_mod::StreamId> for StreamId {
|
||||
fn from(h: crate::host::$host_mod::StreamId) -> Self {
|
||||
StreamId(StreamIdInner::$HostVariant(h))
|
||||
impl From<crate::host::$host_mod::Stream> for Stream {
|
||||
fn from(h: crate::host::$host_mod::Stream) -> Self {
|
||||
StreamInner::$HostVariant(h).into()
|
||||
}
|
||||
}
|
||||
)*
|
||||
|
@ -457,7 +407,7 @@ macro_rules! impl_platform_host {
|
|||
HostId::$HostVariant => {
|
||||
crate::host::$host_mod::Host::new()
|
||||
.map(HostInner::$HostVariant)
|
||||
.map(Host)
|
||||
.map(Host::from)
|
||||
}
|
||||
)*
|
||||
}
|
||||
|
@ -471,9 +421,8 @@ mod platform_impl {
|
|||
pub use crate::host::alsa::{
|
||||
Device as AlsaDevice,
|
||||
Devices as AlsaDevices,
|
||||
EventLoop as AlsaEventLoop,
|
||||
Host as AlsaHost,
|
||||
StreamId as AlsaStreamId,
|
||||
Stream as AlsaStream,
|
||||
SupportedInputFormats as AlsaSupportedInputFormats,
|
||||
SupportedOutputFormats as AlsaSupportedOutputFormats,
|
||||
};
|
||||
|
@ -494,9 +443,8 @@ mod platform_impl {
|
|||
pub use crate::host::coreaudio::{
|
||||
Device as CoreAudioDevice,
|
||||
Devices as CoreAudioDevices,
|
||||
EventLoop as CoreAudioEventLoop,
|
||||
Host as CoreAudioHost,
|
||||
StreamId as CoreAudioStreamId,
|
||||
Stream as CoreAudioStream,
|
||||
SupportedInputFormats as CoreAudioSupportedInputFormats,
|
||||
SupportedOutputFormats as CoreAudioSupportedOutputFormats,
|
||||
};
|
||||
|
@ -516,9 +464,8 @@ mod platform_impl {
|
|||
pub use crate::host::emscripten::{
|
||||
Device as EmscriptenDevice,
|
||||
Devices as EmscriptenDevices,
|
||||
EventLoop as EmscriptenEventLoop,
|
||||
Host as EmscriptenHost,
|
||||
StreamId as EmscriptenStreamId,
|
||||
Stream as EmscriptenStream,
|
||||
SupportedInputFormats as EmscriptenSupportedInputFormats,
|
||||
SupportedOutputFormats as EmscriptenSupportedOutputFormats,
|
||||
};
|
||||
|
@ -539,18 +486,16 @@ mod platform_impl {
|
|||
pub use crate::host::asio::{
|
||||
Device as AsioDevice,
|
||||
Devices as AsioDevices,
|
||||
EventLoop as AsioEventLoop,
|
||||
Stream as AsioStream,
|
||||
Host as AsioHost,
|
||||
StreamId as AsioStreamId,
|
||||
SupportedInputFormats as AsioSupportedInputFormats,
|
||||
SupportedOutputFormats as AsioSupportedOutputFormats,
|
||||
};
|
||||
pub use crate::host::wasapi::{
|
||||
Device as WasapiDevice,
|
||||
Devices as WasapiDevices,
|
||||
EventLoop as WasapiEventLoop,
|
||||
Stream as WasapiStream,
|
||||
Host as WasapiHost,
|
||||
StreamId as WasapiStreamId,
|
||||
SupportedInputFormats as WasapiSupportedInputFormats,
|
||||
SupportedOutputFormats as WasapiSupportedOutputFormats,
|
||||
};
|
||||
|
@ -591,3 +536,19 @@ mod platform_impl {
|
|||
.into()
|
||||
}
|
||||
}
|
||||
|
||||
// The following zero-sized types are for applying Send/Sync restrictions to ensure
|
||||
// consistent behaviour across different platforms. These verbosely named types are used
|
||||
// (rather than using the markers directly) in the hope of making the compile errors
|
||||
// slightly more helpful.
|
||||
//
|
||||
// TODO: Remove these in favour of using negative trait bounds if they stabilise.
|
||||
|
||||
// A marker used to remove the `Send` and `Sync` traits.
|
||||
struct NotSendSyncAcrossAllPlatforms(std::marker::PhantomData<*mut ()>);
|
||||
|
||||
impl Default for NotSendSyncAcrossAllPlatforms {
|
||||
fn default() -> Self {
|
||||
NotSendSyncAcrossAllPlatforms(std::marker::PhantomData)
|
||||
}
|
||||
}
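The marker works because a `PhantomData<*mut ()>` field opts the containing type out of the auto traits `Send` and `Sync`: raw pointers implement neither, and auto traits are only derived when every field implements them. A standalone sketch of the mechanism (outside this diff, with hypothetical type names):

```rust
use std::marker::PhantomData;

// Zero-sized field whose raw-pointer PhantomData removes Send and Sync
// from any struct that contains it.
struct NotSendSync(PhantomData<*mut ()>);

struct Stream {
    _marker: NotSendSync,
}

fn assert_send<T: Send>() {}
fn assert_sync<T: Sync>() {}

fn main() {
    // Both of these fail to compile, which is exactly the restriction the
    // platform module wants to enforce for its `Stream` type:
    // assert_send::<Stream>();
    // assert_sync::<Stream>();
    let _ = Stream { _marker: NotSendSync(PhantomData) };
}
```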
|
||||
|
|
103 src/traits.rs
|
@ -10,7 +10,8 @@ use {
|
|||
OutputDevices,
|
||||
PauseStreamError,
|
||||
PlayStreamError,
|
||||
StreamDataResult,
|
||||
StreamData,
|
||||
StreamError,
|
||||
SupportedFormat,
|
||||
SupportedFormatsError,
|
||||
};
|
||||
|
@ -39,8 +40,6 @@ pub trait HostTrait {
|
|||
type Devices: Iterator<Item = Self::Device>;
|
||||
/// The `Device` type yielded by the host.
|
||||
type Device: DeviceTrait;
|
||||
/// The event loop type used by the `Host`
|
||||
type EventLoop: EventLoopTrait<Device = Self::Device>;
|
||||
|
||||
/// Whether or not the host is available on the system.
|
||||
fn is_available() -> bool;
|
||||
|
@ -60,9 +59,6 @@ pub trait HostTrait {
|
|||
/// Returns `None` if no output device is available.
|
||||
fn default_output_device(&self) -> Option<Self::Device>;
|
||||
|
||||
/// Initialise the event loop, ready for managing audio streams.
|
||||
fn event_loop(&self) -> Self::EventLoop;
|
||||
|
||||
/// An iterator yielding all `Device`s currently available to the system that support one or more
|
||||
/// input stream formats.
|
||||
///
|
||||
|
@ -99,6 +95,8 @@ pub trait DeviceTrait {
|
|||
type SupportedInputFormats: Iterator<Item = SupportedFormat>;
|
||||
/// The iterator type yielding supported output stream formats.
|
||||
type SupportedOutputFormats: Iterator<Item = SupportedFormat>;
|
||||
/// The stream type created by `build_input_stream` and `build_output_stream`.
|
||||
type Stream: StreamTrait;
|
||||
|
||||
/// The human-readable name of the device.
|
||||
fn name(&self) -> Result<String, DeviceNameError>;
|
||||
|
@ -118,81 +116,28 @@ pub trait DeviceTrait {
|
|||
|
||||
/// The default output stream format for the device.
|
||||
fn default_output_format(&self) -> Result<Format, DefaultFormatError>;
|
||||
|
||||
/// Create an input stream.
|
||||
fn build_input_stream<D, E>(&self, format: &Format, data_callback: D, error_callback: E) -> Result<Self::Stream, BuildStreamError>
|
||||
where D: FnMut(StreamData) + Send + 'static, E: FnMut(StreamError) + Send + 'static;
|
||||
|
||||
/// Create an output stream.
|
||||
fn build_output_stream<D, E>(&self, format: &Format, data_callback: D, error_callback: E) -> Result<Self::Stream, BuildStreamError>
|
||||
where D: FnMut(StreamData) + Send + 'static, E: FnMut(StreamError) + Send + 'static;
|
||||
}
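Because stream construction now lives on the device, code can be written once against `DeviceTrait` and reused with any host. A sketch with an assumed helper name and placeholder callbacks (the error mapping to `FormatNotSupported` is an illustrative simplification, not part of this diff):

```rust
use cpal::traits::DeviceTrait;
use cpal::BuildStreamError;

// Hypothetical helper: build an output stream on any device type, using the
// device's default output format and placeholder callbacks.
fn build_silent_output<D: DeviceTrait>(device: &D) -> Result<D::Stream, BuildStreamError> {
    let format = device
        .default_output_format()
        .map_err(|_| BuildStreamError::FormatNotSupported)?;
    device.build_output_stream(
        &format,
        move |_data| { /* samples would be written here */ },
        move |err| eprintln!("stream error: {}", err),
    )
}
```

Any host's device type can be passed in, for example the result of `host.default_output_device()`.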
|
||||
|
||||
/// Collection of streams managed together.
|
||||
///
|
||||
/// Created with the `Host::event_loop` method.
|
||||
pub trait EventLoopTrait {
|
||||
/// The `Device` type yielded by the host.
|
||||
type Device: DeviceTrait;
|
||||
/// The type used to uniquely distinguish between streams.
|
||||
type StreamId: StreamIdTrait;
|
||||
/// A stream created from `Device`, with methods to control playback.
|
||||
pub trait StreamTrait {
|
||||
/// Run the stream.
|
||||
///
|
||||
/// Note: Not all platforms automatically run the stream upon creation, so it is important to
|
||||
/// call `play` after creation if it is expected that the stream should run immediately.
|
||||
fn play(&self) -> Result<(), PlayStreamError>;
|
||||
|
||||
/// Creates a new input stream that will run from the given device and with the given format.
|
||||
/// Some devices support pausing the audio stream. This can be useful for saving energy in
|
||||
/// moments of silence.
|
||||
///
|
||||
/// On success, returns an identifier for the stream.
|
||||
///
|
||||
/// Can return an error if the device is no longer valid, or if the input stream format is not
|
||||
/// supported by the device.
|
||||
fn build_input_stream(
|
||||
&self,
|
||||
device: &Self::Device,
|
||||
format: &Format,
|
||||
) -> Result<Self::StreamId, BuildStreamError>;
|
||||
|
||||
/// Creates a new output stream that will play on the given device and with the given format.
|
||||
///
|
||||
/// On success, returns an identifier for the stream.
|
||||
///
|
||||
/// Can return an error if the device is no longer valid, or if the output stream format is not
|
||||
/// supported by the device.
|
||||
fn build_output_stream(
|
||||
&self,
|
||||
device: &Self::Device,
|
||||
format: &Format,
|
||||
) -> Result<Self::StreamId, BuildStreamError>;
|
||||
|
||||
/// Instructs the audio device that it should start playing the stream with the given ID.
|
||||
///
|
||||
/// Has no effect if the stream was already playing.
|
||||
///
|
||||
/// Only call this after you have submitted some data, otherwise you may hear some glitches.
|
||||
///
|
||||
/// # Panic
|
||||
///
|
||||
/// If the stream does not exist, this function can either panic or be a no-op.
|
||||
fn play_stream(&self, stream: Self::StreamId) -> Result<(), PlayStreamError>;
|
||||
|
||||
/// Instructs the audio device that it should stop playing the stream with the given ID.
|
||||
///
|
||||
/// Has no effect if the stream was already paused.
|
||||
///
|
||||
/// If you call `play` afterwards, the playback will resume where it was.
|
||||
///
|
||||
/// # Panic
|
||||
///
|
||||
/// If the stream does not exist, this function can either panic or be a no-op.
|
||||
fn pause_stream(&self, stream: Self::StreamId) -> Result<(), PauseStreamError>;
|
||||
|
||||
/// Destroys an existing stream.
|
||||
///
|
||||
/// # Panic
|
||||
///
|
||||
/// If the stream does not exist, this function can either panic or be a no-op.
|
||||
fn destroy_stream(&self, stream: Self::StreamId);
|
||||
|
||||
/// Takes control of the current thread and begins the stream processing.
|
||||
///
|
||||
/// > **Note**: Since it takes control of the thread, this method is best called on a separate
|
||||
/// > thread.
|
||||
///
|
||||
/// Whenever a stream needs to be fed some data, the closure passed as parameter is called.
|
||||
/// You can call the other methods of `EventLoop` without getting a deadlock.
|
||||
fn run<F>(&self, callback: F) -> !
|
||||
where
|
||||
F: FnMut(Self::StreamId, StreamDataResult) + Send;
|
||||
/// Note: Not all devices support suspending the stream at the hardware level. This method may
|
||||
/// fail in these cases.
|
||||
fn pause(&self) -> Result<(), PauseStreamError>;
|
||||
}
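A host-agnostic helper written against `StreamTrait` alone might look like the sketch below; the function name is illustrative, and both error cases are treated as non-fatal.

```rust
use cpal::traits::StreamTrait;

// Start playback, then attempt to pause, treating an unsupported pause as
// non-fatal since not every backend can suspend at the hardware level.
fn play_then_try_pause<S: StreamTrait>(stream: &S) {
    if let Err(err) = stream.play() {
        eprintln!("failed to start stream: {}", err);
        return;
    }
    if let Err(err) = stream.pause() {
        eprintln!("could not pause stream: {}", err);
    }
}
```

This compiles for any `Stream` type produced by `DeviceTrait::build_input_stream` or `build_output_stream`, regardless of the backend.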
|
||||
|
||||
/// The set of required bounds for host `StreamId` types.
|
||||
pub trait StreamIdTrait: Clone + std::fmt::Debug + std::hash::Hash + PartialEq + Eq {}
|
||||
|
|