Run rustfmt on the code (#162)

Authored by tomaka on 2017-10-11 13:24:49 +02:00; committed by GitHub
parent cdcef96279
commit 2028d5907f
13 changed files with 622 additions and 399 deletions

.rustfmt.toml (new file, 18 lines)

@ -0,0 +1,18 @@
fn_args_density = "Compressed"
fn_args_layout = "Visual"
fn_brace_style = "SameLineWhere"
fn_call_style = "Visual"
fn_empty_single_line = false
format_strings = true
generics_indent = "Visual"
impl_empty_single_line = false
match_block_trailing_comma = true
reorder_imported_names = true
reorder_imports = true
reorder_imports_in_group = true
spaces_around_ranges = true
use_try_shorthand = true
where_density = "Tall"
where_style = "Legacy"
wrap_match_arms = false
write_mode = "Overwrite"
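These settings produce the visually indented, compressed style seen throughout the diff below: wrapped function and call arguments are aligned under the opening parenthesis, try!(..) becomes the ? shorthand, block-bodied match arms gain a trailing comma, and ranges are spaced as "0 .. n". The short sketch that follows is hand-written, is not part of the cpal sources, and is not guaranteed to match rustfmt's exact line breaks; it only illustrates roughly what code looks like under this configuration, which rustfmt picks up automatically from the crate's .rustfmt.toml.

// Hand-written illustration of the configured style; not rustfmt output and not from cpal.
use std::fs::File;
use std::io::{self, Read};

// fn_args_layout / fn_call_style = "Visual": wrapped arguments line up
// under the opening parenthesis instead of being block-indented.
fn read_prefix(path: &str,
               len: usize)
               -> Result<Vec<u8>, io::Error> {
    // use_try_shorthand = true: try!(expr) is written as expr?.
    let mut file = File::open(path)?;
    let mut buf = vec![0u8; len];
    file.read_exact(&mut buf)?;
    Ok(buf)
}

fn main() {
    match read_prefix("/etc/hostname", 4) {
        Ok(bytes) => {
            // spaces_around_ranges = true: ranges are written 0 .. n.
            for i in 0 .. bytes.len() {
                println!("{}", bytes[i]);
            }
        },
        // match_block_trailing_comma = true: the block arm above ends in a comma.
        Err(e) => println!("error: {:?}", e),
    }
}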


@ -20,12 +20,17 @@ impl Executor for MyExecutor {
fn main() {
let endpoint = cpal::default_endpoint().expect("Failed to get default endpoint");
let format = endpoint.supported_formats().unwrap().next().expect("Failed to get endpoint format");
let format = endpoint
.supported_formats()
.unwrap()
.next()
.expect("Failed to get endpoint format");
let event_loop = cpal::EventLoop::new();
let executor = Arc::new(MyExecutor);
let (mut voice, stream) = cpal::Voice::new(&endpoint, &format, &event_loop).expect("Failed to create a voice");
let (mut voice, stream) = cpal::Voice::new(&endpoint, &format, &event_loop)
.expect("Failed to create a voice");
// Produce a sinusoid of maximum amplitude.
let samples_rate = format.samples_rate.0 as f32;
@ -36,22 +41,37 @@ fn main() {
task::spawn(stream.for_each(move |buffer| -> Result<_, ()> {
match buffer {
cpal::UnknownTypeBuffer::U16(mut buffer) => {
for (sample, value) in buffer.chunks_mut(format.channels.len()).zip(&mut data_source) {
for (sample, value) in buffer
.chunks_mut(format.channels.len())
.zip(&mut data_source)
{
let value = ((value * 0.5 + 0.5) * std::u16::MAX as f32) as u16;
for out in sample.iter_mut() { *out = value; }
for out in sample.iter_mut() {
*out = value;
}
}
},
cpal::UnknownTypeBuffer::I16(mut buffer) => {
for (sample, value) in buffer.chunks_mut(format.channels.len()).zip(&mut data_source) {
for (sample, value) in buffer
.chunks_mut(format.channels.len())
.zip(&mut data_source)
{
let value = (value * std::i16::MAX as f32) as i16;
for out in sample.iter_mut() { *out = value; }
for out in sample.iter_mut() {
*out = value;
}
}
},
cpal::UnknownTypeBuffer::F32(mut buffer) => {
for (sample, value) in buffer.chunks_mut(format.channels.len()).zip(&mut data_source) {
for out in sample.iter_mut() { *out = value; }
for (sample, value) in buffer
.chunks_mut(format.channels.len())
.zip(&mut data_source)
{
for out in sample.iter_mut() {
*out = value;
}
}
},
};
@ -59,14 +79,12 @@ fn main() {
Ok(())
})).execute(executor);
thread::spawn(move || {
loop {
thread::sleep(Duration::from_millis(500));
voice.pause();
thread::sleep(Duration::from_millis(500));
voice.play();
}
});
thread::spawn(move || loop {
thread::sleep(Duration::from_millis(500));
voice.pause();
thread::sleep(Duration::from_millis(500));
voice.play();
});
event_loop.run();
}


@ -5,11 +5,16 @@ fn main() {
println!("Endpoints: ");
for (endpoint_index, endpoint) in endpoints.enumerate() {
println!("{}. Endpoint \"{}\" Audio formats: ", endpoint_index + 1, endpoint.name());
println!("{}. Endpoint \"{}\" Audio formats: ",
endpoint_index + 1,
endpoint.name());
let formats = match endpoint.supported_formats() {
Ok(f) => f,
Err(e) => { println!("Error: {:?}", e); continue; }
Err(e) => {
println!("Error: {:?}", e);
continue;
},
};
for (format_index, format) in formats.enumerate() {


@ -1,6 +1,7 @@
use super::Endpoint;
use super::alsa;
use super::check_errors;
use super::Endpoint;
use std::ffi::CStr;
use std::ffi::CString;
@ -17,8 +18,10 @@ pub struct EndpointsIterator {
next_str: *const *const u8,
}
unsafe impl Send for EndpointsIterator {}
unsafe impl Sync for EndpointsIterator {}
unsafe impl Send for EndpointsIterator {
}
unsafe impl Sync for EndpointsIterator {
}
impl Drop for EndpointsIterator {
#[inline]
@ -34,8 +37,8 @@ impl Default for EndpointsIterator {
unsafe {
let mut hints = mem::uninitialized();
// TODO: check in which situation this can fail
check_errors(alsa::snd_device_name_hint(-1, b"pcm\0".as_ptr() as *const _,
&mut hints)).unwrap();
check_errors(alsa::snd_device_name_hint(-1, b"pcm\0".as_ptr() as *const _, &mut hints))
.unwrap();
let hints = hints as *const *const u8;
@ -95,8 +98,10 @@ impl Iterator for EndpointsIterator {
// trying to open the PCM device to see if it can be opened
let name_zeroed = CString::new(name.clone()).unwrap();
let mut playback_handle = mem::uninitialized();
if alsa::snd_pcm_open(&mut playback_handle, name_zeroed.as_ptr() as *const _,
alsa::SND_PCM_STREAM_PLAYBACK, alsa::SND_PCM_NONBLOCK) == 0
if alsa::snd_pcm_open(&mut playback_handle,
name_zeroed.as_ptr() as *const _,
alsa::SND_PCM_STREAM_PLAYBACK,
alsa::SND_PCM_NONBLOCK) == 0
{
alsa::snd_pcm_close(playback_handle);
} else {


@ -11,16 +11,16 @@ use SampleFormat;
use SamplesRate;
use UnknownTypeBuffer;
use std::{ffi, cmp, iter, mem, ptr};
use std::vec::IntoIter as VecIntoIter;
use std::{cmp, ffi, iter, mem, ptr};
use std::sync::{Arc, Mutex};
use std::sync::atomic::{AtomicBool, Ordering};
use std::vec::IntoIter as VecIntoIter;
use futures::Poll;
use futures::task::Task;
use futures::task;
use futures::stream::Stream;
use futures::Async;
use futures::Poll;
use futures::stream::Stream;
use futures::task;
use futures::task::Task;
pub type SupportedFormatsIterator = VecIntoIter<Format>;
@ -34,10 +34,10 @@ struct Trigger {
impl Trigger {
fn new() -> Self {
let mut fds = [0,0];
let mut fds = [0, 0];
match unsafe { libc::pipe(fds.as_mut_ptr()) } {
0 => Trigger { fds: fds },
_ => panic!("Could not create pipe")
_ => panic!("Could not create pipe"),
}
}
fn read_fd(&self) -> libc::c_int {
@ -72,9 +72,9 @@ impl Drop for Trigger {
pub struct Endpoint(String);
impl Endpoint {
pub fn get_supported_formats_list(&self)
-> Result<SupportedFormatsIterator, FormatsEnumerationError>
{
pub fn get_supported_formats_list(
&self)
-> Result<SupportedFormatsIterator, FormatsEnumerationError> {
unsafe {
let mut playback_handle = mem::uninitialized();
let device_name = ffi::CString::new(self.0.clone()).expect("Unable to get device name");
@ -90,17 +90,18 @@ impl Endpoint {
let hw_params = HwParams::alloc();
match check_errors(alsa::snd_pcm_hw_params_any(playback_handle, hw_params.0)) {
Err(_) => return Ok(Vec::new().into_iter()),
Ok(_) => ()
Ok(_) => (),
};
// TODO: check endianess
const FORMATS: [(SampleFormat, alsa::snd_pcm_format_t); 3] = [
//SND_PCM_FORMAT_S8,
//SND_PCM_FORMAT_U8,
(SampleFormat::I16, alsa::SND_PCM_FORMAT_S16_LE),
//SND_PCM_FORMAT_S16_BE,
(SampleFormat::U16, alsa::SND_PCM_FORMAT_U16_LE),
//SND_PCM_FORMAT_U16_BE,
const FORMATS: [(SampleFormat, alsa::snd_pcm_format_t); 3] =
[
//SND_PCM_FORMAT_S8,
//SND_PCM_FORMAT_U8,
(SampleFormat::I16, alsa::SND_PCM_FORMAT_S16_LE),
//SND_PCM_FORMAT_S16_BE,
(SampleFormat::U16, alsa::SND_PCM_FORMAT_U16_LE),
//SND_PCM_FORMAT_U16_BE,
/*SND_PCM_FORMAT_S24_LE,
SND_PCM_FORMAT_S24_BE,
SND_PCM_FORMAT_U24_LE,
@ -109,7 +110,7 @@ impl Endpoint {
SND_PCM_FORMAT_S32_BE,
SND_PCM_FORMAT_U32_LE,
SND_PCM_FORMAT_U32_BE,*/
(SampleFormat::F32, alsa::SND_PCM_FORMAT_FLOAT_LE),
(SampleFormat::F32, alsa::SND_PCM_FORMAT_FLOAT_LE)
/*SND_PCM_FORMAT_FLOAT_BE,
SND_PCM_FORMAT_FLOAT64_LE,
SND_PCM_FORMAT_FLOAT64_BE,
@ -132,25 +133,35 @@ impl Endpoint {
SND_PCM_FORMAT_S18_3LE,
SND_PCM_FORMAT_S18_3BE,
SND_PCM_FORMAT_U18_3LE,
SND_PCM_FORMAT_U18_3BE,*/
];
SND_PCM_FORMAT_U18_3BE,*/,
];
let mut supported_formats = Vec::new();
for &(sample_format, alsa_format) in FORMATS.iter() {
if alsa::snd_pcm_hw_params_test_format(playback_handle, hw_params.0, alsa_format) == 0 {
if alsa::snd_pcm_hw_params_test_format(playback_handle,
hw_params.0,
alsa_format) == 0
{
supported_formats.push(sample_format);
}
}
let mut min_rate = mem::uninitialized();
check_errors(alsa::snd_pcm_hw_params_get_rate_min(hw_params.0, &mut min_rate, ptr::null_mut())).expect("unable to get minimum supported rete");
check_errors(alsa::snd_pcm_hw_params_get_rate_min(hw_params.0,
&mut min_rate,
ptr::null_mut()))
.expect("unable to get minimum supported rete");
let mut max_rate = mem::uninitialized();
check_errors(alsa::snd_pcm_hw_params_get_rate_max(hw_params.0, &mut max_rate, ptr::null_mut())).expect("unable to get maximum supported rate");
check_errors(alsa::snd_pcm_hw_params_get_rate_max(hw_params.0,
&mut max_rate,
ptr::null_mut()))
.expect("unable to get maximum supported rate");
let samples_rates = if min_rate == max_rate {
vec![min_rate]
/*} else if alsa::snd_pcm_hw_params_test_rate(playback_handle, hw_params.0, min_rate + 1, 0) == 0 {
(min_rate .. max_rate + 1).collect()*/ // TODO: code is correct but returns lots of stuff
(min_rate .. max_rate + 1).collect()*/
// TODO: code is correct but returns lots of stuff
} else {
const RATES: [libc::c_uint; 13] = [
5512,
@ -170,7 +181,11 @@ impl Endpoint {
let mut rates = Vec::new();
for &rate in RATES.iter() {
if alsa::snd_pcm_hw_params_test_rate(playback_handle, hw_params.0, rate, 0) == 0 {
if alsa::snd_pcm_hw_params_test_rate(playback_handle,
hw_params.0,
rate,
0) == 0
{
rates.push(rate);
}
}
@ -178,36 +193,52 @@ impl Endpoint {
/*if rates.len() == 0 {
(min_rate .. max_rate + 1).collect()
} else {*/
rates // TODO: code is correct but returns lots of stuff
rates // TODO: code is correct but returns lots of stuff
//}
};
let mut min_channels = mem::uninitialized();
check_errors(alsa::snd_pcm_hw_params_get_channels_min(hw_params.0, &mut min_channels)).expect("unable to get minimum supported channel count");
check_errors(alsa::snd_pcm_hw_params_get_channels_min(hw_params.0, &mut min_channels))
.expect("unable to get minimum supported channel count");
let mut max_channels = mem::uninitialized();
check_errors(alsa::snd_pcm_hw_params_get_channels_max(hw_params.0, &mut max_channels)).expect("unable to get maximum supported channel count");
let max_channels = cmp::min(max_channels, 32); // TODO: limiting to 32 channels or too much stuff is returned
let supported_channels = (min_channels .. max_channels + 1).filter_map(|num| {
if alsa::snd_pcm_hw_params_test_channels(playback_handle, hw_params.0, num) == 0 {
Some([ChannelPosition::FrontLeft, ChannelPosition::FrontRight,
ChannelPosition::BackLeft, ChannelPosition::BackRight,
ChannelPosition::FrontCenter, ChannelPosition::LowFrequency]
.iter().take(num as usize).cloned().collect::<Vec<_>>())
check_errors(alsa::snd_pcm_hw_params_get_channels_max(hw_params.0, &mut max_channels))
.expect("unable to get maximum supported channel count");
let max_channels = cmp::min(max_channels, 32); // TODO: limiting to 32 channels or too much stuff is returned
let supported_channels = (min_channels .. max_channels + 1)
.filter_map(|num| if alsa::snd_pcm_hw_params_test_channels(
playback_handle,
hw_params.0,
num,
) == 0
{
Some(
[
ChannelPosition::FrontLeft,
ChannelPosition::FrontRight,
ChannelPosition::BackLeft,
ChannelPosition::BackRight,
ChannelPosition::FrontCenter,
ChannelPosition::LowFrequency,
].iter()
.take(num as usize)
.cloned()
.collect::<Vec<_>>(),
)
} else {
None
}
}).collect::<Vec<_>>();
})
.collect::<Vec<_>>();
let mut output = Vec::with_capacity(supported_formats.len() * supported_channels.len() *
samples_rates.len());
samples_rates.len());
for &data_type in supported_formats.iter() {
for channels in supported_channels.iter() {
for &rate in samples_rates.iter() {
output.push(Format {
channels: channels.clone(),
samples_rate: SamplesRate(rate as u32),
data_type: data_type,
});
channels: channels.clone(),
samples_rate: SamplesRate(rate as u32),
data_type: data_type,
});
}
}
}
@ -256,8 +287,10 @@ struct PollDescriptors {
voices: Vec<Arc<VoiceInner>>,
}
unsafe impl Send for EventLoopInner {}
unsafe impl Sync for EventLoopInner {}
unsafe impl Send for EventLoopInner {
}
unsafe impl Sync for EventLoopInner {
}
impl EventLoop {
#[inline]
@ -267,11 +300,13 @@ impl EventLoop {
EventLoop {
inner: Arc::new(EventLoopInner {
current_wait: Mutex::new(PollDescriptors {
descriptors: vec![libc::pollfd {
fd: pending_trigger.read_fd(),
events: libc::POLLIN,
revents: 0,
}],
descriptors: vec![
libc::pollfd {
fd: pending_trigger.read_fd(),
events: libc::POLLIN,
revents: 0,
},
],
voices: Vec::new(),
}),
pending_wait: Mutex::new(PollDescriptors {
@ -279,7 +314,7 @@ impl EventLoop {
voices: Vec::new(),
}),
pending_trigger: pending_trigger,
})
}),
}
}
@ -331,14 +366,19 @@ impl EventLoop {
{
let channel = *current_wait.voices[i_voice].channel.lock().unwrap();
let num_descriptors = current_wait.voices[i_voice].num_descriptors as libc::c_uint;
let num_descriptors =
current_wait.voices[i_voice].num_descriptors as libc::c_uint;
check_errors(alsa::snd_pcm_poll_descriptors_revents(channel, current_wait.descriptors
.as_mut_ptr().offset(i_descriptor),
num_descriptors, &mut revent)).unwrap();
}
if (revent as libc::c_short & libc::POLLOUT) != 0 {
let scheduled = current_wait.voices[i_voice].scheduled.lock().unwrap().take();
let scheduled = current_wait.voices[i_voice]
.scheduled
.lock()
.unwrap()
.take();
scheduled.unwrap().task.unpark();
for _ in 0 .. current_wait.voices[i_voice].num_descriptors {
@ -347,20 +387,27 @@ impl EventLoop {
current_wait.voices.remove(i_voice);
} else {
i_descriptor += current_wait.voices[i_voice].num_descriptors as isize;
i_descriptor += current_wait.voices[i_voice].num_descriptors as
isize;
i_voice += 1;
}
},
ScheduledKind::WaitResume => {
if current_wait.descriptors[i_descriptor as usize].revents != 0 {
// Unpark the task
let scheduled = current_wait.voices[i_voice].scheduled.lock().unwrap().take();
let scheduled = current_wait.voices[i_voice]
.scheduled
.lock()
.unwrap()
.take();
scheduled.unwrap().task.unpark();
// Emptying the signal.
let mut out = 0u64;
let ret = libc::read(current_wait.descriptors[i_descriptor as usize].fd,
&mut out as *mut u64 as *mut _, 8);
let ret =
libc::read(current_wait.descriptors[i_descriptor as usize].fd,
&mut out as *mut u64 as *mut _,
8);
assert_eq!(ret, 8);
// Remove from current waiting poll descriptors
@ -370,7 +417,7 @@ impl EventLoop {
i_descriptor += 1;
i_voice += 1;
}
}
},
}
}
}
@ -396,7 +443,7 @@ pub struct Scheduled {
kind: ScheduledKind,
}
#[derive(Clone,Copy)]
#[derive(Clone, Copy)]
pub enum ScheduledKind {
WaitResume,
WaitPCM,
@ -436,8 +483,10 @@ struct VoiceInner {
resume_trigger: Trigger,
}
unsafe impl Send for VoiceInner {}
unsafe impl Sync for VoiceInner {}
unsafe impl Send for VoiceInner {
}
unsafe impl Sync for VoiceInner {
}
impl SamplesStream {
#[inline]
@ -447,9 +496,9 @@ impl SamplesStream {
// We start by filling `scheduled`.
*self.inner.scheduled.lock().unwrap() = Some(Scheduled {
task: task::park(),
kind: kind,
});
task: task::park(),
kind: kind,
});
let mut pending_wait = self.inner.event_loop.pending_wait.lock().unwrap();
match kind {
@ -460,21 +509,26 @@ impl SamplesStream {
let len = pending_wait.descriptors.len();
let filled = alsa::snd_pcm_poll_descriptors(*channel,
pending_wait.descriptors.as_mut_ptr()
.offset(len as isize),
self.inner.num_descriptors as libc::c_uint);
pending_wait
.descriptors
.as_mut_ptr()
.offset(len as isize),
self.inner.num_descriptors as
libc::c_uint);
debug_assert_eq!(filled, self.inner.num_descriptors as libc::c_int);
pending_wait.descriptors.set_len(len + self.inner.num_descriptors);
pending_wait
.descriptors
.set_len(len + self.inner.num_descriptors);
},
ScheduledKind::WaitResume => {
// And we add the descriptor corresponding to the resume signal
// to `event_loop.pending_wait.descriptors`.
pending_wait.descriptors.push(libc::pollfd {
fd: self.inner.resume_trigger.read_fd(),
events: libc::POLLIN,
revents: 0,
});
}
fd: self.inner.resume_trigger.read_fd(),
events: libc::POLLIN,
revents: 0,
});
},
}
// We also fill `voices`.
@ -502,7 +556,7 @@ impl Stream for SamplesStream {
// Determine the number of samples that are available to write.
let available = {
let channel = self.inner.channel.lock().expect("could not lock channel");
let available = unsafe { alsa::snd_pcm_avail(*channel) }; // TODO: what about snd_pcm_avail_update?
let available = unsafe { alsa::snd_pcm_avail(*channel) }; // TODO: what about snd_pcm_avail_update?
if available == -32 {
// buffer underrun
@ -525,7 +579,9 @@ impl Stream for SamplesStream {
match self.inner.sample_format {
SampleFormat::I16 => {
let buffer = Buffer {
buffer: iter::repeat(unsafe { mem::uninitialized() }).take(available).collect(),
buffer: iter::repeat(unsafe { mem::uninitialized() })
.take(available)
.collect(),
inner: self.inner.clone(),
};
@ -533,7 +589,9 @@ impl Stream for SamplesStream {
},
SampleFormat::U16 => {
let buffer = Buffer {
buffer: iter::repeat(unsafe { mem::uninitialized() }).take(available).collect(),
buffer: iter::repeat(unsafe { mem::uninitialized() })
.take(available)
.collect(),
inner: self.inner.clone(),
};
@ -541,7 +599,9 @@ impl Stream for SamplesStream {
},
SampleFormat::F32 => {
let buffer = Buffer {
buffer: iter::repeat(unsafe { mem::uninitialized() }).take(available).collect(),
buffer: iter::repeat(unsafe { mem::uninitialized() })
.take(available)
.collect(),
inner: self.inner.clone(),
};
@ -558,7 +618,8 @@ impl HwParams {
pub fn alloc() -> HwParams {
unsafe {
let mut hw_params = mem::uninitialized();
check_errors(alsa::snd_pcm_hw_params_malloc(&mut hw_params)).expect("unable to get hardware parameters");
check_errors(alsa::snd_pcm_hw_params_malloc(&mut hw_params))
.expect("unable to get hardware parameters");
HwParams(hw_params)
}
}
@ -574,8 +635,7 @@ impl Drop for HwParams {
impl Voice {
pub fn new(endpoint: &Endpoint, format: &Format, event_loop: &EventLoop)
-> Result<(Voice, SamplesStream), CreationError>
{
-> Result<(Voice, SamplesStream), CreationError> {
unsafe {
let name = ffi::CString::new(endpoint.0.clone()).expect("unable to clone endpoint");
@ -595,33 +655,62 @@ impl Voice {
};
let hw_params = HwParams::alloc();
check_errors(alsa::snd_pcm_hw_params_any(playback_handle, hw_params.0)).expect("Errors on playback handle");
check_errors(alsa::snd_pcm_hw_params_set_access(playback_handle, hw_params.0, alsa::SND_PCM_ACCESS_RW_INTERLEAVED)).expect("handle not acessible");
check_errors(alsa::snd_pcm_hw_params_set_format(playback_handle, hw_params.0, data_type)).expect("format could not be set");
check_errors(alsa::snd_pcm_hw_params_set_rate(playback_handle, hw_params.0, format.samples_rate.0 as libc::c_uint, 0)).expect("sample rate could not be set");
check_errors(alsa::snd_pcm_hw_params_set_channels(playback_handle, hw_params.0, format.channels.len() as libc::c_uint)).expect("channel count could not be set");
let mut max_buffer_size = format.samples_rate.0 as alsa::snd_pcm_uframes_t / format.channels.len() as alsa::snd_pcm_uframes_t / 5; // 200ms of buffer
check_errors(alsa::snd_pcm_hw_params_set_buffer_size_max(playback_handle, hw_params.0, &mut max_buffer_size)).unwrap();
check_errors(alsa::snd_pcm_hw_params(playback_handle, hw_params.0)).expect("hardware params could not be set");
check_errors(alsa::snd_pcm_hw_params_any(playback_handle, hw_params.0))
.expect("Errors on playback handle");
check_errors(alsa::snd_pcm_hw_params_set_access(playback_handle,
hw_params.0,
alsa::SND_PCM_ACCESS_RW_INTERLEAVED))
.expect("handle not acessible");
check_errors(alsa::snd_pcm_hw_params_set_format(playback_handle,
hw_params.0,
data_type))
.expect("format could not be set");
check_errors(alsa::snd_pcm_hw_params_set_rate(playback_handle,
hw_params.0,
format.samples_rate.0 as libc::c_uint,
0))
.expect("sample rate could not be set");
check_errors(alsa::snd_pcm_hw_params_set_channels(playback_handle,
hw_params.0,
format.channels.len() as
libc::c_uint))
.expect("channel count could not be set");
let mut max_buffer_size = format.samples_rate.0 as alsa::snd_pcm_uframes_t /
format.channels.len() as alsa::snd_pcm_uframes_t /
5; // 200ms of buffer
check_errors(alsa::snd_pcm_hw_params_set_buffer_size_max(playback_handle,
hw_params.0,
&mut max_buffer_size))
.unwrap();
check_errors(alsa::snd_pcm_hw_params(playback_handle, hw_params.0))
.expect("hardware params could not be set");
let mut sw_params = mem::uninitialized(); // TODO: RAII
let mut sw_params = mem::uninitialized(); // TODO: RAII
check_errors(alsa::snd_pcm_sw_params_malloc(&mut sw_params)).unwrap();
check_errors(alsa::snd_pcm_sw_params_current(playback_handle, sw_params)).unwrap();
check_errors(alsa::snd_pcm_sw_params_set_start_threshold(playback_handle, sw_params, 0)).unwrap();
check_errors(alsa::snd_pcm_sw_params_set_start_threshold(playback_handle,
sw_params,
0))
.unwrap();
let (buffer_len, period_len) = {
let mut buffer = mem::uninitialized();
let mut period = mem::uninitialized();
check_errors(alsa::snd_pcm_get_params(playback_handle, &mut buffer, &mut period)).expect("could not initialize buffer");
check_errors(alsa::snd_pcm_get_params(playback_handle, &mut buffer, &mut period))
.expect("could not initialize buffer");
assert!(buffer != 0);
check_errors(alsa::snd_pcm_sw_params_set_avail_min(playback_handle, sw_params, period)).unwrap();
check_errors(alsa::snd_pcm_sw_params_set_avail_min(playback_handle,
sw_params,
period))
.unwrap();
let buffer = buffer as usize * format.channels.len();
let period = period as usize * format.channels.len();
(buffer, period)
};
check_errors(alsa::snd_pcm_sw_params(playback_handle, sw_params)).unwrap();
check_errors(alsa::snd_pcm_prepare(playback_handle)).expect("could not get playback handle");
check_errors(alsa::snd_pcm_prepare(playback_handle))
.expect("could not get playback handle");
let num_descriptors = {
let num_descriptors = alsa::snd_pcm_poll_descriptors_count(playback_handle);
@ -630,23 +719,20 @@ impl Voice {
};
let samples_stream_inner = Arc::new(VoiceInner {
event_loop: event_loop.inner.clone(),
channel: Mutex::new(playback_handle),
sample_format: format.data_type,
num_descriptors: num_descriptors,
num_channels: format.channels.len() as u16,
buffer_len: buffer_len,
period_len: period_len,
scheduled: Mutex::new(None),
is_paused: AtomicBool::new(true),
resume_trigger: Trigger::new(),
});
event_loop: event_loop.inner.clone(),
channel: Mutex::new(playback_handle),
sample_format: format.data_type,
num_descriptors: num_descriptors,
num_channels: format.channels.len() as u16,
buffer_len: buffer_len,
period_len: period_len,
scheduled: Mutex::new(None),
is_paused: AtomicBool::new(true),
resume_trigger: Trigger::new(),
});
Ok((Voice {
inner: samples_stream_inner.clone()
}, SamplesStream {
inner: samples_stream_inner
}))
Ok((Voice { inner: samples_stream_inner.clone() },
SamplesStream { inner: samples_stream_inner }))
}
}
@ -686,15 +772,17 @@ impl<T> Buffer<T> {
}
pub fn finish(self) {
let to_write = (self.buffer.len() / self.inner.num_channels as usize)
as alsa::snd_pcm_uframes_t;
let channel = self.inner.channel.lock().expect("Buffer channel lock failed");
let to_write = (self.buffer.len() / self.inner.num_channels as usize) as
alsa::snd_pcm_uframes_t;
let channel = self.inner
.channel
.lock()
.expect("Buffer channel lock failed");
unsafe {
loop {
let result = alsa::snd_pcm_writei(*channel,
self.buffer.as_ptr() as *const _,
to_write);
let result =
alsa::snd_pcm_writei(*channel, self.buffer.as_ptr() as *const _, to_write);
if result == -32 {
// buffer underrun
@ -716,7 +804,9 @@ fn check_errors(err: libc::c_int) -> Result<(), String> {
if err < 0 {
unsafe {
let s = ffi::CStr::from_ptr(alsa::snd_strerror(err)).to_bytes().to_vec();
let s = ffi::CStr::from_ptr(alsa::snd_strerror(err))
.to_bytes()
.to_vec();
let s = String::from_utf8(s).expect("Streaming error occured");
return Err(s);
}


@ -1,13 +1,15 @@
use super::Endpoint;
use ::Format;
use Format;
use std::vec::IntoIter as VecIntoIter;
pub struct EndpointsIterator(bool);
unsafe impl Send for EndpointsIterator {}
unsafe impl Sync for EndpointsIterator {}
unsafe impl Send for EndpointsIterator {
}
unsafe impl Sync for EndpointsIterator {
}
impl Default for EndpointsIterator {
fn default() -> Self {
@ -18,7 +20,12 @@ impl Default for EndpointsIterator {
impl Iterator for EndpointsIterator {
type Item = Endpoint;
fn next(&mut self) -> Option<Endpoint> {
if self.0 { None } else { self.0 = true; Some(Endpoint) }
if self.0 {
None
} else {
self.0 = true;
Some(Endpoint)
}
}
}


@ -1,20 +1,20 @@
extern crate coreaudio;
extern crate libc;
use ChannelPosition;
use CreationError;
use Format;
use FormatsEnumerationError;
use Sample;
use SampleFormat;
use SamplesRate;
use ChannelPosition;
use UnknownTypeBuffer;
use futures::Poll;
use futures::Async;
use futures::task::Task;
use futures::task;
use futures::Poll;
use futures::stream::Stream;
use futures::task;
use futures::task::Task;
use std::sync::{Arc, Mutex};
use std::thread;
use std::time::Duration;
@ -24,22 +24,24 @@ use self::coreaudio::audio_unit::render_callback::{self, data};
mod enumerate;
pub use self::enumerate::{EndpointsIterator,
SupportedFormatsIterator,
get_default_endpoint};
pub use self::enumerate::{EndpointsIterator, SupportedFormatsIterator, get_default_endpoint};
#[derive(Clone, PartialEq, Eq)]
pub struct Endpoint;
impl Endpoint {
pub fn get_supported_formats_list(&self)
-> Result<SupportedFormatsIterator, FormatsEnumerationError>
{
Ok(vec!(Format {
channels: vec![ChannelPosition::FrontLeft, ChannelPosition::FrontRight],
samples_rate: SamplesRate(44100),
data_type: SampleFormat::F32
}).into_iter())
pub fn get_supported_formats_list(
&self)
-> Result<SupportedFormatsIterator, FormatsEnumerationError> {
Ok(
vec![
Format {
channels: vec![ChannelPosition::FrontLeft, ChannelPosition::FrontRight],
samples_rate: SamplesRate(44100),
data_type: SampleFormat::F32,
},
].into_iter(),
)
}
pub fn get_name(&self) -> String {
@ -50,7 +52,9 @@ impl Endpoint {
pub struct EventLoop;
impl EventLoop {
#[inline]
pub fn new() -> EventLoop { EventLoop }
pub fn new() -> EventLoop {
EventLoop
}
#[inline]
pub fn run(&self) {
loop {
@ -65,7 +69,9 @@ pub struct Buffer<T> {
buffer: Vec<T>,
}
impl<T> Buffer<T> where T: Sample {
impl<T> Buffer<T>
where T: Sample
{
#[inline]
pub fn get_buffer(&mut self) -> &mut [T] {
&mut self.buffer[..]
@ -123,7 +129,7 @@ impl Stream for SamplesStream {
None => {
inner.scheduled_task = Some(task::park());
return Ok(Async::NotReady);
}
},
};
let buffer_len = current_callback.num_frames * current_callback.data.channels().count();
@ -139,12 +145,11 @@ impl Stream for SamplesStream {
impl Voice {
pub fn new(_: &Endpoint, _: &Format, _: &EventLoop)
-> Result<(Voice, SamplesStream), CreationError>
{
-> Result<(Voice, SamplesStream), CreationError> {
let inner = Arc::new(Mutex::new(SamplesStreamInner {
scheduled_task: None,
current_callback: None,
}));
scheduled_task: None,
current_callback: None,
}));
fn convert_error(err: coreaudio::Error) -> CreationError {
match err {
@ -158,14 +163,14 @@ impl Voice {
}
let au_type = if cfg!(target_os = "ios") {
// The DefaultOutput unit isn't available in iOS unfortunately. RemoteIO is a sensible replacement.
// See
// https://developer.apple.com/library/content/documentation/MusicAudio/Conceptual/AudioUnitHostingGuide_iOS/UsingSpecificAudioUnits/UsingSpecificAudioUnits.html
// The DefaultOutput unit isn't available in iOS unfortunately. RemoteIO is a sensible replacement.
// See
// https://developer.apple.com/library/content/documentation/MusicAudio/Conceptual/AudioUnitHostingGuide_iOS/UsingSpecificAudioUnits/UsingSpecificAudioUnits.html
coreaudio::audio_unit::IOType::RemoteIO
} else {
coreaudio::audio_unit::IOType::DefaultOutput
};
let mut audio_unit = try!(AudioUnit::new(au_type).map_err(convert_error));
let mut audio_unit = AudioUnit::new(au_type).map_err(convert_error)?;
// TODO: iOS uses integer and fixed-point data
@ -199,10 +204,10 @@ impl Voice {
Ok(())
});
try!(result.map_err(convert_error));
result.map_err(convert_error)?;
}
try!(audio_unit.start().map_err(convert_error));
audio_unit.start().map_err(convert_error)?;
let au_arc = Arc::new(Mutex::new(audio_unit));
@ -212,9 +217,10 @@ impl Voice {
};
Ok((Voice {
playing: true,
audio_unit: au_arc.clone(),
}, samples_stream))
playing: true,
audio_unit: au_arc.clone(),
},
samples_stream))
}
#[inline]


@ -74,31 +74,32 @@ extern crate futures;
extern crate lazy_static;
extern crate libc;
pub use samples_formats::{SampleFormat, Sample};
pub use samples_formats::{Sample, SampleFormat};
#[cfg(all(not(windows), not(target_os = "linux"), not(target_os = "freebsd"), not(target_os = "macos"), not(target_os = "ios")))]
#[cfg(all(not(windows), not(target_os = "linux"), not(target_os = "freebsd"),
not(target_os = "macos"), not(target_os = "ios")))]
use null as cpal_impl;
use std::fmt;
use std::error::Error;
use std::fmt;
use std::ops::{Deref, DerefMut};
use futures::stream::Stream;
use futures::Poll;
use futures::stream::Stream;
mod null;
mod samples_formats;
#[cfg(any(target_os = "linux", target_os = "freebsd"))]
#[path="alsa/mod.rs"]
#[path = "alsa/mod.rs"]
mod cpal_impl;
#[cfg(windows)]
#[path="wasapi/mod.rs"]
#[path = "wasapi/mod.rs"]
mod cpal_impl;
#[cfg(any(target_os = "macos", target_os = "ios"))]
#[path="coreaudio/mod.rs"]
#[path = "coreaudio/mod.rs"]
mod cpal_impl;
/// An iterator for the list of formats that are supported by the backend.
@ -151,18 +152,16 @@ pub struct Endpoint(cpal_impl::Endpoint);
impl Endpoint {
/// Returns an iterator that produces the list of formats that are supported by the backend.
#[inline]
pub fn supported_formats(&self) -> Result<SupportedFormatsIterator,
FormatsEnumerationError>
{
Ok(SupportedFormatsIterator(try!(self.0.get_supported_formats_list())))
pub fn supported_formats(&self) -> Result<SupportedFormatsIterator, FormatsEnumerationError> {
Ok(SupportedFormatsIterator(self.0.get_supported_formats_list()?))
}
/// Deprecated. Use `supported_formats` instead.
#[inline]
#[deprecated]
pub fn get_supported_formats_list(&self) -> Result<SupportedFormatsIterator,
FormatsEnumerationError>
{
pub fn get_supported_formats_list(
&self)
-> Result<SupportedFormatsIterator, FormatsEnumerationError> {
self.supported_formats()
}
@ -254,7 +253,9 @@ impl EventLoop {
/// You should destroy this object as soon as possible. Data is only committed when it
/// is destroyed.
#[must_use]
pub struct Buffer<T> where T: Sample {
pub struct Buffer<T>
where T: Sample
{
// also contains something, taken by `Drop`
target: Option<cpal_impl::Buffer<T>>,
}
@ -363,9 +364,8 @@ impl Voice {
/// Builds a new channel.
#[inline]
pub fn new(endpoint: &Endpoint, format: &Format, event_loop: &EventLoop)
-> Result<(Voice, SamplesStream), CreationError>
{
let (voice, stream) = try!(cpal_impl::Voice::new(&endpoint.0, format, &event_loop.0));
-> Result<(Voice, SamplesStream), CreationError> {
let (voice, stream) = cpal_impl::Voice::new(&endpoint.0, format, &event_loop.0)?;
let voice = Voice {
voice: voice,
@ -447,7 +447,9 @@ impl Stream for SamplesStream {
}
}
impl<T> Deref for Buffer<T> where T: Sample {
impl<T> Deref for Buffer<T>
where T: Sample
{
type Target = [T];
#[inline]
@ -456,14 +458,18 @@ impl<T> Deref for Buffer<T> where T: Sample {
}
}
impl<T> DerefMut for Buffer<T> where T: Sample {
impl<T> DerefMut for Buffer<T>
where T: Sample
{
#[inline]
fn deref_mut(&mut self) -> &mut [T] {
self.target.as_mut().unwrap().get_buffer()
}
}
impl<T> Drop for Buffer<T> where T: Sample {
impl<T> Drop for Buffer<T>
where T: Sample
{
#[inline]
fn drop(&mut self) {
self.target.take().unwrap().finish();


@ -2,9 +2,9 @@
use std::marker::PhantomData;
use futures::Async;
use futures::Poll;
use futures::stream::Stream;
use futures::Async;
use CreationError;
use Format;
@ -14,9 +14,13 @@ use UnknownTypeBuffer;
pub struct EventLoop;
impl EventLoop {
#[inline]
pub fn new() -> EventLoop { EventLoop }
pub fn new() -> EventLoop {
EventLoop
}
#[inline]
pub fn run(&self) { loop { /* TODO: don't spin */ } }
pub fn run(&self) {
loop { /* TODO: don't spin */ }
}
}
#[derive(Default)]
@ -41,9 +45,9 @@ pub struct Endpoint;
impl Endpoint {
#[inline]
pub fn get_supported_formats_list(&self)
-> Result<SupportedFormatsIterator, FormatsEnumerationError>
{
pub fn get_supported_formats_list(
&self)
-> Result<SupportedFormatsIterator, FormatsEnumerationError> {
unreachable!()
}
@ -70,8 +74,7 @@ pub struct SamplesStream;
impl Voice {
#[inline]
pub fn new(_: &Endpoint, _: &Format, _: &EventLoop)
-> Result<(Voice, SamplesStream), CreationError>
{
-> Result<(Voice, SamplesStream), CreationError> {
Err(CreationError::DeviceNotAvailable)
}


@ -1,9 +1,9 @@
//! Handles COM initialization and cleanup.
use std::ptr;
use super::winapi;
use super::ole32;
use super::check_result;
use super::ole32;
use super::winapi;
use std::ptr;
thread_local!(static COM_INITIALIZED: ComInitialized = {
unsafe {


@ -1,8 +1,9 @@
use super::winapi;
use super::ole32;
use super::com;
use super::Endpoint;
use super::check_result;
use super::com;
use super::ole32;
use super::winapi;
use std::mem;
use std::ptr;
@ -33,8 +34,10 @@ lazy_static! {
/// RAII object around `winapi::IMMDeviceEnumerator`.
struct Enumerator(*mut winapi::IMMDeviceEnumerator);
unsafe impl Send for Enumerator {}
unsafe impl Sync for Enumerator {}
unsafe impl Send for Enumerator {
}
unsafe impl Sync for Enumerator {
}
impl Drop for Enumerator {
#[inline]
@ -52,8 +55,10 @@ pub struct EndpointsIterator {
next_item: u32,
}
unsafe impl Send for EndpointsIterator {}
unsafe impl Sync for EndpointsIterator {}
unsafe impl Send for EndpointsIterator {
}
unsafe impl Sync for EndpointsIterator {
}
impl Drop for EndpointsIterator {
#[inline]
@ -72,7 +77,7 @@ impl Default for EndpointsIterator {
check_result((*ENUMERATOR.0).EnumAudioEndpoints(winapi::eRender,
winapi::DEVICE_STATE_ACTIVE,
&mut collection))
.unwrap();
.unwrap();
let mut count = mem::uninitialized();
// can fail if the parameter is null, which should never happen
@ -116,11 +121,11 @@ impl Iterator for EndpointsIterator {
pub fn get_default_endpoint() -> Option<Endpoint> {
unsafe {
let mut device = mem::uninitialized();
let hres = (*ENUMERATOR.0).GetDefaultAudioEndpoint(winapi::eRender,
winapi::eConsole, &mut device);
let hres = (*ENUMERATOR.0)
.GetDefaultAudioEndpoint(winapi::eRender, winapi::eConsole, &mut device);
if let Err(_err) = check_result(hres) {
return None; // TODO: check specifically for `E_NOTFOUND`, and panic otherwise
return None; // TODO: check specifically for `E_NOTFOUND`, and panic otherwise
}
Some(Endpoint::from_immdevice(device))


@ -2,23 +2,23 @@ extern crate winapi;
extern crate ole32;
extern crate kernel32;
use std::io::Error as IoError;
use std::os::windows::ffi::OsStringExt;
use std::ffi::OsString;
use std::sync::{Arc, Mutex, MutexGuard};
use std::ptr;
use std::io::Error as IoError;
use std::mem;
use std::os::windows::ffi::OsStringExt;
use std::ptr;
use std::slice;
use std::sync::{Arc, Mutex, MutexGuard};
use ChannelPosition;
use Format;
use FormatsEnumerationError;
use ChannelPosition;
use SamplesRate;
use SampleFormat;
use SamplesRate;
pub use std::option::IntoIter as OptionIntoIter;
pub use self::enumerate::{EndpointsIterator, get_default_endpoint};
pub use self::voice::{Voice, Buffer, EventLoop, SamplesStream};
pub use self::voice::{Buffer, EventLoop, SamplesStream, Voice};
pub use std::option::IntoIter as OptionIntoIter;
pub type SupportedFormatsIterator = OptionIntoIter<Format>;
@ -38,8 +38,10 @@ fn check_result(result: winapi::HRESULT) -> Result<(), IoError> {
/// Wrapper because of that stupid decision to remove `Send` and `Sync` from raw pointers.
#[derive(Copy, Clone)]
struct IAudioClientWrapper(*mut winapi::IAudioClient);
unsafe impl Send for IAudioClientWrapper {}
unsafe impl Sync for IAudioClientWrapper {}
unsafe impl Send for IAudioClientWrapper {
}
unsafe impl Sync for IAudioClientWrapper {
}
/// An opaque type that identifies an end point.
pub struct Endpoint {
@ -47,11 +49,13 @@ pub struct Endpoint {
/// We cache an uninitialized `IAudioClient` so that we can call functions from it without
/// having to create/destroy audio clients all the time.
future_audio_client: Arc<Mutex<Option<IAudioClientWrapper>>>, // TODO: add NonZero around the ptr
future_audio_client: Arc<Mutex<Option<IAudioClientWrapper>>>, // TODO: add NonZero around the ptr
}
unsafe impl Send for Endpoint {}
unsafe impl Sync for Endpoint {}
unsafe impl Send for Endpoint {
}
unsafe impl Sync for Endpoint {
}
impl Endpoint {
// TODO: this function returns a GUID of the endpoin
@ -87,7 +91,8 @@ impl Endpoint {
}
/// Ensures that `future_audio_client` contains a `Some` and returns a locked mutex to it.
fn ensure_future_audio_client(&self) -> Result<MutexGuard<Option<IAudioClientWrapper>>, IoError> {
fn ensure_future_audio_client(&self)
-> Result<MutexGuard<Option<IAudioClientWrapper>>, IoError> {
let mut lock = self.future_audio_client.lock().unwrap();
if lock.is_some() {
return Ok(lock);
@ -95,12 +100,14 @@ impl Endpoint {
let audio_client: *mut winapi::IAudioClient = unsafe {
let mut audio_client = mem::uninitialized();
let hresult = (*self.device).Activate(&winapi::IID_IAudioClient, winapi::CLSCTX_ALL,
ptr::null_mut(), &mut audio_client);
let hresult = (*self.device).Activate(&winapi::IID_IAudioClient,
winapi::CLSCTX_ALL,
ptr::null_mut(),
&mut audio_client);
// can fail if the device has been disconnected since we enumerated it, or if
// the device doesn't support playback for some reason
try!(check_result(hresult));
check_result(hresult)?;
assert!(!audio_client.is_null());
audio_client as *mut _
};
@ -112,15 +119,15 @@ impl Endpoint {
/// Returns an uninitialized `IAudioClient`.
#[inline]
fn build_audioclient(&self) -> Result<*mut winapi::IAudioClient, IoError> {
let mut lock = try!(self.ensure_future_audio_client());
let mut lock = self.ensure_future_audio_client()?;
let client = lock.unwrap().0;
*lock = None;
Ok(client)
}
pub fn get_supported_formats_list(&self)
-> Result<SupportedFormatsIterator, FormatsEnumerationError>
{
pub fn get_supported_formats_list(
&self)
-> Result<SupportedFormatsIterator, FormatsEnumerationError> {
// We always create voices in shared mode, therefore all samples go through an audio
// processor to mix them together.
// However there is no way to query the list of all formats that are supported by the
@ -150,10 +157,8 @@ impl Endpoint {
let format = {
let (channels, data_type) = match (*format_ptr).wFormatTag {
winapi::WAVE_FORMAT_PCM => {
(
vec![ChannelPosition::FrontLeft, ChannelPosition::FrontRight],
SampleFormat::I16
)
(vec![ChannelPosition::FrontLeft, ChannelPosition::FrontRight],
SampleFormat::I16)
},
winapi::WAVE_FORMAT_EXTENSIBLE => {
let format_ptr = format_ptr as *const winapi::WAVEFORMATEXTENSIBLE;
@ -162,24 +167,60 @@ impl Endpoint {
let mut channels = Vec::new();
let mask = (*format_ptr).dwChannelMask;
if (mask & winapi::SPEAKER_FRONT_LEFT) != 0 { channels.push(ChannelPosition::FrontLeft); }
if (mask & winapi::SPEAKER_FRONT_RIGHT) != 0 { channels.push(ChannelPosition::FrontRight); }
if (mask & winapi::SPEAKER_FRONT_CENTER) != 0 { channels.push(ChannelPosition::FrontCenter); }
if (mask & winapi::SPEAKER_LOW_FREQUENCY) != 0 { channels.push(ChannelPosition::LowFrequency); }
if (mask & winapi::SPEAKER_BACK_LEFT) != 0 { channels.push(ChannelPosition::BackLeft); }
if (mask & winapi::SPEAKER_BACK_RIGHT) != 0 { channels.push(ChannelPosition::BackRight); }
if (mask & winapi::SPEAKER_FRONT_LEFT_OF_CENTER) != 0 { channels.push(ChannelPosition::FrontLeftOfCenter); }
if (mask & winapi::SPEAKER_FRONT_RIGHT_OF_CENTER) != 0 { channels.push(ChannelPosition::FrontRightOfCenter); }
if (mask & winapi::SPEAKER_BACK_CENTER) != 0 { channels.push(ChannelPosition::BackCenter); }
if (mask & winapi::SPEAKER_SIDE_LEFT) != 0 { channels.push(ChannelPosition::SideLeft); }
if (mask & winapi::SPEAKER_SIDE_RIGHT) != 0 { channels.push(ChannelPosition::SideRight); }
if (mask & winapi::SPEAKER_TOP_CENTER) != 0 { channels.push(ChannelPosition::TopCenter); }
if (mask & winapi::SPEAKER_TOP_FRONT_LEFT) != 0 { channels.push(ChannelPosition::TopFrontLeft); }
if (mask & winapi::SPEAKER_TOP_FRONT_CENTER) != 0 { channels.push(ChannelPosition::TopFrontCenter); }
if (mask & winapi::SPEAKER_TOP_FRONT_RIGHT) != 0 { channels.push(ChannelPosition::TopFrontRight); }
if (mask & winapi::SPEAKER_TOP_BACK_LEFT) != 0 { channels.push(ChannelPosition::TopBackLeft); }
if (mask & winapi::SPEAKER_TOP_BACK_CENTER) != 0 { channels.push(ChannelPosition::TopBackCenter); }
if (mask & winapi::SPEAKER_TOP_BACK_RIGHT) != 0 { channels.push(ChannelPosition::TopBackRight); }
if (mask & winapi::SPEAKER_FRONT_LEFT) != 0 {
channels.push(ChannelPosition::FrontLeft);
}
if (mask & winapi::SPEAKER_FRONT_RIGHT) != 0 {
channels.push(ChannelPosition::FrontRight);
}
if (mask & winapi::SPEAKER_FRONT_CENTER) != 0 {
channels.push(ChannelPosition::FrontCenter);
}
if (mask & winapi::SPEAKER_LOW_FREQUENCY) != 0 {
channels.push(ChannelPosition::LowFrequency);
}
if (mask & winapi::SPEAKER_BACK_LEFT) != 0 {
channels.push(ChannelPosition::BackLeft);
}
if (mask & winapi::SPEAKER_BACK_RIGHT) != 0 {
channels.push(ChannelPosition::BackRight);
}
if (mask & winapi::SPEAKER_FRONT_LEFT_OF_CENTER) != 0 {
channels.push(ChannelPosition::FrontLeftOfCenter);
}
if (mask & winapi::SPEAKER_FRONT_RIGHT_OF_CENTER) != 0 {
channels.push(ChannelPosition::FrontRightOfCenter);
}
if (mask & winapi::SPEAKER_BACK_CENTER) != 0 {
channels.push(ChannelPosition::BackCenter);
}
if (mask & winapi::SPEAKER_SIDE_LEFT) != 0 {
channels.push(ChannelPosition::SideLeft);
}
if (mask & winapi::SPEAKER_SIDE_RIGHT) != 0 {
channels.push(ChannelPosition::SideRight);
}
if (mask & winapi::SPEAKER_TOP_CENTER) != 0 {
channels.push(ChannelPosition::TopCenter);
}
if (mask & winapi::SPEAKER_TOP_FRONT_LEFT) != 0 {
channels.push(ChannelPosition::TopFrontLeft);
}
if (mask & winapi::SPEAKER_TOP_FRONT_CENTER) != 0 {
channels.push(ChannelPosition::TopFrontCenter);
}
if (mask & winapi::SPEAKER_TOP_FRONT_RIGHT) != 0 {
channels.push(ChannelPosition::TopFrontRight);
}
if (mask & winapi::SPEAKER_TOP_BACK_LEFT) != 0 {
channels.push(ChannelPosition::TopBackLeft);
}
if (mask & winapi::SPEAKER_TOP_BACK_CENTER) != 0 {
channels.push(ChannelPosition::TopBackCenter);
}
if (mask & winapi::SPEAKER_TOP_BACK_RIGHT) != 0 {
channels.push(ChannelPosition::TopBackRight);
}
assert_eq!((*format_ptr).Format.nChannels as usize, channels.len());
channels
@ -187,8 +228,8 @@ impl Endpoint {
let format = {
fn cmp_guid(a: &winapi::GUID, b: &winapi::GUID) -> bool {
a.Data1 == b.Data1 && a.Data2 == b.Data2 &&
a.Data3 == b.Data3 && a.Data4 == b.Data4
a.Data1 == b.Data1 && a.Data2 == b.Data2 && a.Data3 == b.Data3 &&
a.Data4 == b.Data4
}
if cmp_guid(&(*format_ptr).SubFormat,
&winapi::KSDATAFORMAT_SUBTYPE_IEEE_FLOAT)
@ -207,7 +248,7 @@ impl Endpoint {
(channels, format)
},
f => panic!("Unknown data format returned by GetMixFormat: {:?}", f)
f => panic!("Unknown data format returned by GetMixFormat: {:?}", f),
};
Format {
@ -231,12 +272,15 @@ impl PartialEq for Endpoint {
}
}
impl Eq for Endpoint {}
impl Eq for Endpoint {
}
impl Clone for Endpoint {
#[inline]
fn clone(&self) -> Endpoint {
unsafe { (*self.device).AddRef(); }
unsafe {
(*self.device).AddRef();
}
Endpoint {
device: self.device,
@ -248,10 +292,14 @@ impl Clone for Endpoint {
impl Drop for Endpoint {
#[inline]
fn drop(&mut self) {
unsafe { (*self.device).Release(); }
unsafe {
(*self.device).Release();
}
if let Some(client) = self.future_audio_client.lock().unwrap().take() {
unsafe { (*client.0).Release(); }
unsafe {
(*client.0).Release();
}
}
}
}


@ -1,26 +1,27 @@
use super::Endpoint;
use super::check_result;
use super::com;
use super::kernel32;
use super::ole32;
use super::winapi;
use super::Endpoint;
use super::check_result;
use std::slice;
use std::mem;
use std::ptr;
use std::sync::atomic::AtomicBool;
use std::sync::atomic::Ordering;
use std::slice;
use std::sync::Arc;
use std::sync::Mutex;
use std::sync::atomic::AtomicBool;
use std::sync::atomic::Ordering;
use futures::Poll;
use futures::task::Task;
use futures::task;
use futures::stream::Stream;
use futures::Async;
use futures::Poll;
use futures::stream::Stream;
use futures::task;
use futures::task::Task;
use CreationError;
use ChannelPosition;
use CreationError;
use Format;
use SampleFormat;
use UnknownTypeBuffer;
@ -29,8 +30,10 @@ pub struct EventLoop {
inner: Arc<EventLoopInner>,
}
unsafe impl Send for EventLoop {}
unsafe impl Sync for EventLoop {}
unsafe impl Send for EventLoop {
}
unsafe impl Sync for EventLoop {
}
struct EventLoopInner {
// List of handles that are currently being polled or that are going to be polled. This mutex
@ -67,22 +70,21 @@ struct EventLoopScheduled {
impl EventLoop {
pub fn new() -> EventLoop {
let pending_scheduled_event = unsafe {
kernel32::CreateEventA(ptr::null_mut(), 0, 0, ptr::null())
};
let pending_scheduled_event =
unsafe { kernel32::CreateEventA(ptr::null_mut(), 0, 0, ptr::null()) };
EventLoop {
inner: Arc::new(EventLoopInner {
pending_scheduled_event: pending_scheduled_event,
scheduled: Mutex::new(EventLoopScheduled {
handles: vec![pending_scheduled_event],
task_handles: vec![],
}),
pending_scheduled: Mutex::new(EventLoopScheduled {
handles: vec![],
task_handles: vec![],
})
})
pending_scheduled_event: pending_scheduled_event,
scheduled: Mutex::new(EventLoopScheduled {
handles: vec![pending_scheduled_event],
task_handles: vec![],
}),
pending_scheduled: Mutex::new(EventLoopScheduled {
handles: vec![],
task_handles: vec![],
}),
}),
}
}
@ -101,7 +103,8 @@ impl EventLoop {
// sound needs a buffer.
let result = kernel32::WaitForMultipleObjectsEx(scheduled.handles.len() as u32,
scheduled.handles.as_ptr(),
winapi::FALSE, winapi::INFINITE, /* TODO: allow setting a timeout */
winapi::FALSE,
winapi::INFINITE, /* TODO: allow setting a timeout */
winapi::FALSE /* irrelevant parameter here */);
// Notifying the corresponding task handler.
@ -144,27 +147,30 @@ pub struct SamplesStream {
event_loop: Arc<EventLoopInner>,
inner: Arc<Mutex<VoiceInner>>,
// The event that is signalled whenever a buffer is ready to be submitted to the voice.
event: winapi::HANDLE, // TODO: not deleted
event: winapi::HANDLE, // TODO: not deleted
max_frames_in_buffer: winapi::UINT32,
bytes_per_frame: winapi::WORD,
ready: Arc<AtomicBool>,
}
unsafe impl Send for SamplesStream {}
unsafe impl Sync for SamplesStream {}
unsafe impl Send for SamplesStream {
}
unsafe impl Sync for SamplesStream {
}
struct VoiceInner {
audio_client: *mut winapi::IAudioClient,
render_client: *mut winapi::IAudioRenderClient,
}
unsafe impl Send for Voice {}
unsafe impl Sync for Voice {}
unsafe impl Send for Voice {
}
unsafe impl Sync for Voice {
}
impl Voice {
pub fn new(end_point: &Endpoint, format: &Format, event_loop: &EventLoop)
-> Result<(Voice, SamplesStream), CreationError>
{
-> Result<(Voice, SamplesStream), CreationError> {
unsafe {
// Making sure that COM is initialized.
// It's not actually sure that this is required, but when in doubt do it.
@ -179,14 +185,15 @@ impl Voice {
// Computing the format and initializing the device.
let format = {
let format_attempt = try!(format_to_waveformatextensible(format));
let format_attempt = format_to_waveformatextensible(format)?;
let share_mode = winapi::AUDCLNT_SHAREMODE_SHARED;
// `IsFormatSupported` checks whether the format is supported and fills
// a `WAVEFORMATEX`
let mut dummy_fmt_ptr: *mut winapi::WAVEFORMATEX = mem::uninitialized();
let hresult = (*audio_client).IsFormatSupported(share_mode, &format_attempt.Format,
&mut dummy_fmt_ptr);
let hresult =
(*audio_client)
.IsFormatSupported(share_mode, &format_attempt.Format, &mut dummy_fmt_ptr);
// we free that `WAVEFORMATEX` immediately after because we don't need it
if !dummy_fmt_ptr.is_null() {
ole32::CoTaskMemFree(dummy_fmt_ptr as *mut _);
@ -196,8 +203,7 @@ impl Voice {
// has been found) but we also treat this as an error
match (hresult, check_result(hresult)) {
(_, Err(ref e))
if e.raw_os_error() == Some(winapi::AUDCLNT_E_DEVICE_INVALIDATED) =>
{
if e.raw_os_error() == Some(winapi::AUDCLNT_E_DEVICE_INVALIDATED) => {
(*audio_client).Release();
return Err(CreationError::DeviceNotAvailable);
},
@ -215,10 +221,13 @@ impl Voice {
// finally initializing the audio client
let hresult = (*audio_client).Initialize(share_mode,
winapi::AUDCLNT_STREAMFLAGS_EVENTCALLBACK,
0, 0, &format_attempt.Format, ptr::null());
0,
0,
&format_attempt.Format,
ptr::null());
match check_result(hresult) {
Err(ref e) if e.raw_os_error() == Some(winapi::AUDCLNT_E_DEVICE_INVALIDATED) =>
{
Err(ref e)
if e.raw_os_error() == Some(winapi::AUDCLNT_E_DEVICE_INVALIDATED) => {
(*audio_client).Release();
return Err(CreationError::DeviceNotAvailable);
},
@ -245,7 +254,7 @@ impl Voice {
(*audio_client).Release();
panic!("Failed to call SetEventHandle")
},
Ok(_) => ()
Ok(_) => (),
};
event
@ -257,8 +266,8 @@ impl Voice {
let hresult = (*audio_client).GetBufferSize(&mut max_frames_in_buffer);
match check_result(hresult) {
Err(ref e) if e.raw_os_error() == Some(winapi::AUDCLNT_E_DEVICE_INVALIDATED) =>
{
Err(ref e)
if e.raw_os_error() == Some(winapi::AUDCLNT_E_DEVICE_INVALIDATED) => {
(*audio_client).Release();
return Err(CreationError::DeviceNotAvailable);
},
@ -276,13 +285,13 @@ impl Voice {
let render_client = {
let mut render_client: *mut winapi::IAudioRenderClient = mem::uninitialized();
let hresult = (*audio_client).GetService(&winapi::IID_IAudioRenderClient,
&mut render_client
as *mut *mut winapi::IAudioRenderClient
as *mut _);
&mut render_client as
*mut *mut winapi::IAudioRenderClient as
*mut _);
match check_result(hresult) {
Err(ref e) if e.raw_os_error() == Some(winapi::AUDCLNT_E_DEVICE_INVALIDATED) =>
{
Err(ref e)
if e.raw_os_error() == Some(winapi::AUDCLNT_E_DEVICE_INVALIDATED) => {
(*audio_client).Release();
return Err(CreationError::DeviceNotAvailable);
},
@ -298,9 +307,9 @@ impl Voice {
// Everything went fine.
let inner = Arc::new(Mutex::new(VoiceInner {
audio_client: audio_client,
render_client: render_client,
}));
audio_client: audio_client,
render_client: render_client,
}));
let voice = Voice {
inner: inner.clone(),
@ -354,7 +363,9 @@ impl SamplesStream {
fn schedule(&mut self) {
let mut pending = self.event_loop.pending_scheduled.lock().unwrap();
pending.handles.push(self.event);
pending.task_handles.push((task::park(), self.ready.clone()));
pending
.task_handles
.push((task::park(), self.ready.clone()));
drop(pending);
let result = unsafe { kernel32::SetEvent(self.event_loop.pending_scheduled_event) };
@ -379,7 +390,7 @@ impl Stream for SamplesStream {
self.schedule();
return Ok(Async::NotReady);
},
_ => unreachable!()
_ => unreachable!(),
};
}
@ -403,13 +414,14 @@ impl Stream for SamplesStream {
// Obtaining a pointer to the buffer.
let (buffer_data, buffer_len) = {
let mut buffer: *mut winapi::BYTE = mem::uninitialized();
let hresult = (*inner.render_client).GetBuffer(frames_available,
&mut buffer as *mut *mut _);
check_result(hresult).unwrap(); // FIXME: can return `AUDCLNT_E_DEVICE_INVALIDATED`
let hresult = (*inner.render_client)
.GetBuffer(frames_available, &mut buffer as *mut *mut _);
check_result(hresult).unwrap(); // FIXME: can return `AUDCLNT_E_DEVICE_INVALIDATED`
debug_assert!(!buffer.is_null());
(buffer as *mut _,
frames_available as usize * self.bytes_per_frame as usize / mem::size_of::<f32>()) // FIXME: correct size
frames_available as usize * self.bytes_per_frame as usize /
mem::size_of::<f32>()) // FIXME: correct size
};
let buffer = Buffer {
@ -419,7 +431,9 @@ impl Stream for SamplesStream {
frames: frames_available,
};
Ok(Async::Ready(Some(UnknownTypeBuffer::F32(::Buffer { target: Some(buffer) })))) // FIXME: not necessarily F32
Ok(Async::Ready(Some(UnknownTypeBuffer::F32(::Buffer {
target: Some(buffer),
})))) // FIXME: not necessarily F32
}
};
@ -450,14 +464,13 @@ pub struct Buffer<T> {
frames: winapi::UINT32,
}
unsafe impl<T> Send for Buffer<T> {}
unsafe impl<T> Send for Buffer<T> {
}
impl<T> Buffer<T> {
#[inline]
pub fn get_buffer(&mut self) -> &mut [T] {
unsafe {
slice::from_raw_parts_mut(self.buffer_data, self.buffer_len)
}
unsafe { slice::from_raw_parts_mut(self.buffer_data, self.buffer_len) }
}
#[inline]
@ -472,8 +485,7 @@ impl<T> Buffer<T> {
let hresult = (*inner.render_client).ReleaseBuffer(self.frames as u32, 0);
match check_result(hresult) {
// ignoring the error that is produced if the device has been disconnected
Err(ref e)
if e.raw_os_error() == Some(winapi::AUDCLNT_E_DEVICE_INVALIDATED) => (),
Err(ref e) if e.raw_os_error() == Some(winapi::AUDCLNT_E_DEVICE_INVALIDATED) => (),
e => e.unwrap(),
};
}
@ -481,69 +493,69 @@ impl<T> Buffer<T> {
}
fn format_to_waveformatextensible(format: &Format)
-> Result<winapi::WAVEFORMATEXTENSIBLE, CreationError>
{
-> Result<winapi::WAVEFORMATEXTENSIBLE, CreationError> {
Ok(winapi::WAVEFORMATEXTENSIBLE {
Format: winapi::WAVEFORMATEX {
wFormatTag: match format.data_type {
SampleFormat::I16 => winapi::WAVE_FORMAT_PCM,
SampleFormat::F32 => winapi::WAVE_FORMAT_EXTENSIBLE,
SampleFormat::U16 => return Err(CreationError::FormatNotSupported),
},
nChannels: format.channels.len() as winapi::WORD,
nSamplesPerSec: format.samples_rate.0 as winapi::DWORD,
nAvgBytesPerSec: format.channels.len() as winapi::DWORD *
format.samples_rate.0 as winapi::DWORD *
format.data_type.get_sample_size() as winapi::DWORD,
nBlockAlign: format.channels.len() as winapi::WORD *
format.data_type.get_sample_size() as winapi::WORD,
wBitsPerSample: 8 * format.data_type.get_sample_size() as winapi::WORD,
cbSize: match format.data_type {
SampleFormat::I16 => 0,
SampleFormat::F32 => (mem::size_of::<winapi::WAVEFORMATEXTENSIBLE>() -
mem::size_of::<winapi::WAVEFORMATEX>()) as winapi::WORD,
SampleFormat::U16 => return Err(CreationError::FormatNotSupported),
},
},
Samples: 8 * format.data_type.get_sample_size() as winapi::WORD,
dwChannelMask: {
let mut mask = 0;
for &channel in format.channels.iter() {
let raw_value = match channel {
ChannelPosition::FrontLeft => winapi::SPEAKER_FRONT_LEFT,
ChannelPosition::FrontRight => winapi::SPEAKER_FRONT_RIGHT,
ChannelPosition::FrontCenter => winapi::SPEAKER_FRONT_CENTER,
ChannelPosition::LowFrequency => winapi::SPEAKER_LOW_FREQUENCY,
ChannelPosition::BackLeft => winapi::SPEAKER_BACK_LEFT,
ChannelPosition::BackRight => winapi::SPEAKER_BACK_RIGHT,
ChannelPosition::FrontLeftOfCenter => winapi::SPEAKER_FRONT_LEFT_OF_CENTER,
ChannelPosition::FrontRightOfCenter => winapi::SPEAKER_FRONT_RIGHT_OF_CENTER,
ChannelPosition::BackCenter => winapi::SPEAKER_BACK_CENTER,
ChannelPosition::SideLeft => winapi::SPEAKER_SIDE_LEFT,
ChannelPosition::SideRight => winapi::SPEAKER_SIDE_RIGHT,
ChannelPosition::TopCenter => winapi::SPEAKER_TOP_CENTER,
ChannelPosition::TopFrontLeft => winapi::SPEAKER_TOP_FRONT_LEFT,
ChannelPosition::TopFrontCenter => winapi::SPEAKER_TOP_FRONT_CENTER,
ChannelPosition::TopFrontRight => winapi::SPEAKER_TOP_FRONT_RIGHT,
ChannelPosition::TopBackLeft => winapi::SPEAKER_TOP_BACK_LEFT,
ChannelPosition::TopBackCenter => winapi::SPEAKER_TOP_BACK_CENTER,
ChannelPosition::TopBackRight => winapi::SPEAKER_TOP_BACK_RIGHT,
};
Format: winapi::WAVEFORMATEX {
wFormatTag: match format.data_type {
SampleFormat::I16 => winapi::WAVE_FORMAT_PCM,
SampleFormat::F32 => winapi::WAVE_FORMAT_EXTENSIBLE,
SampleFormat::U16 => return Err(CreationError::FormatNotSupported),
},
nChannels: format.channels.len() as winapi::WORD,
nSamplesPerSec: format.samples_rate.0 as winapi::DWORD,
nAvgBytesPerSec: format.channels.len() as winapi::DWORD *
format.samples_rate.0 as winapi::DWORD *
format.data_type.get_sample_size() as winapi::DWORD,
nBlockAlign: format.channels.len() as winapi::WORD *
format.data_type.get_sample_size() as winapi::WORD,
wBitsPerSample: 8 * format.data_type.get_sample_size() as winapi::WORD,
cbSize: match format.data_type {
SampleFormat::I16 => 0,
SampleFormat::F32 => (mem::size_of::<winapi::WAVEFORMATEXTENSIBLE>() -
mem::size_of::<winapi::WAVEFORMATEX>()) as
winapi::WORD,
SampleFormat::U16 => return Err(CreationError::FormatNotSupported),
},
},
Samples: 8 * format.data_type.get_sample_size() as winapi::WORD,
dwChannelMask: {
let mut mask = 0;
for &channel in format.channels.iter() {
let raw_value = match channel {
ChannelPosition::FrontLeft => winapi::SPEAKER_FRONT_LEFT,
ChannelPosition::FrontRight => winapi::SPEAKER_FRONT_RIGHT,
ChannelPosition::FrontCenter => winapi::SPEAKER_FRONT_CENTER,
ChannelPosition::LowFrequency => winapi::SPEAKER_LOW_FREQUENCY,
ChannelPosition::BackLeft => winapi::SPEAKER_BACK_LEFT,
ChannelPosition::BackRight => winapi::SPEAKER_BACK_RIGHT,
ChannelPosition::FrontLeftOfCenter => winapi::SPEAKER_FRONT_LEFT_OF_CENTER,
ChannelPosition::FrontRightOfCenter => winapi::SPEAKER_FRONT_RIGHT_OF_CENTER,
ChannelPosition::BackCenter => winapi::SPEAKER_BACK_CENTER,
ChannelPosition::SideLeft => winapi::SPEAKER_SIDE_LEFT,
ChannelPosition::SideRight => winapi::SPEAKER_SIDE_RIGHT,
ChannelPosition::TopCenter => winapi::SPEAKER_TOP_CENTER,
ChannelPosition::TopFrontLeft => winapi::SPEAKER_TOP_FRONT_LEFT,
ChannelPosition::TopFrontCenter => winapi::SPEAKER_TOP_FRONT_CENTER,
ChannelPosition::TopFrontRight => winapi::SPEAKER_TOP_FRONT_RIGHT,
ChannelPosition::TopBackLeft => winapi::SPEAKER_TOP_BACK_LEFT,
ChannelPosition::TopBackCenter => winapi::SPEAKER_TOP_BACK_CENTER,
ChannelPosition::TopBackRight => winapi::SPEAKER_TOP_BACK_RIGHT,
};
// channels must be in the right order
if raw_value <= mask {
return Err(CreationError::FormatNotSupported);
}
// channels must be in the right order
if raw_value <= mask {
return Err(CreationError::FormatNotSupported);
}
mask = mask | raw_value;
}
mask = mask | raw_value;
}
mask
},
SubFormat: match format.data_type {
SampleFormat::I16 => winapi::KSDATAFORMAT_SUBTYPE_PCM,
SampleFormat::F32 => winapi::KSDATAFORMAT_SUBTYPE_IEEE_FLOAT,
SampleFormat::U16 => return Err(CreationError::FormatNotSupported),
},
})
mask
},
SubFormat: match format.data_type {
SampleFormat::I16 => winapi::KSDATAFORMAT_SUBTYPE_PCM,
SampleFormat::F32 => winapi::KSDATAFORMAT_SUBTYPE_IEEE_FLOAT,
SampleFormat::U16 => return Err(CreationError::FormatNotSupported),
},
})
}