RFC: Rework the API [WIP] (#165)

* Rework the API to not use futures anymore

* Add some comments

* Update the MacOS backend

* Restore the null implementation

* Add an emscripten backend

* Remove erroneously added feature

* Fix to_f32 formula

* [WIP] Alsa backend

* Alsa backend compiling

* Working ALSA backend

* Fix tests

* Move WASAPI endpoint to endpoint module

* Fix WASAPI warnings

* Rework the WASAPI backend

* Check overflows for voice ID

* Add comments and minor fixes to WASAPI backend

* Add a changelog
This commit is contained in:
tomaka 2017-10-18 20:24:05 +02:00 committed by GitHub
parent 4b019243c2
commit 6ae01f437c
13 changed files with 1472 additions and 1447 deletions

8
CHANGELOG.md Normal file
View File

@ -0,0 +1,8 @@
# Unreleased (major)
- Removed the dependency on the `futures` library.
- Removed the `Voice` and `SamplesStream` types.
- Added `EventLoop::build_voice`, `EventLoop::destroy_voice`, `EventLoop::play`,
and `EventLoop::pause` that can be used to create, destroy, play and pause voices.
- Added a `VoiceId` struct that is now used to identify a voice owned by an `EventLoop`.
- Changed `EventLoop::run()` to take a callback that is called whenever a voice requires sound data.

View File

@ -9,7 +9,6 @@ license = "Apache-2.0"
keywords = ["audio", "sound"] keywords = ["audio", "sound"]
[dependencies] [dependencies]
futures = "0.1.1"
libc = "0.2" libc = "0.2"
lazy_static = "0.2" lazy_static = "0.2"

View File

@ -1,23 +1,9 @@
extern crate cpal; extern crate cpal;
extern crate futures;
use futures::stream::Stream;
use futures::task;
use futures::task::Executor;
use futures::task::Run;
use std::sync::Arc; use std::sync::Arc;
use std::thread; use std::thread;
use std::time::Duration; use std::time::Duration;
struct MyExecutor;
impl Executor for MyExecutor {
fn execute(&self, r: Run) {
r.run();
}
}
fn main() { fn main() {
let endpoint = cpal::default_endpoint().expect("Failed to get default endpoint"); let endpoint = cpal::default_endpoint().expect("Failed to get default endpoint");
let format = endpoint let format = endpoint
@ -27,18 +13,15 @@ fn main() {
.expect("Failed to get endpoint format"); .expect("Failed to get endpoint format");
let event_loop = cpal::EventLoop::new(); let event_loop = cpal::EventLoop::new();
let executor = Arc::new(MyExecutor); let voice_id = event_loop.build_voice(&endpoint, &format).unwrap();
event_loop.play(voice_id);
let (mut voice, stream) = cpal::Voice::new(&endpoint, &format, &event_loop)
.expect("Failed to create a voice");
// Produce a sinusoid of maximum amplitude. // Produce a sinusoid of maximum amplitude.
let samples_rate = format.samples_rate.0 as f32; let samples_rate = format.samples_rate.0 as f32;
let mut data_source = (0u64..).map(move |t| t as f32 * 440.0 * 2.0 * 3.141592 / samples_rate) // 440 Hz let mut data_source = (0u64..).map(move |t| t as f32 * 440.0 * 2.0 * 3.141592 / samples_rate) // 440 Hz
.map(move |t| t.sin()); .map(move |t| t.sin());
voice.play(); event_loop.run(move |_, buffer| {
task::spawn(stream.for_each(move |buffer| -> Result<_, ()> {
match buffer { match buffer {
cpal::UnknownTypeBuffer::U16(mut buffer) => { cpal::UnknownTypeBuffer::U16(mut buffer) => {
for (sample, value) in buffer for (sample, value) in buffer
@ -75,16 +58,5 @@ fn main() {
} }
}, },
}; };
Ok(())
})).execute(executor);
thread::spawn(move || loop {
thread::sleep(Duration::from_millis(500));
voice.pause();
thread::sleep(Duration::from_millis(500));
voice.play();
}); });
event_loop.run();
} }

View File

@ -13,15 +13,9 @@ use UnknownTypeBuffer;
use std::{cmp, ffi, iter, mem, ptr}; use std::{cmp, ffi, iter, mem, ptr};
use std::sync::{Arc, Mutex}; use std::sync::{Arc, Mutex};
use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
use std::vec::IntoIter as VecIntoIter; use std::vec::IntoIter as VecIntoIter;
use futures::Async;
use futures::Poll;
use futures::stream::Stream;
use futures::task;
use futures::task::Task;
pub type SupportedFormatsIterator = VecIntoIter<Format>; pub type SupportedFormatsIterator = VecIntoIter<Format>;
mod enumerate; mod enumerate;
@ -256,205 +250,46 @@ impl Endpoint {
} }
pub struct EventLoop { pub struct EventLoop {
inner: Arc<EventLoopInner>, // Each newly-created voice gets a new ID from this counter. The counter is then incremented.
} next_voice_id: AtomicUsize, // TODO: use AtomicU64 when stable?
struct EventLoopInner { // A trigger that uses a `pipe()` as backend. Signalled whenever a new command is ready, so
// Descriptors that we are currently waiting upon. This member is always locked while `run()` // that `poll()` can wake up and pick the changes.
// is executed, ie. most of the time.
//
// Note that for `current_wait`, the first element of `descriptors` is always
// `pending_wait_signal`. Therefore the length of `descriptors` is always one more than
// `voices`.
current_wait: Mutex<PollDescriptors>,
// Since we can't add elements to `current_wait` (as it's locked), we add them to
// `pending_wait`. Once that's done, we signal `pending_wait_signal` so that the `run()`
// function can pause and add the content of `pending_wait` to `current_wait`.
pending_wait: Mutex<PollDescriptors>,
// A trigger that uses a `pipe` as backend. Always the first element
// of `current_wait.descriptors`. Should be notified when an element is added
// to `pending_wait` so that the current wait can stop and take the pending wait into
// account.
pending_trigger: Trigger, pending_trigger: Trigger,
// This field is locked by the `run()` method.
// The mutex also ensures that only one thread at a time has `run()` running.
run_context: Mutex<RunContext>,
// Commands processed by the `run()` method that is currently running.
// TODO: use a lock-free container
commands: Mutex<Vec<Command>>,
} }
struct PollDescriptors { unsafe impl Send for EventLoop {
// Descriptors to wait for. }
unsafe impl Sync for EventLoop {
}
enum Command {
NewVoice(VoiceInner),
DestroyVoice(VoiceId),
}
struct RunContext {
// Descriptors to wait for. Always contains `pending_trigger.read_fd()` as first element.
descriptors: Vec<libc::pollfd>, descriptors: Vec<libc::pollfd>,
// List of voices that are written in `descriptors`. // List of voices that are written in `descriptors`.
voices: Vec<Arc<VoiceInner>>, voices: Vec<VoiceInner>,
}
unsafe impl Send for EventLoopInner {
}
unsafe impl Sync for EventLoopInner {
}
impl EventLoop {
#[inline]
pub fn new() -> EventLoop {
let pending_trigger = Trigger::new();
EventLoop {
inner: Arc::new(EventLoopInner {
current_wait: Mutex::new(PollDescriptors {
descriptors: vec![
libc::pollfd {
fd: pending_trigger.read_fd(),
events: libc::POLLIN,
revents: 0,
},
],
voices: Vec::new(),
}),
pending_wait: Mutex::new(PollDescriptors {
descriptors: Vec::new(),
voices: Vec::new(),
}),
pending_trigger: pending_trigger,
}),
}
}
#[inline]
pub fn run(&self) {
unsafe {
let mut current_wait = self.inner.current_wait.lock().unwrap();
loop {
let ret = libc::poll(current_wait.descriptors.as_mut_ptr(),
current_wait.descriptors.len() as libc::nfds_t,
-1 /* infinite */);
assert!(ret >= 0, "poll() failed");
if ret == 0 {
continue;
}
// If the `pending_wait_signal` was signaled, add the pending waits to
// the current waits.
if current_wait.descriptors[0].revents != 0 {
current_wait.descriptors[0].revents = 0;
let mut pending = self.inner.pending_wait.lock().unwrap();
current_wait.descriptors.append(&mut pending.descriptors);
current_wait.voices.append(&mut pending.voices);
// Emptying the signal.
self.inner.pending_trigger.clear_pipe();
}
// Check each individual descriptor for events.
let mut i_voice = 0;
let mut i_descriptor = 1;
while i_voice < current_wait.voices.len() {
let kind = {
let scheduled = current_wait.voices[i_voice].scheduled.lock().unwrap();
match *scheduled {
Some(ref scheduled) => scheduled.kind,
None => panic!("current wait unscheduled task"),
}
};
// Depending on the kind of scheduling the number of descriptors corresponding
// to the voice and the events associated are different
match kind {
ScheduledKind::WaitPCM => {
let mut revent = mem::uninitialized();
{
let channel = *current_wait.voices[i_voice].channel.lock().unwrap();
let num_descriptors =
current_wait.voices[i_voice].num_descriptors as libc::c_uint;
check_errors(alsa::snd_pcm_poll_descriptors_revents(channel, current_wait.descriptors
.as_mut_ptr().offset(i_descriptor),
num_descriptors, &mut revent)).unwrap();
}
if (revent as libc::c_short & libc::POLLOUT) != 0 {
let scheduled = current_wait.voices[i_voice]
.scheduled
.lock()
.unwrap()
.take();
scheduled.unwrap().task.unpark();
for _ in 0 .. current_wait.voices[i_voice].num_descriptors {
current_wait.descriptors.remove(i_descriptor as usize);
}
current_wait.voices.remove(i_voice);
} else {
i_descriptor += current_wait.voices[i_voice].num_descriptors as
isize;
i_voice += 1;
}
},
ScheduledKind::WaitResume => {
if current_wait.descriptors[i_descriptor as usize].revents != 0 {
// Unpark the task
let scheduled = current_wait.voices[i_voice]
.scheduled
.lock()
.unwrap()
.take();
scheduled.unwrap().task.unpark();
// Emptying the signal.
let mut out = 0u64;
let ret =
libc::read(current_wait.descriptors[i_descriptor as usize].fd,
&mut out as *mut u64 as *mut _,
8);
assert_eq!(ret, 8);
// Remove from current waiting poll descriptors
current_wait.descriptors.remove(i_descriptor as usize);
current_wait.voices.remove(i_voice);
} else {
i_descriptor += 1;
i_voice += 1;
}
},
}
}
}
}
}
}
pub struct Voice {
inner: Arc<VoiceInner>,
}
pub struct Buffer<T> {
inner: Arc<VoiceInner>,
buffer: Vec<T>,
}
pub struct SamplesStream {
inner: Arc<VoiceInner>,
}
pub struct Scheduled {
task: Task,
kind: ScheduledKind,
}
#[derive(Clone, Copy)]
pub enum ScheduledKind {
WaitResume,
WaitPCM,
} }
struct VoiceInner { struct VoiceInner {
// The event loop used to create the voice. // The id of the voice.
event_loop: Arc<EventLoopInner>, id: VoiceId,
// The ALSA channel. // The ALSA channel.
channel: Mutex<*mut alsa::snd_pcm_t>, channel: *mut alsa::snd_pcm_t,
// When converting between file descriptors and `snd_pcm_t`, this is the number of // When converting between file descriptors and `snd_pcm_t`, this is the number of
// file descriptors that this `snd_pcm_t` uses. // file descriptors that this `snd_pcm_t` uses.
@ -472,9 +307,6 @@ struct VoiceInner {
// Minimum number of samples to put in the buffer. // Minimum number of samples to put in the buffer.
period_len: usize, period_len: usize,
// If `Some`, something previously called `schedule` on the stream.
scheduled: Mutex<Option<Scheduled>>,
// Wherease the sample stream is paused // Wherease the sample stream is paused
is_paused: AtomicBool, is_paused: AtomicBool,
@ -483,159 +315,180 @@ struct VoiceInner {
resume_trigger: Trigger, resume_trigger: Trigger,
} }
unsafe impl Send for VoiceInner { #[derive(Debug, Clone, PartialEq, Eq, Hash)]
} pub struct VoiceId(usize);
unsafe impl Sync for VoiceInner {
}
impl SamplesStream { impl EventLoop {
#[inline] #[inline]
fn schedule(&mut self, kind: ScheduledKind) { pub fn new() -> EventLoop {
unsafe { let pending_trigger = Trigger::new();
let channel = self.inner.channel.lock().unwrap();
// We start by filling `scheduled`. let run_context = Mutex::new(RunContext {
*self.inner.scheduled.lock().unwrap() = Some(Scheduled { descriptors: Vec::new(), // TODO: clearify in doc initial value not necessary
task: task::park(), voices: Vec::new(),
kind: kind,
}); });
let mut pending_wait = self.inner.event_loop.pending_wait.lock().unwrap(); EventLoop {
match kind { next_voice_id: AtomicUsize::new(0),
ScheduledKind::WaitPCM => { pending_trigger: pending_trigger,
// In this function we turn the `snd_pcm_t` into a collection of file descriptors. run_context,
// And we add these descriptors to `event_loop.pending_wait.descriptors`. commands: Mutex::new(Vec::new()),
pending_wait.descriptors.reserve(self.inner.num_descriptors); }
}
let len = pending_wait.descriptors.len(); #[inline]
let filled = alsa::snd_pcm_poll_descriptors(*channel, pub fn run<F>(&self, mut callback: F) -> !
pending_wait where F: FnMut(VoiceId, UnknownTypeBuffer)
.descriptors {
.as_mut_ptr() self.run_inner(&mut callback)
.offset(len as isize), }
self.inner.num_descriptors as
libc::c_uint); fn run_inner(&self, callback: &mut FnMut(VoiceId, UnknownTypeBuffer)) -> ! {
debug_assert_eq!(filled, self.inner.num_descriptors as libc::c_int); unsafe {
pending_wait let mut run_context = self.run_context.lock().unwrap();
.descriptors let run_context = &mut *run_context;
.set_len(len + self.inner.num_descriptors);
loop {
{
let mut commands_lock = self.commands.lock().unwrap();
if !commands_lock.is_empty() {
for command in commands_lock.drain(..) {
match command {
Command::DestroyVoice(voice_id) => {
run_context.voices.retain(|v| v.id != voice_id);
}, },
ScheduledKind::WaitResume => { Command::NewVoice(voice_inner) => {
// And we add the descriptor corresponding to the resume signal run_context.voices.push(voice_inner);
// to `event_loop.pending_wait.descriptors`. },
pending_wait.descriptors.push(libc::pollfd { }
fd: self.inner.resume_trigger.read_fd(), }
run_context.descriptors = vec![
libc::pollfd {
fd: self.pending_trigger.read_fd(),
events: libc::POLLIN, events: libc::POLLIN,
revents: 0, revents: 0,
});
},
} }
];
// We also fill `voices`. for voice in run_context.voices.iter() {
pending_wait.voices.push(self.inner.clone()); run_context.descriptors.reserve(voice.num_descriptors);
let len = run_context.descriptors.len();
// Now that `pending_wait` received additional descriptors, we signal the event let filled = alsa::snd_pcm_poll_descriptors(voice.channel,
// so that our event loops can pick it up. run_context.descriptors
drop(pending_wait); .as_mut_ptr()
self.inner.event_loop.pending_trigger.wakeup(); .offset(len as isize),
voice.num_descriptors as
libc::c_uint);
debug_assert_eq!(filled, voice.num_descriptors as libc::c_int);
run_context.descriptors.set_len(len + voice.num_descriptors);
}
} }
} }
}
impl Stream for SamplesStream { let ret = libc::poll(run_context.descriptors.as_mut_ptr(),
type Item = UnknownTypeBuffer; run_context.descriptors.len() as libc::nfds_t,
type Error = (); -1 /* infinite */);
assert!(ret >= 0, "poll() failed");
fn poll(&mut self) -> Poll<Option<Self::Item>, Self::Error> { if ret == 0 {
// If paused then we schedule the task and return `NotReady` continue;
if self.inner.is_paused.load(Ordering::Relaxed) { }
self.schedule(ScheduledKind::WaitResume);
return Ok(Async::NotReady); // If the `pending_trigger` was signaled, we need to process the comands.
if run_context.descriptors[0].revents != 0 {
run_context.descriptors[0].revents = 0;
self.pending_trigger.clear_pipe();
}
// Iterate over each individual voice/descriptor.
let mut i_voice = 0;
let mut i_descriptor = 1;
while (i_descriptor as usize) < run_context.descriptors.len() {
let voice_inner = run_context.voices.get_mut(i_voice).unwrap();
// Check whether the event is `POLLOUT`. If not, `continue`.
{
let mut revent = mem::uninitialized();
{
let num_descriptors = voice_inner.num_descriptors as libc::c_uint;
check_errors(alsa::snd_pcm_poll_descriptors_revents(voice_inner.channel, run_context.descriptors
.as_mut_ptr().offset(i_descriptor),
num_descriptors, &mut revent)).unwrap();
}
if (revent as libc::c_short & libc::POLLOUT) == 0 {
i_descriptor += voice_inner.num_descriptors as isize;
i_voice += 1;
continue;
}
} }
// Determine the number of samples that are available to write. // Determine the number of samples that are available to write.
let available = { let available = {
let channel = self.inner.channel.lock().expect("could not lock channel"); let available = alsa::snd_pcm_avail(voice_inner.channel); // TODO: what about snd_pcm_avail_update?
let available = unsafe { alsa::snd_pcm_avail(*channel) }; // TODO: what about snd_pcm_avail_update?
if available == -32 { if available == -32 {
// buffer underrun // buffer underrun
self.inner.buffer_len voice_inner.buffer_len
} else if available < 0 { } else if available < 0 {
check_errors(available as libc::c_int).expect("buffer is not available"); check_errors(available as libc::c_int).expect("buffer is not available");
unreachable!() unreachable!()
} else { } else {
(available * self.inner.num_channels as alsa::snd_pcm_sframes_t) as usize (available * voice_inner.num_channels as alsa::snd_pcm_sframes_t) as usize
} }
}; };
// If we don't have one period ready, schedule the task and return `NotReady`. if available < voice_inner.period_len {
if available < self.inner.period_len { i_descriptor += voice_inner.num_descriptors as isize;
self.schedule(ScheduledKind::WaitPCM); i_voice += 1;
return Ok(Async::NotReady); continue;
} }
// We now sure that we're ready to write data. let voice_id = voice_inner.id.clone();
match self.inner.sample_format {
// We're now sure that we're ready to write data.
let buffer = match voice_inner.sample_format {
SampleFormat::I16 => { SampleFormat::I16 => {
let buffer = Buffer { let buffer = Buffer {
buffer: iter::repeat(unsafe { mem::uninitialized() }) voice_inner: voice_inner,
buffer: iter::repeat(mem::uninitialized())
.take(available) .take(available)
.collect(), .collect(),
inner: self.inner.clone(),
}; };
Ok(Async::Ready((Some(UnknownTypeBuffer::I16(::Buffer { target: Some(buffer) }))))) UnknownTypeBuffer::I16(::Buffer { target: Some(buffer) })
}, },
SampleFormat::U16 => { SampleFormat::U16 => {
let buffer = Buffer { let buffer = Buffer {
buffer: iter::repeat(unsafe { mem::uninitialized() }) voice_inner: voice_inner,
buffer: iter::repeat(mem::uninitialized())
.take(available) .take(available)
.collect(), .collect(),
inner: self.inner.clone(),
}; };
Ok(Async::Ready((Some(UnknownTypeBuffer::U16(::Buffer { target: Some(buffer) }))))) UnknownTypeBuffer::U16(::Buffer { target: Some(buffer) })
}, },
SampleFormat::F32 => { SampleFormat::F32 => {
let buffer = Buffer { let buffer = Buffer {
buffer: iter::repeat(unsafe { mem::uninitialized() }) voice_inner: voice_inner,
buffer: iter::repeat(0.0) // we don't use mem::uninitialized in case of sNaN
.take(available) .take(available)
.collect(), .collect(),
inner: self.inner.clone(),
}; };
Ok(Async::Ready((Some(UnknownTypeBuffer::F32(::Buffer { target: Some(buffer) }))))) UnknownTypeBuffer::F32(::Buffer { target: Some(buffer) })
}, },
} };
}
}
/// Wrapper around `hw_params`. callback(voice_id, buffer);
struct HwParams(*mut alsa::snd_pcm_hw_params_t); }
}
}
}
impl HwParams { pub fn build_voice(&self, endpoint: &Endpoint, format: &Format)
pub fn alloc() -> HwParams { -> Result<VoiceId, CreationError> {
unsafe {
let mut hw_params = mem::uninitialized();
check_errors(alsa::snd_pcm_hw_params_malloc(&mut hw_params))
.expect("unable to get hardware parameters");
HwParams(hw_params)
}
}
}
impl Drop for HwParams {
fn drop(&mut self) {
unsafe {
alsa::snd_pcm_hw_params_free(self.0);
}
}
}
impl Voice {
pub fn new(endpoint: &Endpoint, format: &Format, event_loop: &EventLoop)
-> Result<(Voice, SamplesStream), CreationError> {
unsafe { unsafe {
let name = ffi::CString::new(endpoint.0.clone()).expect("unable to clone endpoint"); let name = ffi::CString::new(endpoint.0.clone()).expect("unable to clone endpoint");
@ -647,11 +500,18 @@ impl Voice {
e => check_errors(e).expect("Device unavailable") e => check_errors(e).expect("Device unavailable")
} }
// TODO: check endianess let data_type = if cfg!(target_endian = "big") {
let data_type = match format.data_type { match format.data_type {
SampleFormat::I16 => alsa::SND_PCM_FORMAT_S16_BE,
SampleFormat::U16 => alsa::SND_PCM_FORMAT_U16_BE,
SampleFormat::F32 => alsa::SND_PCM_FORMAT_FLOAT_BE,
}
} else {
match format.data_type {
SampleFormat::I16 => alsa::SND_PCM_FORMAT_S16_LE, SampleFormat::I16 => alsa::SND_PCM_FORMAT_S16_LE,
SampleFormat::U16 => alsa::SND_PCM_FORMAT_U16_LE, SampleFormat::U16 => alsa::SND_PCM_FORMAT_U16_LE,
SampleFormat::F32 => alsa::SND_PCM_FORMAT_FLOAT_LE, SampleFormat::F32 => alsa::SND_PCM_FORMAT_FLOAT_LE,
}
}; };
let hw_params = HwParams::alloc(); let hw_params = HwParams::alloc();
@ -718,36 +578,68 @@ impl Voice {
num_descriptors as usize num_descriptors as usize
}; };
let samples_stream_inner = Arc::new(VoiceInner { let new_voice_id = VoiceId(self.next_voice_id.fetch_add(1, Ordering::Relaxed));
event_loop: event_loop.inner.clone(), assert_ne!(new_voice_id.0, usize::max_value()); // check for overflows
channel: Mutex::new(playback_handle),
let voice_inner = VoiceInner {
id: new_voice_id.clone(),
channel: playback_handle,
sample_format: format.data_type, sample_format: format.data_type,
num_descriptors: num_descriptors, num_descriptors: num_descriptors,
num_channels: format.channels.len() as u16, num_channels: format.channels.len() as u16,
buffer_len: buffer_len, buffer_len: buffer_len,
period_len: period_len, period_len: period_len,
scheduled: Mutex::new(None),
is_paused: AtomicBool::new(true), is_paused: AtomicBool::new(true),
resume_trigger: Trigger::new(), resume_trigger: Trigger::new(),
}); };
Ok((Voice { inner: samples_stream_inner.clone() }, self.commands.lock().unwrap().push(Command::NewVoice(voice_inner));
SamplesStream { inner: samples_stream_inner })) self.pending_trigger.wakeup();
Ok(new_voice_id)
} }
} }
#[inline] #[inline]
pub fn play(&mut self) { pub fn destroy_voice(&self, voice_id: VoiceId) {
// If it was paused then we resume and signal self.commands.lock().unwrap().push(Command::DestroyVoice(voice_id));
// FIXME: the signal is send even if the event loop wasn't waiting for resume, is that an issue ? self.pending_trigger.wakeup();
if self.inner.is_paused.swap(false, Ordering::Relaxed) {
self.inner.resume_trigger.wakeup();
}
} }
#[inline] #[inline]
pub fn pause(&mut self) { pub fn play(&self, _: VoiceId) {
self.inner.is_paused.store(true, Ordering::Relaxed); //unimplemented!()
}
#[inline]
pub fn pause(&self, _: VoiceId) {
unimplemented!()
}
}
pub struct Buffer<'a, T: 'a> {
voice_inner: &'a mut VoiceInner,
buffer: Vec<T>,
}
/// Wrapper around `hw_params`.
struct HwParams(*mut alsa::snd_pcm_hw_params_t);
impl HwParams {
pub fn alloc() -> HwParams {
unsafe {
let mut hw_params = mem::uninitialized();
check_errors(alsa::snd_pcm_hw_params_malloc(&mut hw_params))
.expect("unable to get hardware parameters");
HwParams(hw_params)
}
}
}
impl Drop for HwParams {
fn drop(&mut self) {
unsafe {
alsa::snd_pcm_hw_params_free(self.0);
}
} }
} }
@ -755,12 +647,12 @@ impl Drop for VoiceInner {
#[inline] #[inline]
fn drop(&mut self) { fn drop(&mut self) {
unsafe { unsafe {
alsa::snd_pcm_close(*self.channel.lock().expect("drop for voice")); alsa::snd_pcm_close(self.channel);
} }
} }
} }
impl<T> Buffer<T> { impl<'a, T> Buffer<'a, T> {
#[inline] #[inline]
pub fn buffer(&mut self) -> &mut [T] { pub fn buffer(&mut self) -> &mut [T] {
&mut self.buffer &mut self.buffer
@ -772,21 +664,17 @@ impl<T> Buffer<T> {
} }
pub fn finish(self) { pub fn finish(self) {
let to_write = (self.buffer.len() / self.inner.num_channels as usize) as let to_write = (self.buffer.len() / self.voice_inner.num_channels as usize) as
alsa::snd_pcm_uframes_t; alsa::snd_pcm_uframes_t;
let channel = self.inner
.channel
.lock()
.expect("Buffer channel lock failed");
unsafe { unsafe {
loop { loop {
let result = let result =
alsa::snd_pcm_writei(*channel, self.buffer.as_ptr() as *const _, to_write); alsa::snd_pcm_writei(self.voice_inner.channel, self.buffer.as_ptr() as *const _, to_write);
if result == -32 { if result == -32 {
// buffer underrun // buffer underrun
alsa::snd_pcm_prepare(*channel); alsa::snd_pcm_prepare(self.voice_inner.channel);
} else if result < 0 { } else if result < 0 {
check_errors(result as libc::c_int).expect("could not write pcm"); check_errors(result as libc::c_int).expect("could not write pcm");
} else { } else {

View File

@ -10,11 +10,7 @@ use SampleFormat;
use SamplesRate; use SamplesRate;
use UnknownTypeBuffer; use UnknownTypeBuffer;
use futures::Async; use std::mem;
use futures::Poll;
use futures::stream::Stream;
use futures::task;
use futures::task::Task;
use std::sync::{Arc, Mutex}; use std::sync::{Arc, Mutex};
use std::thread; use std::thread;
use std::time::Duration; use std::time::Duration;
@ -49,27 +45,170 @@ impl Endpoint {
} }
} }
pub struct EventLoop; // The ID of a voice is its index within the `voices` array of the events loop.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct VoiceId(usize);
pub struct EventLoop {
// This `Arc` is shared with all the callbacks of coreaudio.
active_callbacks: Arc<ActiveCallbacks>,
voices: Mutex<Vec<Option<VoiceInner>>>,
}
struct ActiveCallbacks {
// Whenever the `run()` method is called with a callback, this callback is put in this list.
callbacks: Mutex<Vec<&'static mut FnMut(VoiceId, UnknownTypeBuffer)>>,
}
struct VoiceInner {
playing: bool,
audio_unit: AudioUnit,
}
impl EventLoop { impl EventLoop {
#[inline] #[inline]
pub fn new() -> EventLoop { pub fn new() -> EventLoop {
EventLoop EventLoop {
active_callbacks: Arc::new(ActiveCallbacks {
callbacks: Mutex::new(Vec::new()),
}),
voices: Mutex::new(Vec::new()),
} }
}
#[inline] #[inline]
pub fn run(&self) { pub fn run<F>(&self, mut callback: F) -> !
where F: FnMut(VoiceId, UnknownTypeBuffer)
{
let callback: &mut FnMut(VoiceId, UnknownTypeBuffer) = &mut callback;
self.active_callbacks.callbacks.lock().unwrap().push(unsafe { mem::transmute(callback) });
loop { loop {
// So the loop does not get optimised out in --release // So the loop does not get optimised out in --release
thread::sleep(Duration::new(1u64, 0u32)); thread::sleep(Duration::new(1u64, 0u32));
} }
// Note: if we ever change this API so that `run` can return, then it is critical that
// we remove the callback from `active_callbacks`.
}
#[inline]
pub fn build_voice(&self, endpoint: &Endpoint, format: &Format)
-> Result<VoiceId, CreationError>
{
fn convert_error(err: coreaudio::Error) -> CreationError {
match err {
coreaudio::Error::RenderCallbackBufferFormatDoesNotMatchAudioUnitStreamFormat |
coreaudio::Error::NoKnownSubtype |
coreaudio::Error::AudioUnit(coreaudio::error::AudioUnitError::FormatNotSupported) |
coreaudio::Error::AudioCodec(_) |
coreaudio::Error::AudioFormat(_) => CreationError::FormatNotSupported,
_ => CreationError::DeviceNotAvailable,
}
}
let mut audio_unit = {
let au_type = if cfg!(target_os = "ios") {
// The DefaultOutput unit isn't available in iOS unfortunately. RemoteIO is a sensible replacement.
// See
// https://developer.apple.com/library/content/documentation/MusicAudio/Conceptual/AudioUnitHostingGuide_iOS/UsingSpecificAudioUnits/UsingSpecificAudioUnits.html
coreaudio::audio_unit::IOType::RemoteIO
} else {
coreaudio::audio_unit::IOType::DefaultOutput
};
AudioUnit::new(au_type).map_err(convert_error)?
};
// Determine the future ID of the voice.
let mut voices_lock = self.voices.lock().unwrap();
let voice_id = voices_lock.iter().position(|n| n.is_none()).unwrap_or(voices_lock.len());
// TODO: iOS uses integer and fixed-point data
// Register the callback that is being called by coreaudio whenever it needs data to be
// fed to the audio buffer.
let active_callbacks = self.active_callbacks.clone();
audio_unit.set_render_callback(move |mut args: render_callback::Args<data::NonInterleaved<f32>>| {
// If `run()` is currently running, then a callback will be available from this list.
// Otherwise, we just fill the buffer with zeroes and return.
let mut callbacks = active_callbacks.callbacks.lock().unwrap();
let callback = if let Some(cb) = callbacks.get_mut(0) {
cb
} else {
for channel in args.data.channels_mut() {
for elem in channel.iter_mut() {
*elem = 0.0;
}
}
return Ok(());
};
let buffer = {
let buffer_len = args.num_frames * args.data.channels().count();
Buffer {
args: &mut args,
buffer: vec![0.0; buffer_len],
}
};
callback(VoiceId(voice_id), UnknownTypeBuffer::F32(::Buffer { target: Some(buffer) }));
Ok(())
}).map_err(convert_error)?;
// TODO: start playing now? is that consistent with the other backends?
audio_unit.start().map_err(convert_error)?;
// Add the voice to the list of voices within `self`.
{
let inner = VoiceInner {
playing: true,
audio_unit: audio_unit,
};
if voice_id == voices_lock.len() {
voices_lock.push(Some(inner));
} else {
voices_lock[voice_id] = Some(inner);
}
}
Ok(VoiceId(voice_id))
}
pub fn destroy_voice(&self, voice_id: VoiceId) {
let mut voices = self.voices.lock().unwrap();
voices[voice_id.0] = None;
}
pub fn play(&self, voice: VoiceId) {
let mut voices = self.voices.lock().unwrap();
let voice = voices[voice.0].as_mut().unwrap();
if !voice.playing {
voice.audio_unit.start().unwrap();
voice.playing = true;
}
}
pub fn pause(&self, voice: VoiceId) {
let mut voices = self.voices.lock().unwrap();
let voice = voices[voice.0].as_mut().unwrap();
if voice.playing {
voice.audio_unit.stop().unwrap();
voice.playing = false;
}
} }
} }
pub struct Buffer<T> { pub struct Buffer<'a, T: 'a> {
args: render_callback::Args<data::NonInterleaved<T>>, args: &'a mut render_callback::Args<data::NonInterleaved<T>>,
buffer: Vec<T>, buffer: Vec<T>,
} }
impl<T> Buffer<T> impl<'a, T> Buffer<'a, T>
where T: Sample where T: Sample
{ {
#[inline] #[inline]
@ -86,158 +225,11 @@ impl<T> Buffer<T>
pub fn finish(self) { pub fn finish(self) {
// TODO: At the moment this assumes the Vec<T> is a Vec<f32>. // TODO: At the moment this assumes the Vec<T> is a Vec<f32>.
// Need to add T: Sample and use Sample::to_vec_f32. // Need to add T: Sample and use Sample::to_vec_f32.
let Buffer { mut args, buffer } = self; let num_channels = self.args.data.channels().count();
for (i, frame) in self.buffer.chunks(num_channels).enumerate() {
let num_channels = args.data.channels().count(); for (channel, sample) in self.args.data.channels_mut().zip(frame.iter()) {
for (i, frame) in buffer.chunks(num_channels).enumerate() {
for (channel, sample) in args.data.channels_mut().zip(frame.iter()) {
channel[i] = *sample; channel[i] = *sample;
} }
} }
} }
} }
pub struct Voice {
playing: bool,
audio_unit: Arc<Mutex<AudioUnit>>,
}
#[allow(dead_code)] // the audio_unit will be dropped if we don't hold it.
pub struct SamplesStream {
inner: Arc<Mutex<SamplesStreamInner>>,
audio_unit: Arc<Mutex<AudioUnit>>,
}
struct SamplesStreamInner {
scheduled_task: Option<Task>,
current_callback: Option<render_callback::Args<data::NonInterleaved<f32>>>,
}
impl Stream for SamplesStream {
type Item = UnknownTypeBuffer;
type Error = ();
fn poll(&mut self) -> Poll<Option<Self::Item>, Self::Error> {
let mut inner = self.inner.lock().unwrap();
// There are two possibilites: either we're answering a callback of coreaudio and we return
// a buffer, or we're not answering a callback and we return that we're not ready.
let current_callback = match inner.current_callback.take() {
Some(c) => c,
None => {
inner.scheduled_task = Some(task::park());
return Ok(Async::NotReady);
},
};
let buffer_len = current_callback.num_frames * current_callback.data.channels().count();
let buffer = Buffer {
args: current_callback,
buffer: vec![0.0; buffer_len],
};
Ok(Async::Ready(Some(UnknownTypeBuffer::F32(::Buffer { target: Some(buffer) }))))
}
}
impl Voice {
    /// Builds a new output voice on the default coreaudio audio unit, together with the stream
    /// of buffers that must be filled with sound data.
    ///
    /// The audio unit is started immediately (`playing` starts as `true`).
    pub fn new(_: &Endpoint, _: &Format, _: &EventLoop)
               -> Result<(Voice, SamplesStream), CreationError> {
        let inner = Arc::new(Mutex::new(SamplesStreamInner {
            scheduled_task: None,
            current_callback: None,
        }));

        // Maps coreaudio errors to the cross-platform `CreationError` variants.
        fn convert_error(err: coreaudio::Error) -> CreationError {
            match err {
                coreaudio::Error::RenderCallbackBufferFormatDoesNotMatchAudioUnitStreamFormat |
                coreaudio::Error::NoKnownSubtype |
                coreaudio::Error::AudioUnit(coreaudio::error::AudioUnitError::FormatNotSupported) |
                coreaudio::Error::AudioCodec(_) |
                coreaudio::Error::AudioFormat(_) => CreationError::FormatNotSupported,
                _ => CreationError::DeviceNotAvailable,
            }
        }

        let au_type = if cfg!(target_os = "ios") {
            // The DefaultOutput unit isn't available in iOS unfortunately. RemoteIO is a sensible replacement.
            // See
            // https://developer.apple.com/library/content/documentation/MusicAudio/Conceptual/AudioUnitHostingGuide_iOS/UsingSpecificAudioUnits/UsingSpecificAudioUnits.html
            coreaudio::audio_unit::IOType::RemoteIO
        } else {
            coreaudio::audio_unit::IOType::DefaultOutput
        };

        let mut audio_unit = AudioUnit::new(au_type).map_err(convert_error)?;

        // TODO: iOS uses integer and fixed-point data

        {
            let inner = inner.clone();
            let result = audio_unit.set_render_callback(move |args| {
                // This callback is entered whenever the coreaudio engine needs to be fed data.
                // Store the callback argument in the `SamplesStreamInner` and return the task
                // that we're supposed to notify.
                let scheduled = {
                    let mut inner = inner.lock().unwrap();
                    // A previous callback must have been consumed before a new one arrives.
                    assert!(inner.current_callback.is_none());
                    inner.current_callback = Some(args);
                    inner.scheduled_task.take()
                };

                // It is important that `inner` is unlocked here.
                if let Some(scheduled) = scheduled {
                    // Calling `unpark()` should eventually call `poll()` on the `SamplesStream`,
                    // which will use the data we stored in `current_callback`.
                    scheduled.unpark();
                }

                // TODO: what should happen if the callback wasn't processed? in other word, what
                //       if the user didn't register any handler or did a stupid thing in the
                //       handler (like mem::forgetting the buffer)?

                Ok(())
            });
            result.map_err(convert_error)?;
        }

        // Start playback right away; `Voice::pause` can stop it later.
        audio_unit.start().map_err(convert_error)?;

        // The audio unit is shared between the `Voice` (for play/pause) and the
        // `SamplesStream` (to keep it alive while buffers are produced).
        let au_arc = Arc::new(Mutex::new(audio_unit));

        let samples_stream = SamplesStream {
            inner: inner,
            audio_unit: au_arc.clone(),
        };

        Ok((Voice {
                playing: true,
                audio_unit: au_arc.clone(),
            },
            samples_stream))
    }

    /// Resumes playback; no-op if the voice is already playing.
    #[inline]
    pub fn play(&mut self) {
        if !self.playing {
            let mut unit = self.audio_unit.lock().unwrap();
            unit.start().unwrap();
            self.playing = true;
        }
    }

    /// Suspends playback; no-op if the voice is already paused.
    #[inline]
    pub fn pause(&mut self) {
        if self.playing {
            let mut unit = self.audio_unit.lock().unwrap();
            unit.stop().unwrap();
            self.playing = false;
        }
    }
}

252
src/emscripten/mod.rs Normal file
View File

@ -0,0 +1,252 @@
use std::marker::PhantomData;
use std::os::raw::c_char;
use std::os::raw::c_int;
use std::os::raw::c_void;
use CreationError;
use Format;
use FormatsEnumerationError;
use Sample;
use UnknownTypeBuffer;
extern {
    // Registers `func` as the browser's main loop, called `fps` times per second
    // (`simulate_infinite_loop` non-zero means the call never returns).
    fn emscripten_set_main_loop_arg(_: extern fn(*mut c_void), _: *mut c_void, _: c_int, _: c_int);
    // Executes a nul-terminated JavaScript snippet, ignoring its result.
    fn emscripten_run_script(script: *const c_char);
    // Executes a nul-terminated JavaScript snippet and returns its result as an int.
    fn emscripten_run_script_int(script: *const c_char) -> c_int;
}

// The emscripten backend works by having a global variable named `_cpal_audio_contexts`, which
// is an array of `AudioContext` objects. A voice ID corresponds to an entry in this array.
//
// Creating a voice creates a new `AudioContext`. Destroying a voice destroys it.

// TODO: handle latency better ; right now we just use setInterval with the amount of sound data
// that is in each buffer ; this is obviously bad, and also the schedule is too tight and there may
// be underflows

/// Zero-sized event loop; all state lives in the JS `_cpal_audio_contexts` global.
pub struct EventLoop;
impl EventLoop {
    /// Creates a new event loop. Cheap: no JS state is touched until a voice is built.
    #[inline]
    pub fn new() -> EventLoop {
        EventLoop
    }

    /// Takes over the thread and periodically invokes `callback` with a buffer to fill
    /// for every live voice. Never returns.
    #[inline]
    pub fn run<F>(&self, mut callback: F) -> !
        where F: FnMut(VoiceId, UnknownTypeBuffer)
    {
        unsafe {
            // The `run` function uses `emscripten_set_main_loop_arg` to invoke a Rust callback
            // repeatedly. The job of this callback is to fill the content of the audio buffers.

            // The first argument of the callback function (a `void*`) is a casted pointer to the
            // `callback` parameter that was passed to `run`.

            extern "C" fn callback_fn<F>(callback_ptr: *mut c_void)
                where F: FnMut(VoiceId, UnknownTypeBuffer)
            {
                unsafe {
                    // Number of entries in the JS contexts array (0 if it doesn't exist yet).
                    let num_contexts = emscripten_run_script_int("(function() {
                        if (window._cpal_audio_contexts)
                            return window._cpal_audio_contexts.length;
                        else
                            return 0;
                        })()\0".as_ptr() as *const _);

                    // TODO: this processes all the voices, even those from maybe other event loops
                    //       this is not a problem yet, but may become one in the future?
                    for voice_id in 0 .. num_contexts {
                        let callback_ptr = &mut *(callback_ptr as *mut F);
                        // Hard-coded buffer size: 44100 Hz * 2 channels / 3 calls per second;
                        // must stay in sync with the `fps` argument passed below.
                        let buffer = Buffer {
                            temporary_buffer: vec![0.0; 44100 * 2 / 3],
                            voice_id: voice_id,
                            marker: PhantomData,
                        };

                        callback_ptr(VoiceId(voice_id), ::UnknownTypeBuffer::F32(::Buffer { target: Some(buffer) }));
                    }
                }
            }

            let callback_ptr = &mut callback as *mut F as *mut c_void;
            // fps = 3 (matches the buffer sizing above); last arg 1 = simulate infinite loop.
            emscripten_set_main_loop_arg(callback_fn::<F>, callback_ptr, 3, 1);
            unreachable!()
        }
    }

    /// Creates a new `AudioContext` in the JS array and returns its index as the voice ID.
    /// Note: the requested `format` is currently ignored — 44100 Hz / 2ch F32 is assumed.
    #[inline]
    pub fn build_voice(&self, _: &Endpoint, format: &Format)
                       -> Result<VoiceId, CreationError>
    {
        // TODO: find an empty element in the array first, instead of pushing at the end, in case
        //       the user creates and destroys lots of voices?
        let num = unsafe {
            emscripten_run_script_int(concat!(r#"(function() {
                if (!window._cpal_audio_contexts)
                    window._cpal_audio_contexts = new Array();
                window._cpal_audio_contexts.push(new AudioContext());
                return window._cpal_audio_contexts.length - 1;
                })()"#, "\0").as_ptr() as *const _)
        };

        Ok(VoiceId(num))
    }

    /// Drops the voice's `AudioContext` (the array slot is nulled, not removed,
    /// so other voice IDs remain stable).
    #[inline]
    pub fn destroy_voice(&self, voice_id: VoiceId) {
        unsafe {
            let script = format!("
                if (window._cpal_audio_contexts)
                    window._cpal_audio_contexts[{}] = null;\0", voice_id.0);
            emscripten_run_script(script.as_ptr() as *const _)
        }
    }

    /// Resumes the voice's `AudioContext`; silently does nothing if the voice is gone.
    #[inline]
    pub fn play(&self, voice_id: VoiceId) {
        unsafe {
            let script = format!("
                if (window._cpal_audio_contexts)
                    if (window._cpal_audio_contexts[{v}])
                        window._cpal_audio_contexts[{v}].resume();\0", v = voice_id.0);
            emscripten_run_script(script.as_ptr() as *const _)
        }
    }

    /// Suspends the voice's `AudioContext`; silently does nothing if the voice is gone.
    #[inline]
    pub fn pause(&self, voice_id: VoiceId) {
        unsafe {
            let script = format!("
                if (window._cpal_audio_contexts)
                    if (window._cpal_audio_contexts[{v}])
                        window._cpal_audio_contexts[{v}].suspend();\0", v = voice_id.0);
            emscripten_run_script(script.as_ptr() as *const _)
        }
    }
}
// Index within the `_cpal_audio_contexts` global variable in Javascript.
//
// Note: indices are never reused (destroyed voices leave a `null` slot), so a `VoiceId`
// uniquely identifies a voice for the lifetime of the page.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct VoiceId(c_int);
// Detects whether the `AudioContext` global variable is available.
//
// The test uses `typeof` rather than referencing `AudioContext` directly: in a browser
// that does not implement the Web Audio API, a bare `AudioContext` identifier throws a
// `ReferenceError` (it does not evaluate to a falsy value), which would abort the script
// instead of making it return 0.
fn is_webaudio_available() -> bool {
    unsafe {
        emscripten_run_script_int(concat!(r#"(function() {
            if (typeof AudioContext === 'undefined') { return 0; } else { return 1; }
            })()"#, "\0").as_ptr() as *const _) != 0
    }
}
// Content is false if the iterator is empty.
//
// There is at most one endpoint (the browser's default `AudioContext` destination);
// the bool records whether it has been yielded yet.
pub struct EndpointsIterator(bool);

impl Default for EndpointsIterator {
    fn default() -> EndpointsIterator {
        // We produce an empty iterator if the WebAudio API isn't available.
        EndpointsIterator(is_webaudio_available())
    }
}
impl Iterator for EndpointsIterator {
    type Item = Endpoint;

    /// Yields the single default endpoint on the first call, then stays exhausted.
    #[inline]
    fn next(&mut self) -> Option<Endpoint> {
        let pending = self.0;
        self.0 = false;
        if pending { Some(Endpoint) } else { None }
    }
}
#[inline]
pub fn default_endpoint() -> Option<Endpoint> {
    // The single endpoint only exists when the browser exposes the Web Audio API.
    match is_webaudio_available() {
        true => Some(Endpoint),
        false => None,
    }
}
/// The unique endpoint of the emscripten backend (the browser's default audio output).
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Endpoint;

impl Endpoint {
    /// Returns the one hard-coded format this backend supports.
    #[inline]
    pub fn supported_formats(
        &self)
        -> Result<SupportedFormatsIterator, FormatsEnumerationError> {
        // TODO: right now cpal's API doesn't allow flexibility here
        //       "44100" and "2" (channels) have also been hard-coded in the rest of the code ; if
        //       this ever becomes more flexible, don't forget to change that
        let only_format = Format {
            channels: vec![::ChannelPosition::BackLeft, ::ChannelPosition::BackRight],
            samples_rate: ::SamplesRate(44100),
            data_type: ::SampleFormat::F32,
        };
        Ok(vec![only_format].into_iter())
    }

    /// Human-readable name of the endpoint.
    #[inline]
    pub fn name(&self) -> String {
        String::from("Default endpoint")
    }
}
/// Iterator over the (single) format supported by the emscripten endpoint.
pub type SupportedFormatsIterator = ::std::vec::IntoIter<Format>;

/// Buffer of samples handed to the `run` callback; data is submitted to the JS
/// `AudioContext` only when `finish()` is called.
pub struct Buffer<'a, T: 'a> where T: Sample {
    // Samples accumulated by the user; converted to f32 and shipped to JS in `finish()`.
    temporary_buffer: Vec<T>,
    // Index of the target `AudioContext` in the JS `_cpal_audio_contexts` array.
    voice_id: c_int,
    // NOTE(review): no data is actually borrowed; 'a presumably exists for signature
    // parity with the other backends — confirm before removing.
    marker: PhantomData<&'a mut T>,
}
impl<'a, T> Buffer<'a, T> where T: Sample {
    /// Mutable access to the samples to fill.
    #[inline]
    pub fn buffer(&mut self) -> &mut [T] {
        &mut self.temporary_buffer
    }

    /// Number of samples in the buffer (all channels interleaved).
    #[inline]
    pub fn len(&self) -> usize {
        self.temporary_buffer.len()
    }

    /// Submits the samples to the voice's `AudioContext` by generating and running a
    /// JavaScript snippet that builds an `AudioBuffer` and plays it immediately.
    #[inline]
    pub fn finish(self) {
        unsafe {
            // TODO: **very** slow
            // Serializes every sample as decimal text ("0.5, -0.25, ...") to splice into JS.
            let src_data = self.temporary_buffer.iter().map(|&b| b.to_f32().to_string() + ", ").fold(String::new(), |mut a, b| { a.push_str(&b); a });

            debug_assert_eq!(self.temporary_buffer.len() % 2, 0); // TODO: num channels

            // De-interleaves `src` into the per-channel arrays of a freshly created
            // `AudioBuffer`, then starts a one-shot buffer source node.
            let script = format!("(function() {{
                if (!window._cpal_audio_contexts)
                    return;
                var context = window._cpal_audio_contexts[{voice_id}];
                if (!context)
                    return;
                var buffer = context.createBuffer({num_channels}, {buf_len} / {num_channels}, 44100);
                var src = [{src_data}];
                for (var channel = 0; channel < {num_channels}; ++channel) {{
                    var buffer_content = buffer.getChannelData(channel);
                    for (var i = 0; i < {buf_len} / {num_channels}; ++i) {{
                        buffer_content[i] = src[i * {num_channels} + channel];
                    }}
                }}
                var node = context.createBufferSource();
                node.buffer = buffer;
                node.connect(context.destination);
                node.start();
                }})()\0",
                num_channels = 2,
                voice_id = self.voice_id,
                buf_len = self.temporary_buffer.len(),
                src_data = src_data);

            emscripten_run_script(script.as_ptr() as *const _)
        }
    }
}

View File

@ -1,65 +1,33 @@
/*! /*!
# How to use cpal # How to use cpal
In order to play a sound, first you need to create an `EventLoop` and a `Voice`. In order to play a sound, first you need to create an `EventLoop` and a voice.
```no_run ```no_run
// getting the default sound output of the system (can return `None` if nothing is supported) // getting the default sound output of the system (can return `None` if nothing is supported)
let endpoint = cpal::get_default_endpoint().unwrap(); let endpoint = cpal::default_endpoint().unwrap();
// note that the user can at any moment disconnect the device, therefore all operations return // note that the user can at any moment disconnect the device, therefore all operations return
// a `Result` to handle this situation // a `Result` to handle this situation
// getting a format for the PCM // getting a format for the PCM
let format = endpoint.get_supported_formats_list().unwrap().next().unwrap(); let format = endpoint.supported_formats().unwrap().next().unwrap();
let event_loop = cpal::EventLoop::new(); let event_loop = cpal::EventLoop::new();
let (voice, mut samples_stream) = cpal::Voice::new(&endpoint, &format, &event_loop).unwrap(); let voice_id = event_loop.build_voice(&endpoint, &format).unwrap();
event_loop.play(voice_id);
``` ```
The `voice` can be used to control the play/pause of the output, while the `samples_stream` can `voice_id` is an identifier for the voice can be used to control the play/pause of the output.
be used to register a callback that will be called whenever the backend is ready to get data.
See the documentation of `futures-rs` for more info about how to use streams. Once that's done, you can call `run()` on the `event_loop`.
```no_run ```no_run
# extern crate futures; # let event_loop = cpal::EventLoop::new();
# extern crate cpal; event_loop.run(move |_voice_id, _buffer| {
# use std::sync::Arc;
use futures::stream::Stream;
use futures::task;
# struct MyExecutor;
# impl task::Executor for MyExecutor {
# fn execute(&self, r: task::Run) {
# r.run();
# }
# }
# fn main() {
# let mut samples_stream: cpal::SamplesStream = unsafe { std::mem::uninitialized() };
# let my_executor = Arc::new(MyExecutor);
task::spawn(samples_stream.for_each(move |buffer| -> Result<_, ()> {
// write data to `buffer` here // write data to `buffer` here
});
Ok(())
})).execute(my_executor);
# }
```
TODO: add example
After you have registered a callback, call `play`:
```no_run
# let mut voice: cpal::Voice = unsafe { std::mem::uninitialized() };
voice.play();
```
And finally, run the event loop:
```no_run
# let mut event_loop: cpal::EventLoop = unsafe { std::mem::uninitialized() };
event_loop.run();
``` ```
Calling `run()` will block the thread forever, so it's usually best done in a separate thread. Calling `run()` will block the thread forever, so it's usually best done in a separate thread.
@ -69,7 +37,6 @@ from time to time.
*/ */
extern crate futures;
#[macro_use] #[macro_use]
extern crate lazy_static; extern crate lazy_static;
extern crate libc; extern crate libc;
@ -77,16 +44,13 @@ extern crate libc;
pub use samples_formats::{Sample, SampleFormat}; pub use samples_formats::{Sample, SampleFormat};
#[cfg(all(not(windows), not(target_os = "linux"), not(target_os = "freebsd"), #[cfg(all(not(windows), not(target_os = "linux"), not(target_os = "freebsd"),
not(target_os = "macos"), not(target_os = "ios")))] not(target_os = "macos"), not(target_os = "ios"), not(target_os = "emscripten")))]
use null as cpal_impl; use null as cpal_impl;
use std::error::Error; use std::error::Error;
use std::fmt; use std::fmt;
use std::ops::{Deref, DerefMut}; use std::ops::{Deref, DerefMut};
use futures::Poll;
use futures::stream::Stream;
mod null; mod null;
mod samples_formats; mod samples_formats;
@ -102,6 +66,10 @@ mod cpal_impl;
#[path = "coreaudio/mod.rs"] #[path = "coreaudio/mod.rs"]
mod cpal_impl; mod cpal_impl;
#[cfg(target_os = "emscripten")]
#[path = "emscripten/mod.rs"]
mod cpal_impl;
/// An iterator for the list of formats that are supported by the backend. /// An iterator for the list of formats that are supported by the backend.
pub struct EndpointsIterator(cpal_impl::EndpointsIterator); pub struct EndpointsIterator(cpal_impl::EndpointsIterator);
@ -237,42 +205,94 @@ impl Iterator for SupportedFormatsIterator {
pub struct EventLoop(cpal_impl::EventLoop); pub struct EventLoop(cpal_impl::EventLoop);
impl EventLoop { impl EventLoop {
/// Initializes a new events loop.
#[inline] #[inline]
pub fn new() -> EventLoop { pub fn new() -> EventLoop {
EventLoop(cpal_impl::EventLoop::new()) EventLoop(cpal_impl::EventLoop::new())
} }
/// Creates a new voice that will play on the given endpoint and with the given format.
///
/// On success, returns an identifier for the voice.
#[inline] #[inline]
pub fn run(&self) { pub fn build_voice(&self, endpoint: &Endpoint, format: &Format)
self.0.run() -> Result<VoiceId, CreationError>
{
self.0.build_voice(&endpoint.0, format).map(VoiceId)
}
/// Destroys an existing voice.
///
/// # Panic
///
/// If the voice doesn't exist, this function can either panic or be a no-op.
///
#[inline]
pub fn destroy_voice(&self, voice_id: VoiceId) {
self.0.destroy_voice(voice_id.0)
}
/// Takes control of the current thread and processes the sounds.
///
/// Whenever a voice needs to be fed some data, the closure passed as parameter is called.
/// **Note**: Calling other methods of the events loop from the callback will most likely
/// deadlock. Don't do that. Maybe this will change in the future.
#[inline]
pub fn run<F>(&self, mut callback: F) -> !
where F: FnMut(VoiceId, UnknownTypeBuffer)
{
self.0.run(move |id, buf| callback(VoiceId(id), buf))
}
/// Sends a command to the audio device that it should start playing.
///
/// Has no effect is the voice was already playing.
///
/// Only call this after you have submitted some data, otherwise you may hear
/// some glitches.
///
/// # Panic
///
/// If the voice doesn't exist, this function can either panic or be a no-op.
///
#[inline]
pub fn play(&self, voice: VoiceId) {
self.0.play(voice.0)
}
/// Sends a command to the audio device that it should stop playing.
///
/// Has no effect is the voice was already paused.
///
/// If you call `play` afterwards, the playback will resume exactly where it was.
///
/// # Panic
///
/// If the voice doesn't exist, this function can either panic or be a no-op.
///
#[inline]
pub fn pause(&self, voice: VoiceId) {
self.0.pause(voice.0)
} }
} }
/// Represents a buffer that must be filled with audio data. /// Identifier of a voice in an events loop.
/// #[derive(Debug, Clone, PartialEq, Eq, Hash)]
/// You should destroy this object as soon as possible. Data is only committed when it pub struct VoiceId(cpal_impl::VoiceId);
/// is destroyed.
#[must_use]
pub struct Buffer<T>
where T: Sample
{
// also contains something, taken by `Drop`
target: Option<cpal_impl::Buffer<T>>,
}
/// This is the struct that is provided to you by cpal when you want to write samples to a buffer. /// This is the struct that is provided to you by cpal when you want to write samples to a buffer.
/// ///
/// Since the type of data is only known at runtime, you have to fill the right buffer. /// Since the type of data is only known at runtime, you have to fill the right buffer.
pub enum UnknownTypeBuffer { pub enum UnknownTypeBuffer<'a> {
/// Samples whose format is `u16`. /// Samples whose format is `u16`.
U16(Buffer<u16>), U16(Buffer<'a, u16>),
/// Samples whose format is `i16`. /// Samples whose format is `i16`.
I16(Buffer<i16>), I16(Buffer<'a, i16>),
/// Samples whose format is `f32`. /// Samples whose format is `f32`.
F32(Buffer<f32>), F32(Buffer<'a, f32>),
} }
impl UnknownTypeBuffer { impl<'a> UnknownTypeBuffer<'a> {
/// Returns the length of the buffer in number of samples. /// Returns the length of the buffer in number of samples.
#[inline] #[inline]
pub fn len(&self) -> usize { pub fn len(&self) -> usize {
@ -343,111 +363,20 @@ impl Error for CreationError {
} }
} }
/// Controls a sound output. A typical application has one `Voice` for each sound /// Represents a buffer that must be filled with audio data.
/// it wants to output.
/// ///
/// A voice must be periodically filled with new data by calling `append_data`, or the sound /// You should destroy this object as soon as possible. Data is only committed when it
/// will stop playing. /// is destroyed.
/// #[must_use]
/// Each `Voice` is bound to a specific number of channels, samples rate, and samples format, pub struct Buffer<'a, T: 'a>
/// which can be retreived by calling `get_channels`, `get_samples_rate` and `get_samples_format`. where T: Sample
/// If you call `append_data` with values different than these, then cpal will automatically {
/// perform a conversion on your data. // Always contains something, taken by `Drop`
/// // TODO: change that
/// If you have the possibility, you should try to match the format of the voice. target: Option<cpal_impl::Buffer<'a, T>>,
pub struct Voice {
voice: cpal_impl::Voice,
format: Format,
} }
impl Voice { impl<'a, T> Deref for Buffer<'a, T>
/// Builds a new channel.
#[inline]
pub fn new(endpoint: &Endpoint, format: &Format, event_loop: &EventLoop)
-> Result<(Voice, SamplesStream), CreationError> {
let (voice, stream) = cpal_impl::Voice::new(&endpoint.0, format, &event_loop.0)?;
let voice = Voice {
voice: voice,
format: format.clone(),
};
let stream = SamplesStream(stream);
Ok((voice, stream))
}
/// Returns the format used by the voice.
#[inline]
pub fn format(&self) -> &Format {
&self.format
}
/// DEPRECATED: use `format` instead. Returns the number of channels.
///
/// You can add data with any number of channels, but matching the voice's native format
/// will lead to better performances.
#[deprecated]
#[inline]
pub fn get_channels(&self) -> ChannelsCount {
self.format().channels.len() as ChannelsCount
}
/// DEPRECATED: use `format` instead. Returns the number of samples that are played per second.
///
/// You can add data with any samples rate, but matching the voice's native format
/// will lead to better performances.
#[deprecated]
#[inline]
pub fn get_samples_rate(&self) -> SamplesRate {
self.format().samples_rate
}
/// DEPRECATED: use `format` instead. Returns the format of the samples that are accepted by the backend.
///
/// You can add data of any format, but matching the voice's native format
/// will lead to better performances.
#[deprecated]
#[inline]
pub fn get_samples_format(&self) -> SampleFormat {
self.format().data_type
}
/// Sends a command to the audio device that it should start playing.
///
/// Has no effect is the voice was already playing.
///
/// Only call this after you have submitted some data, otherwise you may hear
/// some glitches.
#[inline]
pub fn play(&mut self) {
self.voice.play()
}
/// Sends a command to the audio device that it should stop playing.
///
/// Has no effect is the voice was already paused.
///
/// If you call `play` afterwards, the playback will resume exactly where it was.
#[inline]
pub fn pause(&mut self) {
self.voice.pause()
}
}
pub struct SamplesStream(cpal_impl::SamplesStream);
impl Stream for SamplesStream {
type Item = UnknownTypeBuffer;
type Error = ();
#[inline]
fn poll(&mut self) -> Poll<Option<Self::Item>, Self::Error> {
self.0.poll()
}
}
impl<T> Deref for Buffer<T>
where T: Sample where T: Sample
{ {
type Target = [T]; type Target = [T];
@ -458,7 +387,7 @@ impl<T> Deref for Buffer<T>
} }
} }
impl<T> DerefMut for Buffer<T> impl<'a, T> DerefMut for Buffer<'a, T>
where T: Sample where T: Sample
{ {
#[inline] #[inline]
@ -467,7 +396,7 @@ impl<T> DerefMut for Buffer<T>
} }
} }
impl<T> Drop for Buffer<T> impl<'a, T> Drop for Buffer<'a, T>
where T: Sample where T: Sample
{ {
#[inline] #[inline]

View File

@ -2,10 +2,6 @@
use std::marker::PhantomData; use std::marker::PhantomData;
use futures::Async;
use futures::Poll;
use futures::stream::Stream;
use CreationError; use CreationError;
use Format; use Format;
use FormatsEnumerationError; use FormatsEnumerationError;
@ -17,12 +13,40 @@ impl EventLoop {
pub fn new() -> EventLoop { pub fn new() -> EventLoop {
EventLoop EventLoop
} }
#[inline] #[inline]
pub fn run(&self) { pub fn run<F>(&self, _callback: F) -> !
where F: FnMut(VoiceId, UnknownTypeBuffer)
{
loop { /* TODO: don't spin */ } loop { /* TODO: don't spin */ }
} }
#[inline]
pub fn build_voice(&self, _: &Endpoint, _: &Format)
-> Result<VoiceId, CreationError>
{
Err(CreationError::DeviceNotAvailable)
}
#[inline]
pub fn destroy_voice(&self, _: VoiceId) {
unreachable!()
}
#[inline]
pub fn play(&self, _: VoiceId) {
panic!()
}
#[inline]
pub fn pause(&self, _: VoiceId) {
panic!()
}
} }
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct VoiceId;
#[derive(Default)] #[derive(Default)]
pub struct EndpointsIterator; pub struct EndpointsIterator;
@ -68,40 +92,11 @@ impl Iterator for SupportedFormatsIterator {
} }
} }
pub struct Voice; pub struct Buffer<'a, T: 'a> {
pub struct SamplesStream; marker: PhantomData<&'a mut T>,
impl Voice {
#[inline]
pub fn new(_: &Endpoint, _: &Format, _: &EventLoop)
-> Result<(Voice, SamplesStream), CreationError> {
Err(CreationError::DeviceNotAvailable)
}
#[inline]
pub fn play(&mut self) {
}
#[inline]
pub fn pause(&mut self) {
}
} }
impl Stream for SamplesStream { impl<'a, T> Buffer<'a, T> {
type Item = UnknownTypeBuffer;
type Error = ();
#[inline]
fn poll(&mut self) -> Poll<Option<Self::Item>, Self::Error> {
Ok(Async::NotReady)
}
}
pub struct Buffer<T> {
marker: PhantomData<T>,
}
impl<T> Buffer<T> {
#[inline] #[inline]
pub fn buffer(&mut self) -> &mut [T] { pub fn buffer(&mut self) -> &mut [T] {
unreachable!() unreachable!()

View File

@ -35,6 +35,9 @@ pub unsafe trait Sample: Copy + Clone {
/// Returns the `SampleFormat` corresponding to this data type. /// Returns the `SampleFormat` corresponding to this data type.
// TODO: rename to `format()`. Requires a breaking change. // TODO: rename to `format()`. Requires a breaking change.
fn get_format() -> SampleFormat; fn get_format() -> SampleFormat;
/// Turns the sample into its equivalent as a floating-point.
fn to_f32(&self) -> f32;
} }
unsafe impl Sample for u16 { unsafe impl Sample for u16 {
@ -42,6 +45,11 @@ unsafe impl Sample for u16 {
fn get_format() -> SampleFormat { fn get_format() -> SampleFormat {
SampleFormat::U16 SampleFormat::U16
} }
#[inline]
fn to_f32(&self) -> f32 {
((*self as f32 / u16::max_value() as f32) - 0.5) * 2.0 // TODO: maybe wrong
}
} }
unsafe impl Sample for i16 { unsafe impl Sample for i16 {
@ -49,6 +57,15 @@ unsafe impl Sample for i16 {
fn get_format() -> SampleFormat { fn get_format() -> SampleFormat {
SampleFormat::I16 SampleFormat::I16
} }
#[inline]
fn to_f32(&self) -> f32 {
if *self < 0 {
*self as f32 / -(::std::i16::MIN as f32)
} else {
*self as f32 / ::std::i16::MAX as f32
}
}
} }
unsafe impl Sample for f32 { unsafe impl Sample for f32 {
@ -56,4 +73,9 @@ unsafe impl Sample for f32 {
fn get_format() -> SampleFormat { fn get_format() -> SampleFormat {
SampleFormat::F32 SampleFormat::F32
} }
#[inline]
fn to_f32(&self) -> f32 {
*self
}
} }

414
src/wasapi/endpoint.rs Normal file
View File

@ -0,0 +1,414 @@
use std::ffi::OsString;
use std::io::Error as IoError;
use std::mem;
use std::option::IntoIter as OptionIntoIter;
use std::os::windows::ffi::OsStringExt;
use std::ptr;
use std::slice;
use std::sync::{Arc, Mutex, MutexGuard};
use ChannelPosition;
use Format;
use FormatsEnumerationError;
use SampleFormat;
use SamplesRate;
use super::check_result;
use super::com;
use super::ole32;
use super::winapi;
/// WASAPI can only report the shared-mode mix format, hence at most one element.
pub type SupportedFormatsIterator = OptionIntoIter<Format>;

/// Wrapper because of that stupid decision to remove `Send` and `Sync` from raw pointers.
#[derive(Copy, Clone)]
struct IAudioClientWrapper(*mut winapi::IAudioClient);
// SAFETY(review): COM interface pointers are assumed safe to move/share between threads
// here; access to the wrapped pointer is guarded by the `Mutex` in `Endpoint`.
unsafe impl Send for IAudioClientWrapper {
}
unsafe impl Sync for IAudioClientWrapper {
}

/// An opaque type that identifies an end point.
pub struct Endpoint {
    // Owned COM reference; released in `Drop`, AddRef'd in `Clone`.
    device: *mut winapi::IMMDevice,

    /// We cache an uninitialized `IAudioClient` so that we can call functions from it without
    /// having to create/destroy audio clients all the time.
    future_audio_client: Arc<Mutex<Option<IAudioClientWrapper>>>, // TODO: add NonZero around the ptr
}

// SAFETY(review): same rationale as `IAudioClientWrapper` — the raw `IMMDevice` pointer is
// treated as thread-safe, with mutable state behind the mutex.
unsafe impl Send for Endpoint {
}
unsafe impl Sync for Endpoint {
}
impl Endpoint {
    // TODO: this function returns a GUID of the endpoint
    //       instead it should use the property store and return the friendly name
    /// Returns the device's ID string (a GUID-like wide string from `IMMDevice::GetId`).
    pub fn name(&self) -> String {
        unsafe {
            let mut name_ptr = mem::uninitialized();
            // can only fail if wrong params or out of memory
            check_result((*self.device).GetId(&mut name_ptr)).unwrap();

            // finding the length of the name (the string is nul-terminated UTF-16)
            let mut len = 0;
            while *name_ptr.offset(len) != 0 {
                len += 1;
            }

            // building a slice containing the name
            let name_slice = slice::from_raw_parts(name_ptr, len as usize);

            // and turning it into a string; the buffer was allocated by COM, so it must be
            // freed with `CoTaskMemFree` once we own a Rust copy
            let name_string: OsString = OsStringExt::from_wide(name_slice);
            ole32::CoTaskMemFree(name_ptr as *mut _);
            name_string.into_string().unwrap()
        }
    }

    /// Wraps a raw `IMMDevice` pointer; takes ownership of one COM reference.
    #[inline]
    fn from_immdevice(device: *mut winapi::IMMDevice) -> Endpoint {
        Endpoint {
            device: device,
            future_audio_client: Arc::new(Mutex::new(None)),
        }
    }

    /// Ensures that `future_audio_client` contains a `Some` and returns a locked mutex to it.
    fn ensure_future_audio_client(&self)
                                  -> Result<MutexGuard<Option<IAudioClientWrapper>>, IoError> {
        let mut lock = self.future_audio_client.lock().unwrap();
        if lock.is_some() {
            return Ok(lock);
        }

        let audio_client: *mut winapi::IAudioClient = unsafe {
            let mut audio_client = mem::uninitialized();
            let hresult = (*self.device).Activate(&winapi::IID_IAudioClient,
                                                  winapi::CLSCTX_ALL,
                                                  ptr::null_mut(),
                                                  &mut audio_client);

            // can fail if the device has been disconnected since we enumerated it, or if
            // the device doesn't support playback for some reason
            check_result(hresult)?;
            assert!(!audio_client.is_null());
            audio_client as *mut _
        };

        *lock = Some(IAudioClientWrapper(audio_client));
        Ok(lock)
    }

    /// Returns an uninitialized `IAudioClient`.
    ///
    /// Ownership of the cached client is transferred to the caller (the cache is emptied),
    /// so the caller is responsible for releasing it.
    #[inline]
    pub(crate) fn build_audioclient(&self) -> Result<*mut winapi::IAudioClient, IoError> {
        let mut lock = self.ensure_future_audio_client()?;
        // `Option<IAudioClientWrapper>` is `Copy`, so `unwrap()` copies the value out of the
        // guard rather than moving out of it.
        let client = lock.unwrap().0;
        *lock = None;
        Ok(client)
    }

    /// Returns the formats supported by this endpoint (in practice: the single mix format).
    ///
    /// # Errors
    ///
    /// `DeviceNotAvailable` if the device has been invalidated (e.g. unplugged).
    pub fn supported_formats(
        &self)
        -> Result<SupportedFormatsIterator, FormatsEnumerationError> {
        // We always create voices in shared mode, therefore all samples go through an audio
        // processor to mix them together.
        // However there is no way to query the list of all formats that are supported by the
        // audio processor, but one format is guaranteed to be supported, the one returned by
        // `GetMixFormat`.

        // initializing COM because we call `CoTaskMemFree`
        com::com_initialized();

        let lock = match self.ensure_future_audio_client() {
            Err(ref e) if e.raw_os_error() == Some(winapi::AUDCLNT_E_DEVICE_INVALIDATED) =>
                return Err(FormatsEnumerationError::DeviceNotAvailable),
            e => e.unwrap(),
        };
        let client = lock.unwrap().0;

        unsafe {
            let mut format_ptr = mem::uninitialized();
            match check_result((*client).GetMixFormat(&mut format_ptr)) {
                Err(ref e) if e.raw_os_error() == Some(winapi::AUDCLNT_E_DEVICE_INVALIDATED) => {
                    return Err(FormatsEnumerationError::DeviceNotAvailable);
                },
                Err(e) => panic!("{:?}", e),
                Ok(()) => (),
            };

            let format = {
                let (channels, data_type) = match (*format_ptr).wFormatTag {
                    winapi::WAVE_FORMAT_PCM => {
                        // Plain PCM is always 16-bit stereo in this code path.
                        (vec![ChannelPosition::FrontLeft, ChannelPosition::FrontRight],
                         SampleFormat::I16)
                    },
                    winapi::WAVE_FORMAT_EXTENSIBLE => {
                        let format_ptr = format_ptr as *const winapi::WAVEFORMATEXTENSIBLE;

                        // Translate each speaker-position bit of the channel mask, in the
                        // canonical WAVEFORMATEXTENSIBLE order.
                        let channels = {
                            let mut channels = Vec::new();

                            let mask = (*format_ptr).dwChannelMask;
                            if (mask & winapi::SPEAKER_FRONT_LEFT) != 0 {
                                channels.push(ChannelPosition::FrontLeft);
                            }
                            if (mask & winapi::SPEAKER_FRONT_RIGHT) != 0 {
                                channels.push(ChannelPosition::FrontRight);
                            }
                            if (mask & winapi::SPEAKER_FRONT_CENTER) != 0 {
                                channels.push(ChannelPosition::FrontCenter);
                            }
                            if (mask & winapi::SPEAKER_LOW_FREQUENCY) != 0 {
                                channels.push(ChannelPosition::LowFrequency);
                            }
                            if (mask & winapi::SPEAKER_BACK_LEFT) != 0 {
                                channels.push(ChannelPosition::BackLeft);
                            }
                            if (mask & winapi::SPEAKER_BACK_RIGHT) != 0 {
                                channels.push(ChannelPosition::BackRight);
                            }
                            if (mask & winapi::SPEAKER_FRONT_LEFT_OF_CENTER) != 0 {
                                channels.push(ChannelPosition::FrontLeftOfCenter);
                            }
                            if (mask & winapi::SPEAKER_FRONT_RIGHT_OF_CENTER) != 0 {
                                channels.push(ChannelPosition::FrontRightOfCenter);
                            }
                            if (mask & winapi::SPEAKER_BACK_CENTER) != 0 {
                                channels.push(ChannelPosition::BackCenter);
                            }
                            if (mask & winapi::SPEAKER_SIDE_LEFT) != 0 {
                                channels.push(ChannelPosition::SideLeft);
                            }
                            if (mask & winapi::SPEAKER_SIDE_RIGHT) != 0 {
                                channels.push(ChannelPosition::SideRight);
                            }
                            if (mask & winapi::SPEAKER_TOP_CENTER) != 0 {
                                channels.push(ChannelPosition::TopCenter);
                            }
                            if (mask & winapi::SPEAKER_TOP_FRONT_LEFT) != 0 {
                                channels.push(ChannelPosition::TopFrontLeft);
                            }
                            if (mask & winapi::SPEAKER_TOP_FRONT_CENTER) != 0 {
                                channels.push(ChannelPosition::TopFrontCenter);
                            }
                            if (mask & winapi::SPEAKER_TOP_FRONT_RIGHT) != 0 {
                                channels.push(ChannelPosition::TopFrontRight);
                            }
                            if (mask & winapi::SPEAKER_TOP_BACK_LEFT) != 0 {
                                channels.push(ChannelPosition::TopBackLeft);
                            }
                            if (mask & winapi::SPEAKER_TOP_BACK_CENTER) != 0 {
                                channels.push(ChannelPosition::TopBackCenter);
                            }
                            if (mask & winapi::SPEAKER_TOP_BACK_RIGHT) != 0 {
                                channels.push(ChannelPosition::TopBackRight);
                            }
                            assert_eq!((*format_ptr).Format.nChannels as usize, channels.len());
                            channels
                        };

                        // The sub-format GUID tells us whether samples are float or PCM.
                        let format = {
                            fn cmp_guid(a: &winapi::GUID, b: &winapi::GUID) -> bool {
                                a.Data1 == b.Data1 && a.Data2 == b.Data2 && a.Data3 == b.Data3 &&
                                a.Data4 == b.Data4
                            }
                            if cmp_guid(&(*format_ptr).SubFormat,
                                        &winapi::KSDATAFORMAT_SUBTYPE_IEEE_FLOAT)
                            {
                                SampleFormat::F32
                            } else if cmp_guid(&(*format_ptr).SubFormat,
                                               &winapi::KSDATAFORMAT_SUBTYPE_PCM)
                            {
                                SampleFormat::I16
                            } else {
                                panic!("Unknown SubFormat GUID returned by GetMixFormat: {:?}",
                                       (*format_ptr).SubFormat)
                            }
                        };

                        (channels, format)
                    },

                    f => panic!("Unknown data format returned by GetMixFormat: {:?}", f),
                };

                Format {
                    channels: channels,
                    samples_rate: SamplesRate((*format_ptr).nSamplesPerSec),
                    data_type: data_type,
                }
            };

            // The WAVEFORMATEX buffer was allocated by COM; release it now that we copied it.
            ole32::CoTaskMemFree(format_ptr as *mut _);

            Ok(Some(format).into_iter())
        }
    }
}
/// Two `Endpoint`s are equal when they wrap the very same `IMMDevice` COM pointer.
impl PartialEq for Endpoint {
    #[inline]
    fn eq(&self, other: &Endpoint) -> bool {
        ::std::ptr::eq(self.device, other.device)
    }
}

impl Eq for Endpoint {}
impl Clone for Endpoint {
    /// Clones the endpoint by bumping the COM reference count of the underlying device.
    /// The cached audio client is shared between clones via the `Arc`.
    #[inline]
    fn clone(&self) -> Endpoint {
        unsafe {
            (*self.device).AddRef();
        }

        Endpoint {
            device: self.device,
            future_audio_client: self.future_audio_client.clone(),
        }
    }
}

impl Drop for Endpoint {
    /// Releases the COM reference taken in `from_immdevice`/`clone`, and releases the
    /// cached audio client if one is still stored.
    #[inline]
    fn drop(&mut self) {
        unsafe {
            (*self.device).Release();
        }

        if let Some(client) = self.future_audio_client.lock().unwrap().take() {
            unsafe {
                (*client.0).Release();
            }
        }
    }
}
lazy_static! {
    /// Process-wide `IMMDeviceEnumerator`, created lazily on first use.
    static ref ENUMERATOR: Enumerator = {
        // COM initialization is thread local, but we only need to have COM initialized in the
        // thread we create the objects in
        com::com_initialized();

        // building the devices enumerator object
        unsafe {
            let mut enumerator: *mut winapi::IMMDeviceEnumerator = mem::uninitialized();

            let hresult = ole32::CoCreateInstance(&winapi::CLSID_MMDeviceEnumerator,
                                                  ptr::null_mut(), winapi::CLSCTX_ALL,
                                                  &winapi::IID_IMMDeviceEnumerator,
                                                  &mut enumerator
                                                  as *mut *mut winapi::IMMDeviceEnumerator
                                                  as *mut _);

            // can only fail if COM is not initialized or out of memory
            check_result(hresult).unwrap();
            Enumerator(enumerator)
        }
    };
}
/// RAII object around `winapi::IMMDeviceEnumerator`.
struct Enumerator(*mut winapi::IMMDeviceEnumerator);
unsafe impl Send for Enumerator {
}
unsafe impl Sync for Enumerator {
}
impl Drop for Enumerator {
#[inline]
fn drop(&mut self) {
unsafe {
(*self.0).Release();
}
}
}
/// WASAPI implementation for `EndpointsIterator`.
pub struct EndpointsIterator {
    // COM collection of the active render devices; released in `Drop`.
    collection: *mut winapi::IMMDeviceCollection,
    // Total number of devices in `collection`, queried once at construction.
    total_count: u32,
    // Index of the next device to yield; iteration ends once it reaches `total_count`.
    next_item: u32,
}
// SAFETY: `collection` is an opaque COM interface pointer; the iterator only calls
// read-style methods (`Item`) on it. Cross-thread use is assumed to be permitted by
// the MMDevice API — NOTE(review): confirm the collection's threading contract.
unsafe impl Send for EndpointsIterator {
}
unsafe impl Sync for EndpointsIterator {
}
impl Drop for EndpointsIterator {
    #[inline]
    fn drop(&mut self) {
        // Release the COM reference to the device collection obtained in `default()`.
        unsafe {
            (*self.collection).Release();
        }
    }
}
impl Default for EndpointsIterator {
fn default() -> EndpointsIterator {
unsafe {
let mut collection: *mut winapi::IMMDeviceCollection = mem::uninitialized();
// can fail because of wrong parameters (should never happen) or out of memory
check_result((*ENUMERATOR.0).EnumAudioEndpoints(winapi::eRender,
winapi::DEVICE_STATE_ACTIVE,
&mut collection))
.unwrap();
let mut count = mem::uninitialized();
// can fail if the parameter is null, which should never happen
check_result((*collection).GetCount(&mut count)).unwrap();
EndpointsIterator {
collection: collection,
total_count: count,
next_item: 0,
}
}
}
}
impl Iterator for EndpointsIterator {
type Item = Endpoint;
fn next(&mut self) -> Option<Endpoint> {
if self.next_item >= self.total_count {
return None;
}
unsafe {
let mut device = mem::uninitialized();
// can fail if out of range, which we just checked above
check_result((*self.collection).Item(self.next_item, &mut device)).unwrap();
self.next_item += 1;
Some(Endpoint::from_immdevice(device))
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let num = self.total_count - self.next_item;
let num = num as usize;
(num, Some(num))
}
}
pub fn default_endpoint() -> Option<Endpoint> {
unsafe {
let mut device = mem::uninitialized();
let hres = (*ENUMERATOR.0)
.GetDefaultAudioEndpoint(winapi::eRender, winapi::eConsole, &mut device);
if let Err(_err) = check_result(hres) {
return None; // TODO: check specifically for `E_NOTFOUND`, and panic otherwise
}
Some(Endpoint::from_immdevice(device))
}
}

View File

@ -1,133 +0,0 @@
use super::Endpoint;
use super::check_result;
use super::com;
use super::ole32;
use super::winapi;
use std::mem;
use std::ptr;
lazy_static! {
    /// Process-wide MMDevice enumerator, created lazily on first use and used to
    /// list audio endpoints and look up the default one.
    static ref ENUMERATOR: Enumerator = {
        // COM initialization is thread local, but we only need to have COM initialized in the
        // thread we create the objects in
        com::com_initialized();
        // Building the device enumerator object.
        unsafe {
            // Start from a null pointer instead of `mem::uninitialized()`: an
            // uninitialized pointer is undefined behavior to produce/read, while
            // `CoCreateInstance` always overwrites this out-parameter on success
            // (and we unwrap on failure below, so the value is never read otherwise).
            let mut enumerator: *mut winapi::IMMDeviceEnumerator = ptr::null_mut();
            let hresult = ole32::CoCreateInstance(&winapi::CLSID_MMDeviceEnumerator,
                                                  ptr::null_mut(), winapi::CLSCTX_ALL,
                                                  &winapi::IID_IMMDeviceEnumerator,
                                                  &mut enumerator
                                                  as *mut *mut winapi::IMMDeviceEnumerator
                                                  as *mut _);
            check_result(hresult).unwrap();
            debug_assert!(!enumerator.is_null());
            Enumerator(enumerator)
        }
    };
}
/// RAII object around `winapi::IMMDeviceEnumerator`.
/// Owns one COM reference to the enumerator and releases it in `Drop`.
struct Enumerator(*mut winapi::IMMDeviceEnumerator);
// SAFETY: the wrapped value is an opaque COM interface pointer that this module
// only ever reads through. Whether `IMMDeviceEnumerator` may be used from several
// threads depends on the COM apartment/threading model — assumed safe here since
// the enumerator lives in a global `lazy_static`. NOTE(review): confirm against
// the MMDevice API threading documentation.
unsafe impl Send for Enumerator {
}
unsafe impl Sync for Enumerator {
}
impl Drop for Enumerator {
    #[inline]
    fn drop(&mut self) {
        // Release the COM reference acquired by `CoCreateInstance`.
        unsafe {
            (*self.0).Release();
        }
    }
}
/// WASAPI implementation for `EndpointsIterator`.
pub struct EndpointsIterator {
    // COM collection of the active render devices; released in `Drop`.
    collection: *mut winapi::IMMDeviceCollection,
    // Total number of devices in `collection`, queried once at construction.
    total_count: u32,
    // Index of the next device to yield; iteration ends once it reaches `total_count`.
    next_item: u32,
}
// SAFETY: `collection` is an opaque COM interface pointer; the iterator only calls
// read-style methods (`Item`) on it. Cross-thread use is assumed to be permitted by
// the MMDevice API — NOTE(review): confirm the collection's threading contract.
unsafe impl Send for EndpointsIterator {
}
unsafe impl Sync for EndpointsIterator {
}
impl Drop for EndpointsIterator {
    #[inline]
    fn drop(&mut self) {
        // Release the COM reference to the device collection obtained in `default()`.
        unsafe {
            (*self.collection).Release();
        }
    }
}
impl Default for EndpointsIterator {
fn default() -> EndpointsIterator {
unsafe {
let mut collection: *mut winapi::IMMDeviceCollection = mem::uninitialized();
// can fail because of wrong parameters (should never happen) or out of memory
check_result((*ENUMERATOR.0).EnumAudioEndpoints(winapi::eRender,
winapi::DEVICE_STATE_ACTIVE,
&mut collection))
.unwrap();
let mut count = mem::uninitialized();
// can fail if the parameter is null, which should never happen
check_result((*collection).GetCount(&mut count)).unwrap();
EndpointsIterator {
collection: collection,
total_count: count,
next_item: 0,
}
}
}
}
impl Iterator for EndpointsIterator {
type Item = Endpoint;
fn next(&mut self) -> Option<Endpoint> {
if self.next_item >= self.total_count {
return None;
}
unsafe {
let mut device = mem::uninitialized();
// can fail if out of range, which we just checked above
check_result((*self.collection).Item(self.next_item, &mut device)).unwrap();
self.next_item += 1;
Some(Endpoint::from_immdevice(device))
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let num = self.total_count - self.next_item;
let num = num as usize;
(num, Some(num))
}
}
pub fn default_endpoint() -> Option<Endpoint> {
unsafe {
let mut device = mem::uninitialized();
let hres = (*ENUMERATOR.0)
.GetDefaultAudioEndpoint(winapi::eRender, winapi::eConsole, &mut device);
if let Err(_err) = check_result(hres) {
return None; // TODO: check specifically for `E_NOTFOUND`, and panic otherwise
}
Some(Endpoint::from_immdevice(device))
}
}

View File

@ -2,28 +2,13 @@ extern crate winapi;
extern crate ole32; extern crate ole32;
extern crate kernel32; extern crate kernel32;
use std::ffi::OsString;
use std::io::Error as IoError; use std::io::Error as IoError;
use std::mem;
use std::os::windows::ffi::OsStringExt;
use std::ptr;
use std::slice;
use std::sync::{Arc, Mutex, MutexGuard};
use ChannelPosition; pub use self::endpoint::{Endpoint, EndpointsIterator, default_endpoint, SupportedFormatsIterator};
use Format; pub use self::voice::{Buffer, EventLoop, VoiceId};
use FormatsEnumerationError;
use SampleFormat;
use SamplesRate;
pub use self::enumerate::{EndpointsIterator, default_endpoint};
pub use self::voice::{Buffer, EventLoop, SamplesStream, Voice};
pub use std::option::IntoIter as OptionIntoIter;
pub type SupportedFormatsIterator = OptionIntoIter<Format>;
mod com; mod com;
mod enumerate; mod endpoint;
mod voice; mod voice;
#[inline] #[inline]
@ -34,272 +19,3 @@ fn check_result(result: winapi::HRESULT) -> Result<(), IoError> {
Ok(()) Ok(())
} }
} }
/// Wrapper because of that stupid decision to remove `Send` and `Sync` from raw pointers.
#[derive(Copy, Clone)]
struct IAudioClientWrapper(*mut winapi::IAudioClient);
unsafe impl Send for IAudioClientWrapper {
}
unsafe impl Sync for IAudioClientWrapper {
}
/// An opaque type that identifies an end point.
pub struct Endpoint {
device: *mut winapi::IMMDevice,
/// We cache an uninitialized `IAudioClient` so that we can call functions from it without
/// having to create/destroy audio clients all the time.
future_audio_client: Arc<Mutex<Option<IAudioClientWrapper>>>, // TODO: add NonZero around the ptr
}
unsafe impl Send for Endpoint {
}
unsafe impl Sync for Endpoint {
}
impl Endpoint {
// TODO: this function returns a GUID of the endpoin
// instead it should use the property store and return the friendly name
pub fn name(&self) -> String {
unsafe {
let mut name_ptr = mem::uninitialized();
// can only fail if wrong params or out of memory
check_result((*self.device).GetId(&mut name_ptr)).unwrap();
// finding the length of the name
let mut len = 0;
while *name_ptr.offset(len) != 0 {
len += 1;
}
// building a slice containing the name
let name_slice = slice::from_raw_parts(name_ptr, len as usize);
// and turning it into a string
let name_string: OsString = OsStringExt::from_wide(name_slice);
ole32::CoTaskMemFree(name_ptr as *mut _);
name_string.into_string().unwrap()
}
}
#[inline]
fn from_immdevice(device: *mut winapi::IMMDevice) -> Endpoint {
Endpoint {
device: device,
future_audio_client: Arc::new(Mutex::new(None)),
}
}
/// Ensures that `future_audio_client` contains a `Some` and returns a locked mutex to it.
fn ensure_future_audio_client(&self)
-> Result<MutexGuard<Option<IAudioClientWrapper>>, IoError> {
let mut lock = self.future_audio_client.lock().unwrap();
if lock.is_some() {
return Ok(lock);
}
let audio_client: *mut winapi::IAudioClient = unsafe {
let mut audio_client = mem::uninitialized();
let hresult = (*self.device).Activate(&winapi::IID_IAudioClient,
winapi::CLSCTX_ALL,
ptr::null_mut(),
&mut audio_client);
// can fail if the device has been disconnected since we enumerated it, or if
// the device doesn't support playback for some reason
check_result(hresult)?;
assert!(!audio_client.is_null());
audio_client as *mut _
};
*lock = Some(IAudioClientWrapper(audio_client));
Ok(lock)
}
/// Returns an uninitialized `IAudioClient`.
#[inline]
fn build_audioclient(&self) -> Result<*mut winapi::IAudioClient, IoError> {
let mut lock = self.ensure_future_audio_client()?;
let client = lock.unwrap().0;
*lock = None;
Ok(client)
}
pub fn supported_formats(
&self)
-> Result<SupportedFormatsIterator, FormatsEnumerationError> {
// We always create voices in shared mode, therefore all samples go through an audio
// processor to mix them together.
// However there is no way to query the list of all formats that are supported by the
// audio processor, but one format is guaranteed to be supported, the one returned by
// `GetMixFormat`.
// initializing COM because we call `CoTaskMemFree`
com::com_initialized();
let lock = match self.ensure_future_audio_client() {
Err(ref e) if e.raw_os_error() == Some(winapi::AUDCLNT_E_DEVICE_INVALIDATED) =>
return Err(FormatsEnumerationError::DeviceNotAvailable),
e => e.unwrap(),
};
let client = lock.unwrap().0;
unsafe {
let mut format_ptr = mem::uninitialized();
match check_result((*client).GetMixFormat(&mut format_ptr)) {
Err(ref e) if e.raw_os_error() == Some(winapi::AUDCLNT_E_DEVICE_INVALIDATED) => {
return Err(FormatsEnumerationError::DeviceNotAvailable);
},
Err(e) => panic!("{:?}", e),
Ok(()) => (),
};
let format = {
let (channels, data_type) = match (*format_ptr).wFormatTag {
winapi::WAVE_FORMAT_PCM => {
(vec![ChannelPosition::FrontLeft, ChannelPosition::FrontRight],
SampleFormat::I16)
},
winapi::WAVE_FORMAT_EXTENSIBLE => {
let format_ptr = format_ptr as *const winapi::WAVEFORMATEXTENSIBLE;
let channels = {
let mut channels = Vec::new();
let mask = (*format_ptr).dwChannelMask;
if (mask & winapi::SPEAKER_FRONT_LEFT) != 0 {
channels.push(ChannelPosition::FrontLeft);
}
if (mask & winapi::SPEAKER_FRONT_RIGHT) != 0 {
channels.push(ChannelPosition::FrontRight);
}
if (mask & winapi::SPEAKER_FRONT_CENTER) != 0 {
channels.push(ChannelPosition::FrontCenter);
}
if (mask & winapi::SPEAKER_LOW_FREQUENCY) != 0 {
channels.push(ChannelPosition::LowFrequency);
}
if (mask & winapi::SPEAKER_BACK_LEFT) != 0 {
channels.push(ChannelPosition::BackLeft);
}
if (mask & winapi::SPEAKER_BACK_RIGHT) != 0 {
channels.push(ChannelPosition::BackRight);
}
if (mask & winapi::SPEAKER_FRONT_LEFT_OF_CENTER) != 0 {
channels.push(ChannelPosition::FrontLeftOfCenter);
}
if (mask & winapi::SPEAKER_FRONT_RIGHT_OF_CENTER) != 0 {
channels.push(ChannelPosition::FrontRightOfCenter);
}
if (mask & winapi::SPEAKER_BACK_CENTER) != 0 {
channels.push(ChannelPosition::BackCenter);
}
if (mask & winapi::SPEAKER_SIDE_LEFT) != 0 {
channels.push(ChannelPosition::SideLeft);
}
if (mask & winapi::SPEAKER_SIDE_RIGHT) != 0 {
channels.push(ChannelPosition::SideRight);
}
if (mask & winapi::SPEAKER_TOP_CENTER) != 0 {
channels.push(ChannelPosition::TopCenter);
}
if (mask & winapi::SPEAKER_TOP_FRONT_LEFT) != 0 {
channels.push(ChannelPosition::TopFrontLeft);
}
if (mask & winapi::SPEAKER_TOP_FRONT_CENTER) != 0 {
channels.push(ChannelPosition::TopFrontCenter);
}
if (mask & winapi::SPEAKER_TOP_FRONT_RIGHT) != 0 {
channels.push(ChannelPosition::TopFrontRight);
}
if (mask & winapi::SPEAKER_TOP_BACK_LEFT) != 0 {
channels.push(ChannelPosition::TopBackLeft);
}
if (mask & winapi::SPEAKER_TOP_BACK_CENTER) != 0 {
channels.push(ChannelPosition::TopBackCenter);
}
if (mask & winapi::SPEAKER_TOP_BACK_RIGHT) != 0 {
channels.push(ChannelPosition::TopBackRight);
}
assert_eq!((*format_ptr).Format.nChannels as usize, channels.len());
channels
};
let format = {
fn cmp_guid(a: &winapi::GUID, b: &winapi::GUID) -> bool {
a.Data1 == b.Data1 && a.Data2 == b.Data2 && a.Data3 == b.Data3 &&
a.Data4 == b.Data4
}
if cmp_guid(&(*format_ptr).SubFormat,
&winapi::KSDATAFORMAT_SUBTYPE_IEEE_FLOAT)
{
SampleFormat::F32
} else if cmp_guid(&(*format_ptr).SubFormat,
&winapi::KSDATAFORMAT_SUBTYPE_PCM)
{
SampleFormat::I16
} else {
panic!("Unknown SubFormat GUID returned by GetMixFormat: {:?}",
(*format_ptr).SubFormat)
}
};
(channels, format)
},
f => panic!("Unknown data format returned by GetMixFormat: {:?}", f),
};
Format {
channels: channels,
samples_rate: SamplesRate((*format_ptr).nSamplesPerSec),
data_type: data_type,
}
};
ole32::CoTaskMemFree(format_ptr as *mut _);
Ok(Some(format).into_iter())
}
}
}
impl PartialEq for Endpoint {
#[inline]
fn eq(&self, other: &Endpoint) -> bool {
self.device == other.device
}
}
impl Eq for Endpoint {
}
impl Clone for Endpoint {
#[inline]
fn clone(&self) -> Endpoint {
unsafe {
(*self.device).AddRef();
}
Endpoint {
device: self.device,
future_audio_client: self.future_audio_client.clone(),
}
}
}
impl Drop for Endpoint {
#[inline]
fn drop(&mut self) {
unsafe {
(*self.device).Release();
}
if let Some(client) = self.future_audio_client.lock().unwrap().take() {
unsafe {
(*client.0).Release();
}
}
}
}

View File

@ -6,19 +6,15 @@ use super::kernel32;
use super::ole32; use super::ole32;
use super::winapi; use super::winapi;
use std::iter;
use std::marker::PhantomData;
use std::mem; use std::mem;
use std::ptr; use std::ptr;
use std::slice; use std::slice;
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering;
use std::sync::Arc; use std::sync::Arc;
use std::sync::Mutex; use std::sync::Mutex;
use std::sync::atomic::AtomicBool;
use std::sync::atomic::Ordering;
use futures::Async;
use futures::Poll;
use futures::stream::Stream;
use futures::task;
use futures::task::Task;
use ChannelPosition; use ChannelPosition;
use CreationError; use CreationError;
@ -27,45 +23,56 @@ use SampleFormat;
use UnknownTypeBuffer; use UnknownTypeBuffer;
pub struct EventLoop { pub struct EventLoop {
inner: Arc<EventLoopInner>, // Data used by the `run()` function implementation. The mutex is kept lock permanently by
} // `run()`. This ensures that two `run()` invocations can't run at the same time, and also
// means that we shouldn't try to lock this field from anywhere else but `run()`.
run_context: Mutex<RunContext>,
unsafe impl Send for EventLoop { // Identifier of the next voice to create. Each new voice increases this counter. If the
} // counter overflows, there's a panic.
unsafe impl Sync for EventLoop { // TODO: use AtomicU64 instead
} next_voice_id: AtomicUsize,
struct EventLoopInner { // Commands processed by the `run()` method that is currently running.
// List of handles that are currently being polled or that are going to be polled. This mutex // `pending_scheduled_event` must be signalled whenever a command is added here, so that it
// is locked for as long as the event loop is running. // will get picked up.
// // TODO: use a lock-free container
// In the `EventLoopScheduled`, the first handle in the list of handles is always commands: Mutex<Vec<Command>>,
// `pending_scheduled_event`. This means that the length of `handles` is always 1 + the length
// of `task_handles`.
// FIXME: no way to remove elements from that list?
scheduled: Mutex<EventLoopScheduled>,
// Since the above mutex is locked most of the time, we add new handles to this list instead. // This event is signalled after a new entry is added to `commands`, so that the `run()`
// After a new element is added to this list, you should notify `pending_scheduled_event` // method can be notified.
// so that they get transferred to `scheduled`.
//
// The length of `handles` and `task_handles` should always be equal.
pending_scheduled: Mutex<EventLoopScheduled>,
// This event is signalled after elements have been added to `pending_scheduled` in order to
// notify that they should be picked up.
pending_scheduled_event: winapi::HANDLE, pending_scheduled_event: winapi::HANDLE,
} }
struct EventLoopScheduled { struct RunContext {
// List of handles that correspond to voices. // Voices that have been created in this event loop.
// They are linked to `task_handles`, but we store them separately in order to easily call voices: Vec<VoiceInner>,
// `WaitForMultipleObjectsEx` on the array without having to perform any conversion.
handles: Vec<winapi::HANDLE>,
// List of task handles corresponding to `handles`. The second element is used to signal // Handles corresponding to the `event` field of each element of `voices`. Must always be in
// the voice that it has been signaled. // sync with `voices`, except that the first element is always `pending_scheduled_event`.
task_handles: Vec<(Task, Arc<AtomicBool>)>, handles: Vec<winapi::HANDLE>,
}
enum Command {
NewVoice(VoiceInner),
DestroyVoice(VoiceId),
Play(VoiceId),
Pause(VoiceId),
}
struct VoiceInner {
id: VoiceId,
audio_client: *mut winapi::IAudioClient,
render_client: *mut winapi::IAudioRenderClient,
// Event that is signalled by WASAPI whenever audio data must be written.
event: winapi::HANDLE,
// True if the voice is currently playing. False if paused.
playing: bool,
// Number of frames of audio data in the underlying buffer allocated by WASAPI.
max_frames_in_buffer: winapi::UINT32,
// Number of bytes that each frame occupies.
bytes_per_frame: winapi::WORD,
} }
impl EventLoop { impl EventLoop {
@ -74,103 +81,18 @@ impl EventLoop {
unsafe { kernel32::CreateEventA(ptr::null_mut(), 0, 0, ptr::null()) }; unsafe { kernel32::CreateEventA(ptr::null_mut(), 0, 0, ptr::null()) };
EventLoop { EventLoop {
inner: Arc::new(EventLoopInner {
pending_scheduled_event: pending_scheduled_event, pending_scheduled_event: pending_scheduled_event,
scheduled: Mutex::new(EventLoopScheduled { run_context: Mutex::new(RunContext {
voices: Vec::new(),
handles: vec![pending_scheduled_event], handles: vec![pending_scheduled_event],
task_handles: vec![],
}),
pending_scheduled: Mutex::new(EventLoopScheduled {
handles: vec![],
task_handles: vec![],
}),
}), }),
next_voice_id: AtomicUsize::new(0),
commands: Mutex::new(Vec::new()),
} }
} }
pub fn run(&self) { pub fn build_voice(&self, end_point: &Endpoint, format: &Format)
unsafe { -> Result<VoiceId, CreationError> {
let mut scheduled = self.inner.scheduled.lock().unwrap();
loop {
debug_assert!(scheduled.handles.len() == 1 + scheduled.task_handles.len());
// Creating a voice checks for the MAXIMUM_WAIT_OBJECTS limit.
// FIXME: this is not the case ^
debug_assert!(scheduled.handles.len() <= winapi::MAXIMUM_WAIT_OBJECTS as usize);
// Wait for any of the handles to be signalled, which means that the corresponding
// sound needs a buffer.
let result = kernel32::WaitForMultipleObjectsEx(scheduled.handles.len() as u32,
scheduled.handles.as_ptr(),
winapi::FALSE,
winapi::INFINITE, /* TODO: allow setting a timeout */
winapi::FALSE /* irrelevant parameter here */);
// Notifying the corresponding task handler.
assert!(result >= winapi::WAIT_OBJECT_0);
let handle_id = (result - winapi::WAIT_OBJECT_0) as usize;
if handle_id == 0 {
// The `pending_scheduled_event` handle has been notified, which means that we
// should pick up the content of `pending_scheduled`.
let mut pending = self.inner.pending_scheduled.lock().unwrap();
scheduled.handles.append(&mut pending.handles);
scheduled.task_handles.append(&mut pending.task_handles);
} else {
scheduled.handles.remove(handle_id);
let (task_handle, ready) = scheduled.task_handles.remove(handle_id - 1);
ready.store(true, Ordering::Relaxed);
task_handle.unpark();
}
}
}
}
}
impl Drop for EventLoop {
#[inline]
fn drop(&mut self) {
unsafe {
kernel32::CloseHandle(self.inner.pending_scheduled_event);
}
}
}
pub struct Voice {
inner: Arc<Mutex<VoiceInner>>,
playing: bool,
}
pub struct SamplesStream {
event_loop: Arc<EventLoopInner>,
inner: Arc<Mutex<VoiceInner>>,
// The event that is signalled whenever a buffer is ready to be submitted to the voice.
event: winapi::HANDLE, // TODO: not deleted
max_frames_in_buffer: winapi::UINT32,
bytes_per_frame: winapi::WORD,
ready: Arc<AtomicBool>,
}
unsafe impl Send for SamplesStream {
}
unsafe impl Sync for SamplesStream {
}
struct VoiceInner {
audio_client: *mut winapi::IAudioClient,
render_client: *mut winapi::IAudioRenderClient,
}
unsafe impl Send for Voice {
}
unsafe impl Sync for Voice {
}
impl Voice {
pub fn new(end_point: &Endpoint, format: &Format, event_loop: &EventLoop)
-> Result<(Voice, SamplesStream), CreationError> {
unsafe { unsafe {
// Making sure that COM is initialized. // Making sure that COM is initialized.
// It's not actually sure that this is required, but when in doubt do it. // It's not actually sure that this is required, but when in doubt do it.
@ -305,169 +227,218 @@ impl Voice {
&mut *render_client &mut *render_client
}; };
// Everything went fine. let new_voice_id = VoiceId(self.next_voice_id.fetch_add(1, Ordering::Relaxed));
let inner = Arc::new(Mutex::new(VoiceInner { assert_ne!(new_voice_id.0, usize::max_value()); // check for overflows
// Once we built the `VoiceInner`, we add a command that will be picked up by the
// `run()` method and added to the `RunContext`.
{
let inner = VoiceInner {
id: new_voice_id.clone(),
audio_client: audio_client, audio_client: audio_client,
render_client: render_client, render_client: render_client,
}));
let voice = Voice {
inner: inner.clone(),
playing: false,
};
let samples_stream = SamplesStream {
event_loop: event_loop.inner.clone(),
inner: inner,
event: event, event: event,
playing: false,
max_frames_in_buffer: max_frames_in_buffer, max_frames_in_buffer: max_frames_in_buffer,
bytes_per_frame: format.nBlockAlign, bytes_per_frame: format.nBlockAlign,
ready: Arc::new(AtomicBool::new(false)),
}; };
Ok((voice, samples_stream)) self.commands.lock().unwrap().push(Command::NewVoice(inner));
let result = kernel32::SetEvent(self.pending_scheduled_event);
assert!(result != 0);
};
Ok(new_voice_id)
} }
} }
#[inline] #[inline]
pub fn play(&mut self) { pub fn destroy_voice(&self, voice_id: VoiceId) {
if !self.playing {
let mut inner = self.inner.lock().unwrap();
unsafe { unsafe {
let hresult = (*inner.audio_client).Start(); self.commands.lock().unwrap().push(Command::DestroyVoice(voice_id));
check_result(hresult).unwrap(); let result = kernel32::SetEvent(self.pending_scheduled_event);
}
}
self.playing = true;
}
#[inline]
pub fn pause(&mut self) {
if self.playing {
let mut inner = self.inner.lock().unwrap();
unsafe {
let hresult = (*inner.audio_client).Stop();
check_result(hresult).unwrap();
}
}
self.playing = false;
}
}
impl SamplesStream {
#[inline]
fn schedule(&mut self) {
let mut pending = self.event_loop.pending_scheduled.lock().unwrap();
pending.handles.push(self.event);
pending
.task_handles
.push((task::park(), self.ready.clone()));
drop(pending);
let result = unsafe { kernel32::SetEvent(self.event_loop.pending_scheduled_event) };
assert!(result != 0); assert!(result != 0);
} }
}
impl Stream for SamplesStream {
type Item = UnknownTypeBuffer;
type Error = ();
fn poll(&mut self) -> Poll<Option<Self::Item>, Self::Error> {
unsafe {
if self.ready.swap(false, Ordering::Relaxed) == false {
// Despite its name this function does not block, because we pass `0`.
let result = kernel32::WaitForSingleObject(self.event, 0);
// Park the task and returning if the event is not ready.
match result {
winapi::WAIT_OBJECT_0 => (),
winapi::WAIT_TIMEOUT => {
self.schedule();
return Ok(Async::NotReady);
},
_ => unreachable!(),
};
} }
// If we reach here, that means we're ready to accept new samples. #[inline]
pub fn run<F>(&self, mut callback: F) -> !
where F: FnMut(VoiceId, UnknownTypeBuffer)
{
self.run_inner(&mut callback);
}
let poll = { fn run_inner(&self, callback: &mut FnMut(VoiceId, UnknownTypeBuffer)) -> ! {
let mut inner = self.inner.lock().unwrap(); unsafe {
// We keep `run_context` locked forever, which guarantees that two invocations of
// `run()` cannot run simultaneously.
let mut run_context = self.run_context.lock().unwrap();
loop {
// Process the pending commands.
let mut commands_lock = self.commands.lock().unwrap();
for command in commands_lock.drain(..) {
match command {
Command::NewVoice(voice_inner) => {
let event = voice_inner.event;
run_context.voices.push(voice_inner);
run_context.handles.push(event);
},
Command::DestroyVoice(voice_id) => {
match run_context.voices.iter().position(|v| v.id == voice_id) {
None => continue,
Some(p) => {
run_context.handles.remove(p + 1);
run_context.voices.remove(p);
},
}
},
Command::Play(voice_id) => {
if let Some(v) = run_context.voices.get_mut(voice_id.0) {
if !v.playing {
let hresult = (*v.audio_client).Start();
check_result(hresult).unwrap();
v.playing = true;
}
}
},
Command::Pause(voice_id) => {
if let Some(v) = run_context.voices.get_mut(voice_id.0) {
if v.playing {
let hresult = (*v.audio_client).Stop();
check_result(hresult).unwrap();
v.playing = true;
}
}
},
}
}
drop(commands_lock);
// Wait for any of the handles to be signalled, which means that the corresponding
// sound needs a buffer.
debug_assert!(run_context.handles.len() <= winapi::MAXIMUM_WAIT_OBJECTS as usize);
let result = kernel32::WaitForMultipleObjectsEx(run_context.handles.len() as u32,
run_context.handles.as_ptr(),
winapi::FALSE,
winapi::INFINITE, /* TODO: allow setting a timeout */
winapi::FALSE /* irrelevant parameter here */);
// Notifying the corresponding task handler.
debug_assert!(result >= winapi::WAIT_OBJECT_0);
let handle_id = (result - winapi::WAIT_OBJECT_0) as usize;
// If `handle_id` is 0, then it's `pending_scheduled_event` that was signalled in
// order for us to pick up the pending commands.
// Otherwise, a voice needs data.
if handle_id >= 1 {
let voice = &mut run_context.voices[handle_id - 1];
let voice_id = voice.id.clone();
// Obtaining the number of frames that are available to be written. // Obtaining the number of frames that are available to be written.
let frames_available = { let frames_available = {
let mut padding = mem::uninitialized(); let mut padding = mem::uninitialized();
let hresult = (*inner.audio_client).GetCurrentPadding(&mut padding); let hresult = (*voice.audio_client).GetCurrentPadding(&mut padding);
check_result(hresult).unwrap(); check_result(hresult).unwrap();
self.max_frames_in_buffer - padding voice.max_frames_in_buffer - padding
}; };
if frames_available == 0 { if frames_available == 0 {
Ok(Async::NotReady) // TODO: can this happen?
} else { continue;
}
// Obtaining a pointer to the buffer. // Obtaining a pointer to the buffer.
let (buffer_data, buffer_len) = { let (buffer_data, buffer_len) = {
let mut buffer: *mut winapi::BYTE = mem::uninitialized(); let mut buffer: *mut winapi::BYTE = mem::uninitialized();
let hresult = (*inner.render_client) let hresult = (*voice.render_client)
.GetBuffer(frames_available, &mut buffer as *mut *mut _); .GetBuffer(frames_available, &mut buffer as *mut *mut _);
check_result(hresult).unwrap(); // FIXME: can return `AUDCLNT_E_DEVICE_INVALIDATED` check_result(hresult).unwrap(); // FIXME: can return `AUDCLNT_E_DEVICE_INVALIDATED`
debug_assert!(!buffer.is_null()); debug_assert!(!buffer.is_null());
(buffer as *mut _, (buffer as *mut _,
frames_available as usize * self.bytes_per_frame as usize / frames_available as usize * voice.bytes_per_frame as usize /
mem::size_of::<f32>()) // FIXME: correct size mem::size_of::<f32>()) // FIXME: correct size when not f32
}; };
let buffer = Buffer { let buffer = Buffer {
voice: self.inner.clone(), voice: voice,
buffer_data: buffer_data, buffer_data: buffer_data,
buffer_len: buffer_len, buffer_len: buffer_len,
frames: frames_available, frames: frames_available,
marker: PhantomData,
}; };
Ok(Async::Ready(Some(UnknownTypeBuffer::F32(::Buffer { let buffer = UnknownTypeBuffer::F32(::Buffer { target: Some(buffer) }); // FIXME: not always f32
target: Some(buffer), callback(voice_id, buffer);
})))) // FIXME: not necessarily F32 }
}
} }
};
if let Ok(Async::NotReady) = poll {
self.schedule();
} }
poll #[inline]
pub fn play(&self, voice: VoiceId) {
unsafe {
self.commands.lock().unwrap().push(Command::Play(voice));
let result = kernel32::SetEvent(self.pending_scheduled_event);
assert!(result != 0);
}
}
#[inline]
pub fn pause(&self, voice: VoiceId) {
unsafe {
self.commands.lock().unwrap().push(Command::Pause(voice));
let result = kernel32::SetEvent(self.pending_scheduled_event);
assert!(result != 0);
} }
} }
} }
impl Drop for EventLoop {
#[inline]
fn drop(&mut self) {
unsafe {
kernel32::CloseHandle(self.pending_scheduled_event);
}
}
}
unsafe impl Send for EventLoop {
}
unsafe impl Sync for EventLoop {
}
// The content of a voice ID is a number that was fetched from `next_voice_id`.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct VoiceId(usize);
impl Drop for VoiceInner { impl Drop for VoiceInner {
#[inline] #[inline]
fn drop(&mut self) { fn drop(&mut self) {
unsafe { unsafe {
(*self.render_client).Release(); (*self.render_client).Release();
(*self.audio_client).Release(); (*self.audio_client).Release();
kernel32::CloseHandle(self.event);
} }
} }
} }
pub struct Buffer<T> { pub struct Buffer<'a, T: 'a> {
voice: Arc<Mutex<VoiceInner>>, voice: &'a mut VoiceInner,
buffer_data: *mut T, buffer_data: *mut T,
buffer_len: usize, buffer_len: usize,
frames: winapi::UINT32, frames: winapi::UINT32,
marker: PhantomData<&'a mut [T]>,
} }
unsafe impl<T> Send for Buffer<T> { unsafe impl<'a, T> Send for Buffer<'a, T> {
} }
impl<T> Buffer<T> { impl<'a, T> Buffer<'a, T> {
#[inline] #[inline]
pub fn buffer(&mut self) -> &mut [T] { pub fn buffer(&mut self) -> &mut [T] {
unsafe { slice::from_raw_parts_mut(self.buffer_data, self.buffer_len) } unsafe { slice::from_raw_parts_mut(self.buffer_data, self.buffer_len) }
@ -481,10 +452,9 @@ impl<T> Buffer<T> {
#[inline] #[inline]
pub fn finish(self) { pub fn finish(self) {
unsafe { unsafe {
let mut inner = self.voice.lock().unwrap(); let hresult = (*self.voice.render_client).ReleaseBuffer(self.frames as u32, 0);
let hresult = (*inner.render_client).ReleaseBuffer(self.frames as u32, 0);
match check_result(hresult) { match check_result(hresult) {
// ignoring the error that is produced if the device has been disconnected // Ignoring the error that is produced if the device has been disconnected.
Err(ref e) if e.raw_os_error() == Some(winapi::AUDCLNT_E_DEVICE_INVALIDATED) => (), Err(ref e) if e.raw_os_error() == Some(winapi::AUDCLNT_E_DEVICE_INVALIDATED) => (),
e => e.unwrap(), e => e.unwrap(),
}; };
@ -492,6 +462,7 @@ impl<T> Buffer<T> {
} }
} }
// Turns a `Format` into a `WAVEFORMATEXTENSIBLE`.
fn format_to_waveformatextensible(format: &Format) fn format_to_waveformatextensible(format: &Format)
-> Result<winapi::WAVEFORMATEXTENSIBLE, CreationError> { -> Result<winapi::WAVEFORMATEXTENSIBLE, CreationError> {
Ok(winapi::WAVEFORMATEXTENSIBLE { Ok(winapi::WAVEFORMATEXTENSIBLE {
@ -505,10 +476,10 @@ fn format_to_waveformatextensible(format: &Format)
nSamplesPerSec: format.samples_rate.0 as winapi::DWORD, nSamplesPerSec: format.samples_rate.0 as winapi::DWORD,
nAvgBytesPerSec: format.channels.len() as winapi::DWORD * nAvgBytesPerSec: format.channels.len() as winapi::DWORD *
format.samples_rate.0 as winapi::DWORD * format.samples_rate.0 as winapi::DWORD *
format.data_type.get_sample_size() as winapi::DWORD, format.data_type.sample_size() as winapi::DWORD,
nBlockAlign: format.channels.len() as winapi::WORD * nBlockAlign: format.channels.len() as winapi::WORD *
format.data_type.get_sample_size() as winapi::WORD, format.data_type.sample_size() as winapi::WORD,
wBitsPerSample: 8 * format.data_type.get_sample_size() as winapi::WORD, wBitsPerSample: 8 * format.data_type.sample_size() as winapi::WORD,
cbSize: match format.data_type { cbSize: match format.data_type {
SampleFormat::I16 => 0, SampleFormat::I16 => 0,
SampleFormat::F32 => (mem::size_of::<winapi::WAVEFORMATEXTENSIBLE>() - SampleFormat::F32 => (mem::size_of::<winapi::WAVEFORMATEXTENSIBLE>() -
@ -517,7 +488,7 @@ fn format_to_waveformatextensible(format: &Format)
SampleFormat::U16 => return Err(CreationError::FormatNotSupported), SampleFormat::U16 => return Err(CreationError::FormatNotSupported),
}, },
}, },
Samples: 8 * format.data_type.get_sample_size() as winapi::WORD, Samples: 8 * format.data_type.sample_size() as winapi::WORD,
dwChannelMask: { dwChannelMask: {
let mut mask = 0; let mut mask = 0;
for &channel in format.channels.iter() { for &channel in format.channels.iter() {