General WASAPI cleanup

Pierre Krieger 2015-09-22 13:25:42 +02:00
parent 3e4ced5fba
commit d0e5e7f4e6
1 changed file with 113 additions and 93 deletions

@@ -31,7 +31,6 @@ unsafe impl Sync for Voice {}
 impl Voice {
     pub fn new(end_point: &Endpoint, format: &Format) -> Result<Voice, CreationError> {
-        // FIXME: release everything
         unsafe {
            // making sure that COM is initialized
            // it's not actually sure that this is required, but when in doubt do it
@@ -46,98 +45,42 @@ impl Voice {
             // computing the format and initializing the device
             let format = {
-                let format_attempt = winapi::WAVEFORMATEXTENSIBLE {
-                    Format: winapi::WAVEFORMATEX {
-                        wFormatTag: match format.data_type {
-                            SampleFormat::I16 => winapi::WAVE_FORMAT_PCM,
-                            SampleFormat::F32 => winapi::WAVE_FORMAT_EXTENSIBLE,
-                            SampleFormat::U16 => return Err(CreationError::FormatNotSupported),
-                        },
-                        nChannels: format.channels.len() as winapi::WORD,
-                        nSamplesPerSec: format.samples_rate.0 as winapi::DWORD,
-                        nAvgBytesPerSec: format.channels.len() as winapi::DWORD *
-                                         format.samples_rate.0 as winapi::DWORD *
-                                         format.data_type.get_sample_size() as winapi::DWORD,
-                        nBlockAlign: format.channels.len() as winapi::WORD *
-                                     format.data_type.get_sample_size() as winapi::WORD,
-                        wBitsPerSample: 8 * format.data_type.get_sample_size() as winapi::WORD,
-                        cbSize: match format.data_type {
-                            SampleFormat::I16 => 0,
-                            SampleFormat::F32 => (mem::size_of::<winapi::WAVEFORMATEXTENSIBLE>() -
-                                                  mem::size_of::<winapi::WAVEFORMATEX>()) as winapi::WORD,
-                            SampleFormat::U16 => return Err(CreationError::FormatNotSupported),
-                        },
-                    },
-                    Samples: 8 * format.data_type.get_sample_size() as winapi::WORD,
-                    dwChannelMask: {
-                        let mut mask = 0;
-                        for &channel in format.channels.iter() {
-                            let raw_value = match channel {
-                                ChannelPosition::FrontLeft => winapi::SPEAKER_FRONT_LEFT,
-                                ChannelPosition::FrontRight => winapi::SPEAKER_FRONT_RIGHT,
-                                ChannelPosition::FrontCenter => winapi::SPEAKER_FRONT_CENTER,
-                                ChannelPosition::LowFrequency => winapi::SPEAKER_LOW_FREQUENCY,
-                                ChannelPosition::BackLeft => winapi::SPEAKER_BACK_LEFT,
-                                ChannelPosition::BackRight => winapi::SPEAKER_BACK_RIGHT,
-                                ChannelPosition::FrontLeftOfCenter => winapi::SPEAKER_FRONT_LEFT_OF_CENTER,
-                                ChannelPosition::FrontRightOfCenter => winapi::SPEAKER_FRONT_RIGHT_OF_CENTER,
-                                ChannelPosition::BackCenter => winapi::SPEAKER_BACK_CENTER,
-                                ChannelPosition::SideLeft => winapi::SPEAKER_SIDE_LEFT,
-                                ChannelPosition::SideRight => winapi::SPEAKER_SIDE_RIGHT,
-                                ChannelPosition::TopCenter => winapi::SPEAKER_TOP_CENTER,
-                                ChannelPosition::TopFrontLeft => winapi::SPEAKER_TOP_FRONT_LEFT,
-                                ChannelPosition::TopFrontCenter => winapi::SPEAKER_TOP_FRONT_CENTER,
-                                ChannelPosition::TopFrontRight => winapi::SPEAKER_TOP_FRONT_RIGHT,
-                                ChannelPosition::TopBackLeft => winapi::SPEAKER_TOP_BACK_LEFT,
-                                ChannelPosition::TopBackCenter => winapi::SPEAKER_TOP_BACK_CENTER,
-                                ChannelPosition::TopBackRight => winapi::SPEAKER_TOP_BACK_RIGHT,
-                            };
-
-                            // channels must be in the right order
-                            if raw_value <= mask {
-                                return Err(CreationError::FormatNotSupported);
-                            }
-
-                            mask = mask | raw_value;
-                        }
-
-                        mask
-                    },
-                    SubFormat: match format.data_type {
-                        SampleFormat::I16 => winapi::KSDATAFORMAT_SUBTYPE_PCM,
-                        SampleFormat::F32 => winapi::KSDATAFORMAT_SUBTYPE_IEEE_FLOAT,
-                        SampleFormat::U16 => return Err(CreationError::FormatNotSupported),
-                    },
-                };
-
-                let mut format_ptr: *mut winapi::WAVEFORMATEX = mem::uninitialized();
-                let hresult = (*audio_client).IsFormatSupported(winapi::AUDCLNT_SHAREMODE::AUDCLNT_SHAREMODE_SHARED,
-                                                                &format_attempt.Format, &mut format_ptr);
-
-                if !format_ptr.is_null() {
-                    ole32::CoTaskMemFree(format_ptr as *mut _);
-                }
-
-                if hresult == winapi::S_FALSE {
-                    return Err(CreationError::FormatNotSupported);
-                }
-
-                match check_result(hresult) {
-                    Err(ref e) if e.raw_os_error() == Some(winapi::AUDCLNT_E_DEVICE_INVALIDATED) =>
+                let format_attempt = try!(format_to_waveformatextensible(format));
+                let share_mode = winapi::AUDCLNT_SHAREMODE::AUDCLNT_SHAREMODE_SHARED;
+
+                // `IsFormatSupported` checks whether the format is supported and fills
+                // a `WAVEFORMATEX`
+                let mut dummy_fmt_ptr: *mut winapi::WAVEFORMATEX = mem::uninitialized();
+                let hresult = (*audio_client).IsFormatSupported(share_mode, &format_attempt.Format,
+                                                                &mut dummy_fmt_ptr);
+                // we free that `WAVEFORMATEX` immediately after because we don't need it
+                if !dummy_fmt_ptr.is_null() {
+                    ole32::CoTaskMemFree(dummy_fmt_ptr as *mut _);
+                }
+
+                // `IsFormatSupported` can return `S_FALSE` (which means that a compatible format
+                // has been found) but we also treat this as an error
+                match (hresult, check_result(hresult)) {
+                    (_, Err(ref e))
+                            if e.raw_os_error() == Some(winapi::AUDCLNT_E_DEVICE_INVALIDATED) =>
                     {
                         (*audio_client).Release();
                         return Err(CreationError::DeviceNotAvailable);
                     },
-                    Err(e) => {
+                    (_, Err(e)) => {
                         (*audio_client).Release();
                         panic!("{:?}", e);
                     },
-                    Ok(()) => (),
+                    (winapi::S_FALSE, _) => {
+                        (*audio_client).Release();
+                        return Err(CreationError::FormatNotSupported);
+                    },
+                    (_, Ok(())) => (),
                 };
 
-                let hresult = (*audio_client).Initialize(winapi::AUDCLNT_SHAREMODE::AUDCLNT_SHAREMODE_SHARED,
-                                                         0, 10000000, 0, &format_attempt.Format, ptr::null());
+                // finally initializing the audio client
+                let hresult = (*audio_client).Initialize(share_mode, 0, 10000000, 0,
+                                                         &format_attempt.Format, ptr::null());
 
                 match check_result(hresult) {
                     Err(ref e) if e.raw_os_error() == Some(winapi::AUDCLNT_E_DEVICE_INVALIDATED) =>
                     {
@@ -154,7 +97,7 @@ impl Voice {
                 format_attempt.Format
             };
 
-            //
+            // obtaining the size of the samples buffer in number of frames
             let max_frames_in_buffer = {
                 let mut max_frames_in_buffer = mem::uninitialized();
                 let hresult = (*audio_client).GetBufferSize(&mut max_frames_in_buffer);
@@ -175,11 +118,12 @@ impl Voice {
                 max_frames_in_buffer
             };
 
-            //
+            // building a `IAudioRenderClient` that will be used to fill the samples buffer
            let render_client = {
                let mut render_client: *mut winapi::IAudioRenderClient = mem::uninitialized();
                let hresult = (*audio_client).GetService(&winapi::IID_IAudioRenderClient,
-                                                        &mut render_client as *mut *mut winapi::IAudioRenderClient
+                                                        &mut render_client
+                                                            as *mut *mut winapi::IAudioRenderClient
                                                              as *mut _);
 
                match check_result(hresult) {
@@ -198,6 +142,7 @@ impl Voice {
                 &mut *render_client
             };
 
+            // everything went fine
             Ok(Voice {
                 audio_client: audio_client,
                 render_client: render_client,
@@ -232,7 +177,7 @@ impl Voice {
     pub fn append_data<'a, T>(&'a mut self, max_elements: usize) -> Buffer<'a, T> {
         unsafe {
-            //
+            // obtaining the number of frames that are available to be written
             let frames_available = {
                 let mut padding = mem::uninitialized();
                 let hresult = (*self.audio_client).GetCurrentPadding(&mut padding);
@@ -240,25 +185,27 @@ impl Voice {
                 self.max_frames_in_buffer - padding
             };
 
+            // making sure `frames_available` is inferior to `max_elements`
             let frames_available = cmp::min(frames_available,
                                             max_elements as u32 * mem::size_of::<T>() as u32 /
                                             self.bytes_per_frame as u32);
 
+            // the WASAPI has some weird behaviors when the buffer size is zero, so we handle this
+            // ourselves
             if frames_available == 0 {
                 return Buffer::Empty;
             }
 
-            // loading buffer
+            // obtaining a pointer to the buffer
             let (buffer_data, buffer_len) = {
                 let mut buffer: *mut winapi::BYTE = mem::uninitialized();
                 let hresult = (*self.render_client).GetBuffer(frames_available,
                                                               &mut buffer as *mut *mut _);
-                check_result(hresult).unwrap();
-                assert!(!buffer.is_null());
+                check_result(hresult).unwrap(); // FIXME: can return `AUDCLNT_E_DEVICE_INVALIDATED`
+                debug_assert!(!buffer.is_null());
 
                 (buffer as *mut T,
-                 frames_available as usize * self.bytes_per_frame as usize
-                     / mem::size_of::<T>())
+                 frames_available as usize * self.bytes_per_frame as usize / mem::size_of::<T>())
             };
 
             Buffer::Buffer {
@@ -351,8 +298,81 @@ impl<'a, T> Buffer<'a, T> {
         if let Buffer::Buffer { render_client, frames, .. } = self {
             unsafe {
                 let hresult = (*render_client).ReleaseBuffer(frames as u32, 0);
-                check_result(hresult).unwrap();
+                match check_result(hresult) {
+                    // ignoring the error that is produced if the device has been disconnected
+                    Err(ref e)
+                        if e.raw_os_error() == Some(winapi::AUDCLNT_E_DEVICE_INVALIDATED) => (),
+                    e => e.unwrap(),
+                };
             }
         }
     }
 }
+
+fn format_to_waveformatextensible(format: &Format)
+                                  -> Result<winapi::WAVEFORMATEXTENSIBLE, CreationError>
+{
+    Ok(winapi::WAVEFORMATEXTENSIBLE {
+        Format: winapi::WAVEFORMATEX {
+            wFormatTag: match format.data_type {
+                SampleFormat::I16 => winapi::WAVE_FORMAT_PCM,
+                SampleFormat::F32 => winapi::WAVE_FORMAT_EXTENSIBLE,
+                SampleFormat::U16 => return Err(CreationError::FormatNotSupported),
+            },
+            nChannels: format.channels.len() as winapi::WORD,
+            nSamplesPerSec: format.samples_rate.0 as winapi::DWORD,
+            nAvgBytesPerSec: format.channels.len() as winapi::DWORD *
+                             format.samples_rate.0 as winapi::DWORD *
+                             format.data_type.get_sample_size() as winapi::DWORD,
+            nBlockAlign: format.channels.len() as winapi::WORD *
+                         format.data_type.get_sample_size() as winapi::WORD,
+            wBitsPerSample: 8 * format.data_type.get_sample_size() as winapi::WORD,
+            cbSize: match format.data_type {
+                SampleFormat::I16 => 0,
+                SampleFormat::F32 => (mem::size_of::<winapi::WAVEFORMATEXTENSIBLE>() -
+                                      mem::size_of::<winapi::WAVEFORMATEX>()) as winapi::WORD,
+                SampleFormat::U16 => return Err(CreationError::FormatNotSupported),
+            },
+        },
+        Samples: 8 * format.data_type.get_sample_size() as winapi::WORD,
+        dwChannelMask: {
+            let mut mask = 0;
+            for &channel in format.channels.iter() {
+                let raw_value = match channel {
+                    ChannelPosition::FrontLeft => winapi::SPEAKER_FRONT_LEFT,
+                    ChannelPosition::FrontRight => winapi::SPEAKER_FRONT_RIGHT,
+                    ChannelPosition::FrontCenter => winapi::SPEAKER_FRONT_CENTER,
+                    ChannelPosition::LowFrequency => winapi::SPEAKER_LOW_FREQUENCY,
+                    ChannelPosition::BackLeft => winapi::SPEAKER_BACK_LEFT,
+                    ChannelPosition::BackRight => winapi::SPEAKER_BACK_RIGHT,
+                    ChannelPosition::FrontLeftOfCenter => winapi::SPEAKER_FRONT_LEFT_OF_CENTER,
+                    ChannelPosition::FrontRightOfCenter => winapi::SPEAKER_FRONT_RIGHT_OF_CENTER,
+                    ChannelPosition::BackCenter => winapi::SPEAKER_BACK_CENTER,
+                    ChannelPosition::SideLeft => winapi::SPEAKER_SIDE_LEFT,
+                    ChannelPosition::SideRight => winapi::SPEAKER_SIDE_RIGHT,
+                    ChannelPosition::TopCenter => winapi::SPEAKER_TOP_CENTER,
+                    ChannelPosition::TopFrontLeft => winapi::SPEAKER_TOP_FRONT_LEFT,
+                    ChannelPosition::TopFrontCenter => winapi::SPEAKER_TOP_FRONT_CENTER,
+                    ChannelPosition::TopFrontRight => winapi::SPEAKER_TOP_FRONT_RIGHT,
+                    ChannelPosition::TopBackLeft => winapi::SPEAKER_TOP_BACK_LEFT,
+                    ChannelPosition::TopBackCenter => winapi::SPEAKER_TOP_BACK_CENTER,
+                    ChannelPosition::TopBackRight => winapi::SPEAKER_TOP_BACK_RIGHT,
+                };
+
+                // channels must be in the right order
+                if raw_value <= mask {
+                    return Err(CreationError::FormatNotSupported);
+                }
+
+                mask = mask | raw_value;
+            }
+
+            mask
+        },
+        SubFormat: match format.data_type {
+            SampleFormat::I16 => winapi::KSDATAFORMAT_SUBTYPE_PCM,
+            SampleFormat::F32 => winapi::KSDATAFORMAT_SUBTYPE_IEEE_FLOAT,
+            SampleFormat::U16 => return Err(CreationError::FormatNotSupported),
+        },
+    })
+}
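
As a minimal, standalone sketch of the error-handling pattern introduced above (matching on the `(hresult, check_result(hresult))` tuple so that `S_FALSE` from `IsFormatSupported` and `AUDCLNT_E_DEVICE_INVALIDATED` each get their own branch): the constants and the `check_result` stub below are simplified stand-ins for the winapi and cpal items of the same names, and the COM `Release()` calls from the real code are omitted.

use std::io::Error;

// Simplified stand-ins for the winapi constants of the same names.
const S_OK: i32 = 0;
const S_FALSE: i32 = 1;
const AUDCLNT_E_DEVICE_INVALIDATED: i32 = 0x88890004u32 as i32;

// Stand-in for cpal's `check_result`: wraps a failing HRESULT in an io::Error.
fn check_result(hresult: i32) -> Result<(), Error> {
    if hresult < 0 {
        Err(Error::from_raw_os_error(hresult))
    } else {
        Ok(())
    }
}

#[derive(Debug)]
enum CreationError {
    DeviceNotAvailable,
    FormatNotSupported,
}

// Same shape as the new code in `Voice::new`: a device-invalidated error maps to
// `DeviceNotAvailable`, any other error panics, `S_FALSE` (a compatible but different
// format was suggested) is treated as `FormatNotSupported`, everything else is Ok.
fn interpret(hresult: i32) -> Result<(), CreationError> {
    match (hresult, check_result(hresult)) {
        (_, Err(ref e)) if e.raw_os_error() == Some(AUDCLNT_E_DEVICE_INVALIDATED) =>
            Err(CreationError::DeviceNotAvailable),
        (_, Err(e)) => panic!("{:?}", e),
        (S_FALSE, _) => Err(CreationError::FormatNotSupported),
        (_, Ok(())) => Ok(()),
    }
}

fn main() {
    println!("{:?}", interpret(S_OK));                         // Ok(())
    println!("{:?}", interpret(S_FALSE));                      // Err(FormatNotSupported)
    println!("{:?}", interpret(AUDCLNT_E_DEVICE_INVALIDATED)); // Err(DeviceNotAvailable)
}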