From d0e5e7f4e6094ae81a62841b30514525ab04fa75 Mon Sep 17 00:00:00 2001
From: Pierre Krieger
Date: Tue, 22 Sep 2015 13:25:42 +0200
Subject: [PATCH] General WASAPI cleanup

---
 src/wasapi/voice.rs | 206 ++++++++++++++++++++++++--------------------
 1 file changed, 113 insertions(+), 93 deletions(-)

diff --git a/src/wasapi/voice.rs b/src/wasapi/voice.rs
index be2b90a..52fcdaf 100644
--- a/src/wasapi/voice.rs
+++ b/src/wasapi/voice.rs
@@ -31,7 +31,6 @@ unsafe impl Sync for Voice {}
 
 impl Voice {
     pub fn new(end_point: &Endpoint, format: &Format) -> Result<Voice, CreationError> {
-        // FIXME: release everything
         unsafe {
             // making sure that COM is initialized
             // it's not actually sure that this is required, but when in doubt do it
@@ -46,98 +45,42 @@ impl Voice {
 
             // computing the format and initializing the device
             let format = {
-                let format_attempt = winapi::WAVEFORMATEXTENSIBLE {
-                    Format: winapi::WAVEFORMATEX {
-                        wFormatTag: match format.data_type {
-                            SampleFormat::I16 => winapi::WAVE_FORMAT_PCM,
-                            SampleFormat::F32 => winapi::WAVE_FORMAT_EXTENSIBLE,
-                            SampleFormat::U16 => return Err(CreationError::FormatNotSupported),
-                        },
-                        nChannels: format.channels.len() as winapi::WORD,
-                        nSamplesPerSec: format.samples_rate.0 as winapi::DWORD,
-                        nAvgBytesPerSec: format.channels.len() as winapi::DWORD *
-                                         format.samples_rate.0 as winapi::DWORD *
-                                         format.data_type.get_sample_size() as winapi::DWORD,
-                        nBlockAlign: format.channels.len() as winapi::WORD *
-                                     format.data_type.get_sample_size() as winapi::WORD,
-                        wBitsPerSample: 8 * format.data_type.get_sample_size() as winapi::WORD,
-                        cbSize: match format.data_type {
-                            SampleFormat::I16 => 0,
-                            SampleFormat::F32 => (mem::size_of::<winapi::WAVEFORMATEXTENSIBLE>() -
-                                                  mem::size_of::<winapi::WAVEFORMATEX>()) as winapi::WORD,
-                            SampleFormat::U16 => return Err(CreationError::FormatNotSupported),
-                        },
-                    },
-                    Samples: 8 * format.data_type.get_sample_size() as winapi::WORD,
-                    dwChannelMask: {
-                        let mut mask = 0;
-                        for &channel in format.channels.iter() {
-                            let raw_value = match channel {
-                                ChannelPosition::FrontLeft => winapi::SPEAKER_FRONT_LEFT,
-                                ChannelPosition::FrontRight => winapi::SPEAKER_FRONT_RIGHT,
-                                ChannelPosition::FrontCenter => winapi::SPEAKER_FRONT_CENTER,
-                                ChannelPosition::LowFrequency => winapi::SPEAKER_LOW_FREQUENCY,
-                                ChannelPosition::BackLeft => winapi::SPEAKER_BACK_LEFT,
-                                ChannelPosition::BackRight => winapi::SPEAKER_BACK_RIGHT,
-                                ChannelPosition::FrontLeftOfCenter => winapi::SPEAKER_FRONT_LEFT_OF_CENTER,
-                                ChannelPosition::FrontRightOfCenter => winapi::SPEAKER_FRONT_RIGHT_OF_CENTER,
-                                ChannelPosition::BackCenter => winapi::SPEAKER_BACK_CENTER,
-                                ChannelPosition::SideLeft => winapi::SPEAKER_SIDE_LEFT,
-                                ChannelPosition::SideRight => winapi::SPEAKER_SIDE_RIGHT,
-                                ChannelPosition::TopCenter => winapi::SPEAKER_TOP_CENTER,
-                                ChannelPosition::TopFrontLeft => winapi::SPEAKER_TOP_FRONT_LEFT,
-                                ChannelPosition::TopFrontCenter => winapi::SPEAKER_TOP_FRONT_CENTER,
-                                ChannelPosition::TopFrontRight => winapi::SPEAKER_TOP_FRONT_RIGHT,
-                                ChannelPosition::TopBackLeft => winapi::SPEAKER_TOP_BACK_LEFT,
-                                ChannelPosition::TopBackCenter => winapi::SPEAKER_TOP_BACK_CENTER,
-                                ChannelPosition::TopBackRight => winapi::SPEAKER_TOP_BACK_RIGHT,
-                            };
+                let format_attempt = try!(format_to_waveformatextensible(format));
+                let share_mode = winapi::AUDCLNT_SHAREMODE::AUDCLNT_SHAREMODE_SHARED;
 
-                            // channels must be in the right order
-                            if raw_value <= mask {
-                                return Err(CreationError::FormatNotSupported);
-                            }
-
-                            mask = mask | raw_value;
-                        }
-
-                        mask
-                    },
-                    SubFormat: match format.data_type {
-                        SampleFormat::I16 => winapi::KSDATAFORMAT_SUBTYPE_PCM,
-                        SampleFormat::F32 => winapi::KSDATAFORMAT_SUBTYPE_IEEE_FLOAT,
-                        SampleFormat::U16 => return Err(CreationError::FormatNotSupported),
-                    },
-                };
-
-                let mut format_ptr: *mut winapi::WAVEFORMATEX = mem::uninitialized();
-                let hresult = (*audio_client).IsFormatSupported(winapi::AUDCLNT_SHAREMODE::AUDCLNT_SHAREMODE_SHARED,
-                                                                &format_attempt.Format, &mut format_ptr);
-
-                if !format_ptr.is_null() {
-                    ole32::CoTaskMemFree(format_ptr as *mut _);
+                // `IsFormatSupported` checks whether the format is supported and fills in a
+                // closest-match `WAVEFORMATEX`
+                let mut dummy_fmt_ptr: *mut winapi::WAVEFORMATEX = mem::uninitialized();
+                let hresult = (*audio_client).IsFormatSupported(share_mode, &format_attempt.Format,
+                                                                &mut dummy_fmt_ptr);
+                // we free that `WAVEFORMATEX` immediately because we don't need it
+                if !dummy_fmt_ptr.is_null() {
+                    ole32::CoTaskMemFree(dummy_fmt_ptr as *mut _);
                 }
 
-                if hresult == winapi::S_FALSE {
-                    return Err(CreationError::FormatNotSupported);
-                }
-
-                match check_result(hresult) {
-                    Err(ref e) if e.raw_os_error() == Some(winapi::AUDCLNT_E_DEVICE_INVALIDATED) =>
+                // `IsFormatSupported` can return `S_FALSE` (the exact format is not supported
+                // but a close match exists), which we also treat as an error
+                match (hresult, check_result(hresult)) {
+                    (_, Err(ref e))
+                            if e.raw_os_error() == Some(winapi::AUDCLNT_E_DEVICE_INVALIDATED) =>
                     {
                         (*audio_client).Release();
                         return Err(CreationError::DeviceNotAvailable);
                     },
-                    Err(e) => {
+                    (_, Err(e)) => {
                         (*audio_client).Release();
                         panic!("{:?}", e);
                     },
-                    Ok(()) => (),
+                    (winapi::S_FALSE, _) => {
+                        (*audio_client).Release();
+                        return Err(CreationError::FormatNotSupported);
+                    },
+                    (_, Ok(())) => (),
                 };
 
-                let hresult = (*audio_client).Initialize(winapi::AUDCLNT_SHAREMODE::AUDCLNT_SHAREMODE_SHARED,
-                                                         0, 10000000, 0, &format_attempt.Format, ptr::null());
-
+                // finally initializing the audio client
+                let hresult = (*audio_client).Initialize(share_mode, 0, 10000000, 0,
+                                                         &format_attempt.Format, ptr::null());
                 match check_result(hresult) {
@@ -154,7 +97,7 @@ impl Voice {
                 format_attempt.Format
             };
 
-            //
+            // obtaining the size of the samples buffer in number of frames
             let max_frames_in_buffer = {
                 let mut max_frames_in_buffer = mem::uninitialized();
                 let hresult = (*audio_client).GetBufferSize(&mut max_frames_in_buffer);
@@ -175,12 +118,13 @@ impl Voice {
                 max_frames_in_buffer
             };
 
-            //
+            // building an `IAudioRenderClient` that will be used to fill the samples buffer
             let render_client = {
                 let mut render_client: *mut winapi::IAudioRenderClient = mem::uninitialized();
                 let hresult = (*audio_client).GetService(&winapi::IID_IAudioRenderClient,
-                                                         &mut render_client as *mut *mut winapi::IAudioRenderClient
-                                                         as *mut _);
+                                                         &mut render_client
+                                                             as *mut *mut winapi::IAudioRenderClient
+                                                             as *mut _);
 
                 match check_result(hresult) {
                     Err(ref e) if e.raw_os_error() == Some(winapi::AUDCLNT_E_DEVICE_INVALIDATED) =>
@@ -198,6 +142,7 @@ impl Voice {
                 &mut *render_client
             };
 
+            // everything went fine
             Ok(Voice {
                 audio_client: audio_client,
                 render_client: render_client,
@@ -232,7 +177,7 @@ impl Voice {
 
     pub fn append_data<'a, T>(&'a mut self, max_elements: usize) -> Buffer<'a, T> {
         unsafe {
-            //
+            // obtaining the number of frames that are available to be written
            let frames_available = {
                 let mut padding = mem::uninitialized();
                 let hresult = (*self.audio_client).GetCurrentPadding(&mut padding);
@@ -240,25 +185,27 @@ impl Voice {
                 self.max_frames_in_buffer - padding
             };
 
+            // clamping `frames_available` so that we don't return more than `max_elements` elements
             let frames_available = cmp::min(frames_available,
                                             max_elements as u32 * mem::size_of::<T>() as u32 /
                                             self.bytes_per_frame as u32);
 
+            // WASAPI has some weird behaviors when the buffer size is zero, so we handle this
+            // case ourselves
             if frames_available == 0 {
                 return Buffer::Empty;
             }
 
-            // loading buffer
+            // obtaining a pointer to the buffer
             let (buffer_data, buffer_len) = {
                 let mut buffer: *mut winapi::BYTE = mem::uninitialized();
                 let hresult = (*self.render_client).GetBuffer(frames_available,
-                                                              &mut buffer as *mut *mut _);
-                check_result(hresult).unwrap();
-                assert!(!buffer.is_null());
+                                                             &mut buffer as *mut *mut _);
+                check_result(hresult).unwrap();    // FIXME: can return `AUDCLNT_E_DEVICE_INVALIDATED`
+                debug_assert!(!buffer.is_null());
                 (buffer as *mut T,
-                 frames_available as usize * self.bytes_per_frame as usize
-                        / mem::size_of::<T>())
+                 frames_available as usize * self.bytes_per_frame as usize / mem::size_of::<T>())
             };
 
             Buffer::Buffer {
@@ -351,8 +298,81 @@ impl<'a, T> Buffer<'a, T> {
         if let Buffer::Buffer { render_client, frames, .. } = self {
             unsafe {
                 let hresult = (*render_client).ReleaseBuffer(frames as u32, 0);
-                check_result(hresult).unwrap();
+                match check_result(hresult) {
+                    // ignoring the error that is produced if the device has been disconnected
+                    Err(ref e)
+                        if e.raw_os_error() == Some(winapi::AUDCLNT_E_DEVICE_INVALIDATED) => (),
+                    e => e.unwrap(),
+                };
             }
         }
     }
 }
+
+fn format_to_waveformatextensible(format: &Format)
+                                  -> Result<winapi::WAVEFORMATEXTENSIBLE, CreationError>
+{
+    Ok(winapi::WAVEFORMATEXTENSIBLE {
+        Format: winapi::WAVEFORMATEX {
+            wFormatTag: match format.data_type {
+                SampleFormat::I16 => winapi::WAVE_FORMAT_PCM,
+                SampleFormat::F32 => winapi::WAVE_FORMAT_EXTENSIBLE,
+                SampleFormat::U16 => return Err(CreationError::FormatNotSupported),
+            },
+            nChannels: format.channels.len() as winapi::WORD,
+            nSamplesPerSec: format.samples_rate.0 as winapi::DWORD,
+            nAvgBytesPerSec: format.channels.len() as winapi::DWORD *
+                             format.samples_rate.0 as winapi::DWORD *
+                             format.data_type.get_sample_size() as winapi::DWORD,
+            nBlockAlign: format.channels.len() as winapi::WORD *
+                         format.data_type.get_sample_size() as winapi::WORD,
+            wBitsPerSample: 8 * format.data_type.get_sample_size() as winapi::WORD,
+            cbSize: match format.data_type {
+                SampleFormat::I16 => 0,
+                SampleFormat::F32 => (mem::size_of::<winapi::WAVEFORMATEXTENSIBLE>() -
+                                      mem::size_of::<winapi::WAVEFORMATEX>()) as winapi::WORD,
+                SampleFormat::U16 => return Err(CreationError::FormatNotSupported),
+            },
+        },
+        Samples: 8 * format.data_type.get_sample_size() as winapi::WORD,
+        dwChannelMask: {
+            let mut mask = 0;
+            for &channel in format.channels.iter() {
+                let raw_value = match channel {
+                    ChannelPosition::FrontLeft => winapi::SPEAKER_FRONT_LEFT,
+                    ChannelPosition::FrontRight => winapi::SPEAKER_FRONT_RIGHT,
+                    ChannelPosition::FrontCenter => winapi::SPEAKER_FRONT_CENTER,
+                    ChannelPosition::LowFrequency => winapi::SPEAKER_LOW_FREQUENCY,
+                    ChannelPosition::BackLeft => winapi::SPEAKER_BACK_LEFT,
+                    ChannelPosition::BackRight => winapi::SPEAKER_BACK_RIGHT,
+                    ChannelPosition::FrontLeftOfCenter => winapi::SPEAKER_FRONT_LEFT_OF_CENTER,
+                    ChannelPosition::FrontRightOfCenter => winapi::SPEAKER_FRONT_RIGHT_OF_CENTER,
+                    ChannelPosition::BackCenter => winapi::SPEAKER_BACK_CENTER,
+                    ChannelPosition::SideLeft => winapi::SPEAKER_SIDE_LEFT,
+                    ChannelPosition::SideRight => winapi::SPEAKER_SIDE_RIGHT,
+                    ChannelPosition::TopCenter => winapi::SPEAKER_TOP_CENTER,
+                    ChannelPosition::TopFrontLeft => winapi::SPEAKER_TOP_FRONT_LEFT,
+                    ChannelPosition::TopFrontCenter => winapi::SPEAKER_TOP_FRONT_CENTER,
+                    ChannelPosition::TopFrontRight => winapi::SPEAKER_TOP_FRONT_RIGHT,
+                    ChannelPosition::TopBackLeft => winapi::SPEAKER_TOP_BACK_LEFT,
+                    ChannelPosition::TopBackCenter => winapi::SPEAKER_TOP_BACK_CENTER,
+                    ChannelPosition::TopBackRight => winapi::SPEAKER_TOP_BACK_RIGHT,
+                };
+
+                // channels must be in the right order
+                if raw_value <= mask {
+                    return Err(CreationError::FormatNotSupported);
+                }
+
+                mask = mask | raw_value;
+            }
+
+            mask
+        },
+        SubFormat: match format.data_type {
+            SampleFormat::I16 => winapi::KSDATAFORMAT_SUBTYPE_PCM,
+            SampleFormat::F32 => winapi::KSDATAFORMAT_SUBTYPE_IEEE_FLOAT,
+            SampleFormat::U16 => return Err(CreationError::FormatNotSupported),
+        },
+    })
+}
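
Note: the following is a standalone sketch, not part of the patch, illustrating the
`(hresult, check_result(hresult))` pattern the new code uses. `check_result` only maps failing
HRESULTs to `Err`, while `S_FALSE` from `IsFormatSupported` is a success code that still means
the exact format is unavailable; matching on the raw HRESULT alongside the converted result lets
the code reject it anyway. The constants and the `check_result` stand-in below are simplified
assumptions, not the crate's or winapi's actual definitions.

use std::io;

// Hypothetical stand-ins for the HRESULT values involved; `S_OK` and `S_FALSE` are both
// "success" codes, which is why `check_result` alone cannot reject `S_FALSE`.
const S_OK: i32 = 0;
const S_FALSE: i32 = 1;

// Simplified stand-in for the crate's `check_result`: only negative HRESULTs become errors.
fn check_result(hresult: i32) -> Result<(), io::Error> {
    if hresult < 0 {
        Err(io::Error::from_raw_os_error(hresult))
    } else {
        Ok(())
    }
}

#[derive(Debug, PartialEq)]
enum CreationError {
    FormatNotSupported,
    DeviceNotAvailable,
}

// Match on the raw HRESULT *and* its converted form, so that `S_FALSE` is still treated as
// `FormatNotSupported` even though `check_result` reports success (error handling simplified).
fn interpret(hresult: i32) -> Result<(), CreationError> {
    match (hresult, check_result(hresult)) {
        (_, Err(_)) => Err(CreationError::DeviceNotAvailable),
        (S_FALSE, _) => Err(CreationError::FormatNotSupported),
        (_, Ok(())) => Ok(()),
    }
}

fn main() {
    assert_eq!(interpret(S_OK), Ok(()));
    assert_eq!(interpret(S_FALSE), Err(CreationError::FormatNotSupported));
    assert_eq!(interpret(-1), Err(CreationError::DeviceNotAvailable));
}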
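
Another standalone sketch, not part of the patch: the `dwChannelMask` construction kept in
`format_to_waveformatextensible` relies on each channel's `SPEAKER_*` bit being strictly greater
than all bits accumulated so far, so an out-of-order or duplicated channel list is rejected with
`FormatNotSupported`. The constants below are illustrative stand-ins rather than the winapi
definitions.

// Illustrative stand-ins for the first few SPEAKER_* bits (assumed values).
const SPEAKER_FRONT_LEFT: u32 = 0x1;
const SPEAKER_FRONT_RIGHT: u32 = 0x2;
const SPEAKER_FRONT_CENTER: u32 = 0x4;

// Builds a channel mask the same way the patch does: each new bit must be strictly greater
// than the mask accumulated so far, which enforces both ordering and uniqueness.
fn build_channel_mask(raw_values: &[u32]) -> Option<u32> {
    let mut mask = 0u32;
    for &raw_value in raw_values {
        // channels must be in the right order
        if raw_value <= mask {
            return None; // corresponds to `CreationError::FormatNotSupported` in the patch
        }
        mask |= raw_value;
    }
    Some(mask)
}

fn main() {
    // in order: accepted
    assert_eq!(build_channel_mask(&[SPEAKER_FRONT_LEFT, SPEAKER_FRONT_RIGHT]), Some(0x3));
    // out of order: rejected
    assert_eq!(build_channel_mask(&[SPEAKER_FRONT_CENTER, SPEAKER_FRONT_LEFT]), None);
    // duplicated channel: also rejected
    assert_eq!(build_channel_mask(&[SPEAKER_FRONT_LEFT, SPEAKER_FRONT_LEFT]), None);
}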