Rebase/Update webaudio PR for recent breaking changes

This rebases #372, addressing the recent changes introduced by #397, #395, and #371 in the process.

TODO:

- [ ] Complete implementation of `callback` and `playback` timestamps in the output stream callback.
This commit is contained in:
mitchmindtree 2020-05-21 16:05:13 +02:00
parent 6f43fc2b60
commit 640a1d39ed
2 changed files with 58 additions and 44 deletions

View File

@@ -7,8 +7,10 @@ use self::wasm_bindgen::prelude::*;
use self::wasm_bindgen::JsCast; use self::wasm_bindgen::JsCast;
use self::web_sys::{AudioContext, AudioContextOptions}; use self::web_sys::{AudioContext, AudioContextOptions};
use crate::{ use crate::{
BuildStreamError, Data, DefaultFormatError, DeviceNameError, DevicesError, Format, BuildStreamError, Data, DefaultStreamConfigError, DeviceNameError, DevicesError,
PauseStreamError, PlayStreamError, StreamError, SupportedFormat, SupportedFormatsError, InputCallbackInfo, OutputCallbackInfo, PauseStreamError, PlayStreamError, SampleRate,
StreamConfig, StreamError, SupportedStreamConfig, SupportedStreamConfigRange,
SupportedStreamConfigsError,
}; };
use std::ops::DerefMut; use std::ops::DerefMut;
use std::sync::{Arc, Mutex, RwLock}; use std::sync::{Arc, Mutex, RwLock};
@@ -28,8 +30,8 @@ pub struct Stream {
on_ended_closures: Vec<Arc<RwLock<Option<Closure<dyn FnMut()>>>>>, on_ended_closures: Vec<Arc<RwLock<Option<Closure<dyn FnMut()>>>>>,
} }
pub type SupportedInputFormats = ::std::vec::IntoIter<SupportedFormat>; pub type SupportedInputConfigs = ::std::vec::IntoIter<SupportedStreamConfigRange>;
pub type SupportedOutputFormats = ::std::vec::IntoIter<SupportedFormat>; pub type SupportedOutputConfigs = ::std::vec::IntoIter<SupportedStreamConfigRange>;
impl Host { impl Host {
pub fn new() -> Result<Self, crate::HostUnavailable> { pub fn new() -> Result<Self, crate::HostUnavailable> {
@@ -72,12 +74,16 @@ impl Device {
} }
#[inline] #[inline]
fn supported_input_formats(&self) -> Result<SupportedInputFormats, SupportedFormatsError> { fn supported_input_configs(
&self,
) -> Result<SupportedInputConfigs, SupportedStreamConfigsError> {
unimplemented!(); unimplemented!();
} }
#[inline] #[inline]
fn supported_output_formats(&self) -> Result<SupportedOutputFormats, SupportedFormatsError> { fn supported_output_configs(
&self,
) -> Result<SupportedOutputConfigs, SupportedStreamConfigsError> {
// TODO: right now cpal's API doesn't allow flexibility here // TODO: right now cpal's API doesn't allow flexibility here
// "44100" and "2" (channels) have also been hard-coded in the rest of the code ; if // "44100" and "2" (channels) have also been hard-coded in the rest of the code ; if
// this ever becomes more flexible, don't forget to change that // this ever becomes more flexible, don't forget to change that
@@ -86,34 +92,34 @@ impl Device {
// //
// UPDATE: We can do this now. Might be best to use `crate::COMMON_SAMPLE_RATES` and // UPDATE: We can do this now. Might be best to use `crate::COMMON_SAMPLE_RATES` and
// filter out those that lay outside the range specified above. // filter out those that lay outside the range specified above.
Ok(vec![SupportedFormat { Ok(vec![SupportedStreamConfigRange {
channels: 2, channels: 2,
min_sample_rate: ::SampleRate(44100), min_sample_rate: SampleRate(44100),
max_sample_rate: ::SampleRate(44100), max_sample_rate: SampleRate(44100),
data_type: ::SampleFormat::F32, sample_format: ::SampleFormat::F32,
}] }]
.into_iter()) .into_iter())
} }
#[inline] #[inline]
fn default_input_format(&self) -> Result<Format, DefaultFormatError> { fn default_input_config(&self) -> Result<SupportedStreamConfig, DefaultStreamConfigError> {
unimplemented!(); unimplemented!();
} }
#[inline] #[inline]
fn default_output_format(&self) -> Result<Format, DefaultFormatError> { fn default_output_config(&self) -> Result<SupportedStreamConfig, DefaultStreamConfigError> {
// TODO: because it is hard coded, see supported_output_formats. // TODO: because it is hard coded, see supported_output_formats.
Ok(Format { Ok(SupportedStreamConfig {
channels: 2, channels: 2,
sample_rate: ::SampleRate(44100), sample_rate: ::SampleRate(44100),
data_type: ::SampleFormat::F32, sample_format: ::SampleFormat::F32,
}) })
} }
} }
impl DeviceTrait for Device { impl DeviceTrait for Device {
type SupportedInputFormats = SupportedInputFormats; type SupportedInputConfigs = SupportedInputConfigs;
type SupportedOutputFormats = SupportedOutputFormats; type SupportedOutputConfigs = SupportedOutputConfigs;
type Stream = Stream; type Stream = Stream;
#[inline] #[inline]
@@ -122,37 +128,38 @@ impl DeviceTrait for Device {
} }
#[inline] #[inline]
fn supported_input_formats( fn supported_input_configs(
&self, &self,
) -> Result<Self::SupportedInputFormats, SupportedFormatsError> { ) -> Result<Self::SupportedInputConfigs, SupportedStreamConfigsError> {
Device::supported_input_formats(self) Device::supported_input_configs(self)
} }
#[inline] #[inline]
fn supported_output_formats( fn supported_output_configs(
&self, &self,
) -> Result<Self::SupportedOutputFormats, SupportedFormatsError> { ) -> Result<Self::SupportedOutputConfigs, SupportedStreamConfigsError> {
Device::supported_output_formats(self) Device::supported_output_configs(self)
} }
#[inline] #[inline]
fn default_input_format(&self) -> Result<Format, DefaultFormatError> { fn default_input_config(&self) -> Result<SupportedStreamConfig, DefaultStreamConfigError> {
Device::default_input_format(self) Device::default_input_config(self)
} }
#[inline] #[inline]
fn default_output_format(&self) -> Result<Format, DefaultFormatError> { fn default_output_config(&self) -> Result<SupportedStreamConfig, DefaultStreamConfigError> {
Device::default_output_format(self) Device::default_output_config(self)
} }
fn build_input_stream_raw<D, E>( fn build_input_stream_raw<D, E>(
&self, &self,
_format: &Format, _config: &StreamConfig,
_sample_format: SampleFormat,
_data_callback: D, _data_callback: D,
_error_callback: E, _error_callback: E,
) -> Result<Self::Stream, BuildStreamError> ) -> Result<Self::Stream, BuildStreamError>
where where
D: FnMut(&Data) + Send + 'static, D: FnMut(&Data, &InputCallbackInfo) + Send + 'static,
E: FnMut(StreamError) + Send + 'static, E: FnMut(StreamError) + Send + 'static,
{ {
unimplemented!() unimplemented!()
@@ -161,27 +168,28 @@ impl DeviceTrait for Device {
/// Create an output stream. /// Create an output stream.
fn build_output_stream_raw<D, E>( fn build_output_stream_raw<D, E>(
&self, &self,
format: &Format, config: &StreamConfig,
sample_format: SampleFormat,
data_callback: D, data_callback: D,
_error_callback: E, _error_callback: E,
) -> Result<Self::Stream, BuildStreamError> ) -> Result<Self::Stream, BuildStreamError>
where where
D: FnMut(&mut Data) + Send + 'static, D: FnMut(&mut Data, &OutputCallbackInfo) + Send + 'static,
E: FnMut(StreamError) + Send + 'static, E: FnMut(StreamError) + Send + 'static,
{ {
assert_eq!( assert_eq!(
format.data_type, sample_format,
SampleFormat::F32, SampleFormat::F32,
"WebAudio backend currently only supports `f32` data", "WebAudio backend currently only supports `f32` data",
); );
// Use a buffer period of 1/3s for this early proof of concept. // Use a buffer period of 1/3s for this early proof of concept.
let buffer_length = (format.sample_rate.0 as f64 / 3.0).round() as usize; let buffer_length = (config.sample_rate.0 as f64 / 3.0).round() as usize;
let data_callback = Arc::new(Mutex::new(Box::new(data_callback))); let data_callback = Arc::new(Mutex::new(Box::new(data_callback)));
// Create the WebAudio stream. // Create the WebAudio stream.
let mut stream_opts = AudioContextOptions::new(); let mut stream_opts = AudioContextOptions::new();
stream_opts.sample_rate(format.sample_rate.0 as f32); stream_opts.sample_rate(config.sample_rate.0 as f32);
let ctx = Arc::new( let ctx = Arc::new(
AudioContext::new_with_context_options(&stream_opts).map_err( AudioContext::new_with_context_options(&stream_opts).map_err(
|err| -> BuildStreamError { |err| -> BuildStreamError {
@@ -201,21 +209,20 @@ impl DeviceTrait for Device {
// Create a set of closures / callbacks which will continuously fetch and schedule sample playback. // Create a set of closures / callbacks which will continuously fetch and schedule sample playback.
// Starting with two workers, eg a front and back buffer so that audio frames can be fetched in the background. // Starting with two workers, eg a front and back buffer so that audio frames can be fetched in the background.
for _i in 0..2 { for _i in 0..2 {
let format = format.clone();
let data_callback_handle = data_callback.clone(); let data_callback_handle = data_callback.clone();
let ctx_handle = ctx.clone(); let ctx_handle = ctx.clone();
let time_handle = time.clone(); let time_handle = time.clone();
// A set of temporary buffers to be used for intermediate sample transformation steps. // A set of temporary buffers to be used for intermediate sample transformation steps.
let mut temporary_buffer = vec![0f32; buffer_length * format.channels as usize]; let mut temporary_buffer = vec![0f32; buffer_length * config.channels as usize];
let mut temporary_channel_buffer = vec![0f32; buffer_length]; let mut temporary_channel_buffer = vec![0f32; buffer_length];
// Create a webaudio buffer which will be reused to avoid allocations. // Create a webaudio buffer which will be reused to avoid allocations.
let ctx_buffer = ctx let ctx_buffer = ctx
.create_buffer( .create_buffer(
format.channels as u32, config.channels as u32,
buffer_length as u32, buffer_length as u32,
format.sample_rate.0 as f32, config.sample_rate.0 as f32,
) )
.map_err(|err| -> BuildStreamError { .map_err(|err| -> BuildStreamError {
let description = format!("{:?}", err); let description = format!("{:?}", err);
@@ -228,6 +235,9 @@ impl DeviceTrait for Device {
Arc::new(RwLock::new(None)); Arc::new(RwLock::new(None));
let on_ended_closure_handle = on_ended_closure.clone(); let on_ended_closure_handle = on_ended_closure.clone();
let n_channels = config.channels as usize;
let sample_rate = config.sample_rate.0 as f64;
on_ended_closure on_ended_closure
.write() .write()
.unwrap() .unwrap()
@@ -252,16 +262,20 @@ impl DeviceTrait for Device {
let sample_format = SampleFormat::F32; let sample_format = SampleFormat::F32;
let mut data = unsafe { Data::from_parts(data, len, sample_format) }; let mut data = unsafe { Data::from_parts(data, len, sample_format) };
let mut data_callback = data_callback_handle.lock().unwrap(); let mut data_callback = data_callback_handle.lock().unwrap();
(data_callback.deref_mut())(&mut data); let callback = unimplemented!();
let playback = unimplemented!();
let timestamp = crate::OutputStreamTimestamp { callback, playback };
let info = OutputCallbackInfo { timestamp };
(data_callback.deref_mut())(&mut data, &info);
} }
// Deinterleave the sample data and copy into the audio context buffer. // Deinterleave the sample data and copy into the audio context buffer.
// We do not reference the audio context buffer directly eg getChannelData. // We do not reference the audio context buffer directly eg getChannelData.
// As wasm-bindgen only gives us a copy, not a direct reference. // As wasm-bindgen only gives us a copy, not a direct reference.
for channel in 0..(format.channels as usize) { for channel in 0..n_channels {
for i in 0..buffer_length { for i in 0..buffer_length {
temporary_channel_buffer[i] = temporary_channel_buffer[i] =
temporary_buffer[(format.channels as usize) * i + channel]; temporary_buffer[n_channels * i + channel];
} }
ctx_buffer ctx_buffer
.copy_to_channel(&mut temporary_channel_buffer, channel as i32) .copy_to_channel(&mut temporary_channel_buffer, channel as i32)
@@ -293,8 +307,8 @@ impl DeviceTrait for Device {
.expect("Unable to start the webaudio buffer source"); .expect("Unable to start the webaudio buffer source");
// Keep track of when the next buffer worth of samples should be played. // Keep track of when the next buffer worth of samples should be played.
*time_handle.write().unwrap() = time_at_start_of_buffer *time_handle.write().unwrap() =
+ (buffer_length as f64 / format.sample_rate.0 as f64); time_at_start_of_buffer + (buffer_length as f64 / sample_rate);
}) as Box<dyn FnMut()>)); }) as Box<dyn FnMut()>));
on_ended_closures.push(on_ended_closure); on_ended_closures.push(on_ended_closure);

View File

@@ -504,8 +504,8 @@ mod platform_impl {
mod platform_impl { mod platform_impl {
pub use crate::host::webaudio::{ pub use crate::host::webaudio::{
Device as WebAudioDevice, Devices as WebAudioDevices, Host as WebAudioHost, Device as WebAudioDevice, Devices as WebAudioDevices, Host as WebAudioHost,
Stream as WebAudioStream, SupportedInputFormats as WebAudioSupportedInputFormats, Stream as WebAudioStream, SupportedInputConfigs as WebAudioSupportedInputConfigs,
SupportedOutputFormats as WebAudioSupportedOutputFormats, SupportedOutputConfigs as WebAudioSupportedOutputConfigs,
}; };
impl_platform_host!(WebAudio webaudio "WebAudio"); impl_platform_host!(WebAudio webaudio "WebAudio");