Compare commits
2 Commits
webaudio-d... ... add-cmp-te...

Author | SHA1 | Date
---|---|---
Rob Watson | a292246c20 |
Rob Watson | a2e15c6f55 |
@@ -127,25 +127,6 @@ jobs:
- name: Build beep example
run: cargo build --example beep --target ${{ matrix.target }} --features=wasm-bindgen

wasm32-wasi-test:
strategy:
matrix:
target: [wasm32-wasi]

runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v1
- name: Install stable
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
target: ${{ matrix.target }}
- name: Build beep example
run: cargo build --example beep --target ${{ matrix.target }}

windows-test:
strategy:
matrix:
@@ -1,8 +1,4 @@
# Version 0.12.1 (2020-07-23)

- Bugfix release to get the asio feature working again.

# Version 0.12.0 (2020-07-09)
# Unreleased

- Large refactor removing the blocking EventLoop API.
- Rename many `Format` types to `StreamConfig`:
Cargo.toml (10 lines changed)
@@ -1,6 +1,6 @@
[package]
name = "cpal"
version = "0.12.1"
version = "0.11.0"
authors = ["The CPAL contributors", "Pierre Krieger <pierre.krieger1708@gmail.com>"]
description = "Low-level cross-platform audio I/O library in pure Rust."
repository = "https://github.com/rustaudio/cpal"

@@ -9,10 +9,12 @@ license = "Apache-2.0"
keywords = ["audio", "sound"]

[features]
asio = ["asio-sys", "num-traits"] # Only available on Windows. See README for setup instructions.
asio = ["asio-sys"] # Only available on Windows. See README for setup instructions.

[dependencies]
thiserror = "1.0.2"
lazy_static = "1.3"
num-traits = "0.2.6"

[dev-dependencies]
anyhow = "1.0.12"

@@ -21,10 +23,8 @@ ringbuf = "0.1.6"

[target.'cfg(target_os = "windows")'.dependencies]
winapi = { version = "0.3", features = ["audiosessiontypes", "audioclient", "coml2api", "combaseapi", "debug", "devpkey", "handleapi", "ksmedia", "mmdeviceapi", "objbase", "profileapi", "std", "synchapi", "winbase", "winuser"] }
asio-sys = { version = "0.2", path = "asio-sys", optional = true }
num-traits = { version = "0.2.6", optional = true }
asio-sys = { version = "0.1", path = "asio-sys", optional = true }
parking_lot = "0.9"
lazy_static = "1.3"

[target.'cfg(any(target_os = "linux", target_os = "dragonfly", target_os = "freebsd"))'.dependencies]
alsa = "0.4.1"
@@ -26,10 +26,6 @@ Note that on Linux, the ALSA development files are required. These are provided
as part of the `libasound2-dev` package on Debian and Ubuntu distributions and
`alsa-lib-devel` on Fedora.

## Compiling for Web Assembly

If you are interested in using CPAL with WASM, please see [this guide](https://github.com/RustAudio/cpal/wiki/Setting-up-a-new-CPAL-WASM-project) in our Wiki which walks through setting up a new project from scratch.

## ASIO on Windows

[ASIO](https://en.wikipedia.org/wiki/Audio_Stream_Input/Output) is an audio
@@ -1,20 +1,20 @@
[package]
name = "asio-sys"
version = "0.2.0"
version = "0.1.0"
authors = ["Tom Gowan <tomrgowan@gmail.com>"]
description = "Low-level interface and binding generation for the steinberg ASIO SDK."
repository = "https://github.com/RustAudio/cpal/"
repository = "https://github.com/tomaka/cpal"
documentation = "https://docs.rs/asio-sys"
license = "Apache-2.0"
keywords = ["audio", "sound", "asio", "steinberg"]
build = "build.rs"

[target.'cfg(any(target_os = "windows"))'.build-dependencies]
bindgen = "0.54.0"
bindgen = "0.51.0"
walkdir = "2"
cc = "1.0.25"

[dependencies]
lazy_static = "1.0.0"
num-derive = "0.3"
num-derive = "0.2"
num-traits = "0.2"
@@ -19,7 +19,6 @@ pub enum AsioError {
HardwareStuck,
NoRate,
ASE_NoMemory,
InvalidBufferSize,
UnknownError,
}

@@ -64,7 +63,6 @@ impl fmt::Display for AsioError {
"sample clock or rate cannot be determined or is not present"
),
AsioError::ASE_NoMemory => write!(f, "not enough memory for completing the request"),
AsioError::InvalidBufferSize => write!(f, "buffersize out of range for device"),
AsioError::UnknownError => write!(f, "Error not in SDK"),
}
}

@@ -96,7 +94,6 @@ impl Error for AsioError {
AsioError::HardwareStuck => "hardware is not running when sample position is inquired",
AsioError::NoRate => "sample clock or rate cannot be determined or is not present",
AsioError::ASE_NoMemory => "not enough memory for completing the request",
AsioError::InvalidBufferSize => "buffersize out of range for device",
AsioError::UnknownError => "Error not in SDK",
}
}
@@ -385,14 +385,6 @@ impl Driver {
Ok(channel)
}

/// Get the min and max supported buffersize of the driver.
pub fn buffersize_range(&self) -> Result<(c_long, c_long), AsioError> {
let buffer_sizes = asio_get_buffer_sizes()?;
let min = buffer_sizes.min;
let max = buffer_sizes.max;
Ok((min, max))
}

/// Get current sample rate of the driver.
pub fn sample_rate(&self) -> Result<c_double, AsioError> {
let mut rate: c_double = 0.0;

@@ -439,14 +431,8 @@ impl Driver {
///
/// This will destroy any already allocated buffers.
///
/// If buffersize is None then the preferred buffer size from ASIO is used,
/// otherwise the desired buffersize is used if the requeted size is within
/// the range of accepted buffersizes for the device.
fn create_buffers(
&self,
buffer_infos: &mut [AsioBufferInfo],
buffer_size: Option<i32>,
) -> Result<c_long, AsioError> {
/// The preferred buffer size from ASIO is used.
fn create_buffers(&self, buffer_infos: &mut [AsioBufferInfo]) -> Result<c_long, AsioError> {
let num_channels = buffer_infos.len();

// To pass as ai::ASIOCallbacks

@@ -463,17 +449,6 @@ impl Driver {
);
}

let buffer_size = match buffer_size {
Some(v) => {
if v <= buffer_sizes.max {
v
} else {
return Err(AsioError::InvalidBufferSize);
}
}
None => buffer_sizes.pref,
};

// Ensure the driver is in the `Initialized` state.
if let DriverState::Running = *state {
state.stop()?;

@@ -485,27 +460,23 @@ impl Driver {
asio_result!(ai::ASIOCreateBuffers(
buffer_infos.as_mut_ptr() as *mut _,
num_channels as i32,
buffer_size,
buffer_sizes.pref,
&mut callbacks as *mut _ as *mut _,
))?;
}
*state = DriverState::Prepared;

Ok(buffer_size)
Ok(buffer_sizes.pref)
}

/// Creates the streams.
///
/// `buffer_size` sets the desired buffer_size. If None is passed in, then the
/// default buffersize for the device is used.
///
/// Both input and output streams need to be created together as a single slice of
/// `ASIOBufferInfo`.
fn create_streams(
&self,
mut input_buffer_infos: Vec<AsioBufferInfo>,
mut output_buffer_infos: Vec<AsioBufferInfo>,
buffer_size: Option<i32>,
) -> Result<AsioStreams, AsioError> {
let (input, output) = match (
input_buffer_infos.is_empty(),

@@ -518,7 +489,7 @@ impl Driver {
let mut all_buffer_infos = input_buffer_infos;
all_buffer_infos.append(&mut output_buffer_infos);
// Create the buffers. On success, split the output and input again.
let buffer_size = self.create_buffers(&mut all_buffer_infos, buffer_size)?;
let buffer_size = self.create_buffers(&mut all_buffer_infos)?;
let output_buffer_infos = all_buffer_infos.split_off(split_point);
let input_buffer_infos = all_buffer_infos;
let input = Some(AsioStream {

@@ -533,7 +504,7 @@ impl Driver {
}
// Just input
(false, true) => {
let buffer_size = self.create_buffers(&mut input_buffer_infos, buffer_size)?;
let buffer_size = self.create_buffers(&mut input_buffer_infos)?;
let input = Some(AsioStream {
buffer_infos: input_buffer_infos,
buffer_size,

@@ -543,7 +514,7 @@ impl Driver {
}
// Just output
(true, false) => {
let buffer_size = self.create_buffers(&mut output_buffer_infos, buffer_size)?;
let buffer_size = self.create_buffers(&mut output_buffer_infos)?;
let input = None;
let output = Some(AsioStream {
buffer_infos: output_buffer_infos,

@@ -566,21 +537,17 @@ impl Driver {
///
/// `num_channels` is the desired number of input channels.
///
/// `buffer_size` sets the desired buffer_size. If None is passed in, then the
/// default buffersize for the device is used.
///
/// This returns a full AsioStreams with both input and output if output was active.
pub fn prepare_input_stream(
&self,
output: Option<AsioStream>,
num_channels: usize,
buffer_size: Option<i32>,
) -> Result<AsioStreams, AsioError> {
let input_buffer_infos = prepare_buffer_infos(true, num_channels);
let output_buffer_infos = output
.map(|output| output.buffer_infos)
.unwrap_or_else(Vec::new);
self.create_streams(input_buffer_infos, output_buffer_infos, buffer_size)
self.create_streams(input_buffer_infos, output_buffer_infos)
}

/// Prepare the output stream.

@@ -592,21 +559,17 @@ impl Driver {
///
/// `num_channels` is the desired number of output channels.
///
/// `buffer_size` sets the desired buffer_size. If None is passed in, then the
/// default buffersize for the device is used.
///
/// This returns a full AsioStreams with both input and output if input was active.
pub fn prepare_output_stream(
&self,
input: Option<AsioStream>,
num_channels: usize,
buffer_size: Option<i32>,
) -> Result<AsioStreams, AsioError> {
let input_buffer_infos = input
.map(|input| input.buffer_infos)
.unwrap_or_else(Vec::new);
let output_buffer_infos = prepare_buffer_infos(false, num_channels);
self.create_streams(input_buffer_infos, output_buffer_infos, buffer_size)
self.create_streams(input_buffer_infos, output_buffer_infos)
}

/// Releases buffers allocations.
@@ -11,7 +11,6 @@ fn main() -> Result<(), anyhow::Error> {
for host_id in available_hosts {
println!("{}", host_id.name());
let host = cpal::host_from_id(host_id)?;

let default_in = host.default_input_device().map(|e| e.name().unwrap());
let default_out = host.default_output_device().map(|e| e.name().unwrap());
println!(" Default Input Device:\n {:?}", default_in);
@@ -3417,9 +3417,9 @@
}
},
"lodash": {
"version": "4.17.19",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.19.tgz",
"integrity": "sha512-JNvd8XER9GQX0v2qJgsaN/mzFCNA5BRe/j8JN9d+tWyGLSodKQHKFicdwNYzWwI3wjRnaKPsGj1XkBjx/F96DQ=="
"version": "4.17.15",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz",
"integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A=="
},
"loglevel": {
"version": "1.6.7",
@@ -3,17 +3,16 @@ extern crate libc;

use self::alsa::poll::Descriptors;
use crate::{
BackendSpecificError, BufferSize, BuildStreamError, ChannelCount, Data,
DefaultStreamConfigError, DeviceNameError, DevicesError, InputCallbackInfo, OutputCallbackInfo,
PauseStreamError, PlayStreamError, SampleFormat, SampleRate, StreamConfig, StreamError,
SupportedBufferSize, SupportedStreamConfig, SupportedStreamConfigRange,
SupportedStreamConfigsError,
BackendSpecificError, BuildStreamError, ChannelCount, Data, DefaultStreamConfigError,
DeviceNameError, DevicesError, InputCallbackInfo, OutputCallbackInfo, PauseStreamError,
PlayStreamError, SampleFormat, SampleRate, StreamConfig, StreamError, SupportedStreamConfig,
SupportedStreamConfigRange, SupportedStreamConfigsError,
};
use std::cmp;
use std::convert::TryInto;
use std::sync::Arc;
use std::thread::{self, JoinHandle};
use std::vec::IntoIter as VecIntoIter;
use std::{cmp, mem};
use traits::{DeviceTrait, HostTrait, StreamTrait};

pub use self::enumerate::{default_input_device, default_output_device, Devices};

@@ -340,14 +339,6 @@ impl Device {
})
.collect::<Vec<_>>();

let min_buffer_size = hw_params.get_buffer_size_min()?;
let max_buffer_size = hw_params.get_buffer_size_max()?;

let buffer_size_range = SupportedBufferSize::Range {
min: min_buffer_size as u32,
max: max_buffer_size as u32,
};

let mut output = Vec::with_capacity(
supported_formats.len() * supported_channels.len() * sample_rates.len(),
);

@@ -358,7 +349,6 @@ impl Device {
channels: channels.clone(),
min_sample_rate: SampleRate(min_rate as u32),
max_sample_rate: SampleRate(max_rate as u32),
buffer_size: buffer_size_range.clone(),
sample_format: sample_format,
});
}

@@ -448,7 +438,6 @@ struct StreamInner {

#[allow(dead_code)]
// Whether or not the hardware supports pausing the stream.
// TODO: We need an API to expose this. See #197, #284.
can_pause: bool,

// In the case that the device does not return valid timestamps via `get_htstamp`, this field

@@ -881,7 +870,7 @@ fn set_hw_params_from_format<'a>(
config: &StreamConfig,
sample_format: SampleFormat,
) -> Result<alsa::pcm::HwParams<'a>, BackendSpecificError> {
let hw_params = alsa::pcm::HwParams::any(pcm_handle)?;
let mut hw_params = alsa::pcm::HwParams::any(pcm_handle)?;
hw_params.set_access(alsa::pcm::Access::RWInterleaved)?;

let sample_format = if cfg!(target_endian = "big") {

@@ -902,14 +891,11 @@ fn set_hw_params_from_format<'a>(
hw_params.set_rate(config.sample_rate.0, alsa::ValueOr::Nearest)?;
hw_params.set_channels(config.channels as u32)?;

match config.buffer_size {
BufferSize::Fixed(v) => hw_params.set_buffer_size(v as alsa::pcm::Frames)?,
BufferSize::Default => {
// These values together represent a moderate latency and wakeup interval.
// Without them we are at the mercy of the device
hw_params.set_period_time_near(25_000, alsa::ValueOr::Nearest)?;
hw_params.set_buffer_time_near(100_000, alsa::ValueOr::Nearest)?;
}
// If this isn't set manually a overlarge buffer may be used causing audio delay
let mut hw_params_copy = hw_params.clone();
if let Err(_) = hw_params.set_buffer_time_near(100_000, alsa::ValueOr::Nearest) {
// Swap out the params with errors for a snapshot taken before the error was introduced.
mem::swap(&mut hw_params_copy, &mut hw_params);
}

pcm_handle.hw_params(&hw_params)?;
@@ -12,7 +12,6 @@ use DeviceNameError;
use DevicesError;
use SampleFormat;
use SampleRate;
use SupportedBufferSize;
use SupportedStreamConfig;
use SupportedStreamConfigRange;
use SupportedStreamConfigsError;

@@ -78,13 +77,9 @@ impl Device {
continue;
}
for channels in 1..f.channels + 1 {
supported_configs.push(SupportedStreamConfigRange {
channels,
min_sample_rate: rate,
max_sample_rate: rate,
buffer_size: f.buffer_size.clone(),
sample_format: f.sample_format.clone(),
})
f.channels = channels;
f.sample_rate = rate;
supported_configs.push(SupportedStreamConfigRange::from(f.clone()));
}
}
Ok(supported_configs.into_iter())

@@ -115,13 +110,9 @@ impl Device {
continue;
}
for channels in 1..f.channels + 1 {
supported_configs.push(SupportedStreamConfigRange {
channels,
min_sample_rate: rate,
max_sample_rate: rate,
buffer_size: f.buffer_size.clone(),
sample_format: f.sample_format.clone(),
})
f.channels = channels;
f.sample_rate = rate;
supported_configs.push(SupportedStreamConfigRange::from(f.clone()));
}
}
Ok(supported_configs.into_iter())

@@ -131,11 +122,6 @@ impl Device {
pub fn default_input_config(&self) -> Result<SupportedStreamConfig, DefaultStreamConfigError> {
let channels = self.driver.channels().map_err(default_config_err)?.ins as u16;
let sample_rate = SampleRate(self.driver.sample_rate().map_err(default_config_err)? as _);
let (min, max) = self.driver.buffersize_range().map_err(default_config_err)?;
let buffer_size = SupportedBufferSize::Range {
min: min as u32,
max: max as u32,
};
// Map th ASIO sample type to a CPAL sample type
let data_type = self.driver.input_data_type().map_err(default_config_err)?;
let sample_format = convert_data_type(&data_type)

@@ -143,7 +129,6 @@ impl Device {
Ok(SupportedStreamConfig {
channels,
sample_rate,
buffer_size,
sample_format,
})
}

@@ -152,18 +137,12 @@ impl Device {
pub fn default_output_config(&self) -> Result<SupportedStreamConfig, DefaultStreamConfigError> {
let channels = self.driver.channels().map_err(default_config_err)?.outs as u16;
let sample_rate = SampleRate(self.driver.sample_rate().map_err(default_config_err)? as _);
let (min, max) = self.driver.buffersize_range().map_err(default_config_err)?;
let buffer_size = SupportedBufferSize::Range {
min: min as u32,
max: max as u32,
};
let data_type = self.driver.output_data_type().map_err(default_config_err)?;
let sample_format = convert_data_type(&data_type)
.ok_or(DefaultStreamConfigError::StreamTypeNotSupported)?;
Ok(SupportedStreamConfig {
channels,
sample_rate,
buffer_size,
sample_format,
})
}
@@ -5,9 +5,9 @@ use self::num_traits::PrimInt;
use super::parking_lot::Mutex;
use super::Device;
use crate::{
BackendSpecificError, BufferSize, BuildStreamError, Data, InputCallbackInfo,
OutputCallbackInfo, PauseStreamError, PlayStreamError, Sample, SampleFormat, StreamConfig,
StreamError,
BackendSpecificError, BuildStreamError, Data, InputCallbackInfo, OutputCallbackInfo,
PauseStreamError, PlayStreamError, Sample, SampleFormat, StreamConfig, StreamError,
SupportedStreamConfig,
};
use std;
use std::sync::atomic::{AtomicBool, Ordering};

@@ -482,12 +482,6 @@ impl Device {
}?;
let num_channels = config.channels as usize;
let ref mut streams = *self.asio_streams.lock();

let buffer_size = match config.buffer_size {
BufferSize::Fixed(v) => Some(v as i32),
BufferSize::Default => None,
};

// Either create a stream if thers none or had back the
// size of the current one.
match streams.input {

@@ -495,7 +489,7 @@ impl Device {
None => {
let output = streams.output.take();
self.driver
.prepare_input_stream(output, num_channels, buffer_size)
.prepare_input_stream(output, num_channels)
.map(|new_streams| {
let bs = match new_streams.input {
Some(ref inp) => inp.buffer_size as usize,

@@ -529,12 +523,6 @@ impl Device {
}?;
let num_channels = config.channels as usize;
let ref mut streams = *self.asio_streams.lock();

let buffer_size = match config.buffer_size {
BufferSize::Fixed(v) => Some(v as i32),
BufferSize::Default => None,
};

// Either create a stream if thers none or had back the
// size of the current one.
match streams.output {

@@ -542,7 +530,7 @@ impl Device {
None => {
let output = streams.output.take();
self.driver
.prepare_output_stream(output, num_channels, buffer_size)
.prepare_output_stream(output, num_channels)
.map(|new_streams| {
let bs = match new_streams.output {
Some(ref out) => out.buffer_size as usize,

@@ -657,7 +645,6 @@ fn check_config(
let StreamConfig {
channels,
sample_rate,
buffer_size,
} = config;
// Try and set the sample rate to what the user selected.
let sample_rate = sample_rate.0.into();
@@ -5,8 +5,7 @@ use self::core_foundation_sys::string::{CFStringGetCString, CFStringGetCStringPt
use self::coreaudio::audio_unit::render_callback::{self, data};
use self::coreaudio::audio_unit::{AudioUnit, Element, Scope};
use self::coreaudio::sys::{
kAudioDevicePropertyAvailableNominalSampleRates, kAudioDevicePropertyBufferFrameSize,
kAudioDevicePropertyBufferFrameSizeRange, kAudioDevicePropertyDeviceNameCFString,
kAudioDevicePropertyAvailableNominalSampleRates, kAudioDevicePropertyDeviceNameCFString,
kAudioDevicePropertyNominalSampleRate, kAudioDevicePropertyScopeOutput,
kAudioDevicePropertyStreamConfiguration, kAudioDevicePropertyStreamFormat,
kAudioFormatFlagIsFloat, kAudioFormatFlagIsPacked, kAudioFormatLinearPCM,

@@ -21,11 +20,10 @@ use self::coreaudio::sys::{
};
use crate::traits::{DeviceTrait, HostTrait, StreamTrait};
use crate::{
BackendSpecificError, BufferSize, BuildStreamError, ChannelCount, Data,
DefaultStreamConfigError, DeviceNameError, DevicesError, InputCallbackInfo, OutputCallbackInfo,
PauseStreamError, PlayStreamError, SampleFormat, SampleRate, StreamConfig, StreamError,
SupportedBufferSize, SupportedStreamConfig, SupportedStreamConfigRange,
SupportedStreamConfigsError,
BackendSpecificError, BuildStreamError, ChannelCount, Data, DefaultStreamConfigError,
DeviceNameError, DevicesError, InputCallbackInfo, OutputCallbackInfo, PauseStreamError,
PlayStreamError, SampleFormat, SampleRate, StreamConfig, StreamError, SupportedStreamConfig,
SupportedStreamConfigRange, SupportedStreamConfigsError,
};
use std::cell::RefCell;
use std::ffi::CStr;

@@ -278,9 +276,6 @@ impl Device {
let ranges: *mut AudioValueRange = ranges.as_mut_ptr() as *mut _;
let ranges: &'static [AudioValueRange] = slice::from_raw_parts(ranges, n_ranges);

let audio_unit = audio_unit_from_device(self, true)?;
let buffer_size = get_io_buffer_frame_size_range(&audio_unit)?;

// Collect the supported formats for the device.
let mut fmts = vec![];
for range in ranges {

@@ -288,7 +283,6 @@ impl Device {
channels: n_channels as ChannelCount,
min_sample_rate: SampleRate(range.mMinimum as _),
max_sample_rate: SampleRate(range.mMaximum as _),
buffer_size: buffer_size.clone(),
sample_format: sample_format,
};
fmts.push(fmt);

@@ -380,13 +374,9 @@ impl Device {
}
};

let audio_unit = audio_unit_from_device(self, true)?;
let buffer_size = get_io_buffer_frame_size_range(&audio_unit)?;

let config = SupportedStreamConfig {
sample_rate: SampleRate(asbd.mSampleRate as _),
channels: asbd.mChannelsPerFrame as _,
buffer_size: buffer_size,
sample_format: sample_format,
};
Ok(config)

@@ -436,24 +426,6 @@ impl From<coreaudio::Error> for BuildStreamError {
}
}

impl From<coreaudio::Error> for SupportedStreamConfigsError {
fn from(err: coreaudio::Error) -> SupportedStreamConfigsError {
let description = format!("{}", err);
let err = BackendSpecificError { description };
// Check for possible DeviceNotAvailable variant
SupportedStreamConfigsError::BackendSpecific { err }
}
}

impl From<coreaudio::Error> for DefaultStreamConfigError {
fn from(err: coreaudio::Error) -> DefaultStreamConfigError {
let description = format!("{}", err);
let err = BackendSpecificError { description };
// Check for possible DeviceNotAvailable variant
DefaultStreamConfigError::BackendSpecific { err }
}
}

// Create a coreaudio AudioStreamBasicDescription from a CPAL Format.
fn asbd_from_config(
config: &StreamConfig,

@@ -684,29 +656,6 @@ impl Device {
let asbd = asbd_from_config(config, sample_format);
audio_unit.set_property(kAudioUnitProperty_StreamFormat, scope, element, Some(&asbd))?;

// Set the buffersize
match config.buffer_size {
BufferSize::Fixed(v) => {
let buffer_size_range = get_io_buffer_frame_size_range(&audio_unit)?;
match buffer_size_range {
SupportedBufferSize::Range { min, max } => {
if v >= min && v <= max {
audio_unit.set_property(
kAudioDevicePropertyBufferFrameSize,
scope,
element,
Some(&v),
)?
} else {
return Err(BuildStreamError::StreamConfigNotSupported);
}
}
SupportedBufferSize::Unknown => (),
}
}
BufferSize::Default => (),
}

// Register the callback that is being called by coreaudio whenever it needs data to be
// fed to the audio buffer.
let bytes_per_channel = sample_format.sample_size();

@@ -778,29 +727,6 @@ impl Device {
let asbd = asbd_from_config(config, sample_format);
audio_unit.set_property(kAudioUnitProperty_StreamFormat, scope, element, Some(&asbd))?;

// Set the buffersize
match config.buffer_size {
BufferSize::Fixed(v) => {
let buffer_size_range = get_io_buffer_frame_size_range(&audio_unit)?;
match buffer_size_range {
SupportedBufferSize::Range { min, max } => {
if v >= min && v <= max {
audio_unit.set_property(
kAudioDevicePropertyBufferFrameSize,
scope,
element,
Some(&v),
)?
} else {
return Err(BuildStreamError::StreamConfigNotSupported);
}
}
SupportedBufferSize::Unknown => (),
}
}
BufferSize::Default => (),
}

// Register the callback that is being called by coreaudio whenever it needs data to be
// fed to the audio buffer.
let bytes_per_channel = sample_format.sample_size();

@@ -922,18 +848,3 @@ fn check_os_status(os_status: OSStatus) -> Result<(), BackendSpecificError> {
}
}
}

fn get_io_buffer_frame_size_range(
audio_unit: &AudioUnit,
) -> Result<SupportedBufferSize, coreaudio::Error> {
let buffer_size_range: AudioValueRange = audio_unit.get_property(
kAudioDevicePropertyBufferFrameSizeRange,
Scope::Global,
Element::Output,
)?;

Ok(SupportedBufferSize::Range {
min: buffer_size_range.mMinimum as u32,
max: buffer_size_range.mMaximum as u32,
})
}
@@ -8,10 +8,10 @@ use stdweb::web::TypedArray;
use stdweb::Reference;

use crate::{
BufferSize, BuildStreamError, Data, DefaultStreamConfigError, DeviceNameError, DevicesError,
BuildStreamError, Data, DefaultStreamConfigError, DeviceNameError, DevicesError,
InputCallbackInfo, OutputCallbackInfo, PauseStreamError, PlayStreamError, SampleFormat,
SampleRate, StreamConfig, StreamError, SupportedBufferSize, SupportedStreamConfig,
SupportedStreamConfigRange, SupportedStreamConfigsError,
StreamConfig, StreamError, SupportedStreamConfig, SupportedStreamConfigRange,
SupportedStreamConfigsError,
};
use traits::{DeviceTrait, HostTrait, StreamTrait};

@@ -41,16 +41,6 @@ pub struct StreamId(usize);
pub type SupportedInputConfigs = ::std::vec::IntoIter<SupportedStreamConfigRange>;
pub type SupportedOutputConfigs = ::std::vec::IntoIter<SupportedStreamConfigRange>;

const MIN_CHANNELS: u16 = 1;
const MAX_CHANNELS: u16 = 32;
const MIN_SAMPLE_RATE: SampleRate = SampleRate(8_000);
const MAX_SAMPLE_RATE: SampleRate = SampleRate(96_000);
const DEFAULT_SAMPLE_RATE: SampleRate = SampleRate(44_100);
const MIN_BUFFER_SIZE: u32 = 1;
const MAX_BUFFER_SIZE: u32 = std::u32::MAX;
const DEFAULT_BUFFER_SIZE: usize = 2048;
const SUPPORTED_SAMPLE_FORMAT: SampleFormat = SampleFormat::F32;

impl Host {
pub fn new() -> Result<Self, crate::HostUnavailable> {
stdweb::initialize();

@@ -81,20 +71,21 @@ impl Device {
fn supported_output_configs(
&self,
) -> Result<SupportedOutputConfigs, SupportedStreamConfigsError> {
let buffer_size = SupportedBufferSize::Range {
min: MIN_BUFFER_SIZE,
max: MAX_BUFFER_SIZE,
};
let configs: Vec<_> = (MIN_CHANNELS..=MAX_CHANNELS)
.map(|channels| SupportedStreamConfigRange {
channels,
min_sample_rate: MIN_SAMPLE_RATE,
max_sample_rate: MAX_SAMPLE_RATE,
buffer_size: buffer_size.clone(),
sample_format: SUPPORTED_SAMPLE_FORMAT,
})
.collect();
Ok(configs.into_iter())
// TODO: right now cpal's API doesn't allow flexibility here
// "44100" and "2" (channels) have also been hard-coded in the rest of the code ; if
// this ever becomes more flexible, don't forget to change that
// According to https://developer.mozilla.org/en-US/docs/Web/API/BaseAudioContext/createBuffer
// browsers must support 1 to 32 channels at leats and 8,000 Hz to 96,000 Hz.
//
// UPDATE: We can do this now. Might be best to use `crate::COMMON_SAMPLE_RATES` and
// filter out those that lay outside the range specified above.
Ok(vec![SupportedStreamConfigRange {
channels: 2,
min_sample_rate: ::SampleRate(44100),
max_sample_rate: ::SampleRate(44100),
sample_format: ::SampleFormat::F32,
}]
.into_iter())
}

fn default_input_config(&self) -> Result<SupportedStreamConfig, DefaultStreamConfigError> {

@@ -102,15 +93,12 @@ impl Device {
}

fn default_output_config(&self) -> Result<SupportedStreamConfig, DefaultStreamConfigError> {
const EXPECT: &str = "expected at least one valid webaudio stream config";
let mut configs: Vec<_> = self.supported_output_configs().expect(EXPECT).collect();
configs.sort_by(|a, b| a.cmp_default_heuristics(b));
let config = configs
.into_iter()
.next()
.expect(EXPECT)
.with_sample_rate(DEFAULT_SAMPLE_RATE);
Ok(config)
// TODO: because it is hard coded, see supported_output_configs.
Ok(SupportedStreamConfig {
channels: 2,
sample_rate: ::SampleRate(44100),
sample_format: ::SampleFormat::F32,
})
}
}

@@ -181,7 +169,7 @@ impl DeviceTrait for Device {

fn build_output_stream_raw<D, E>(
&self,
config: &StreamConfig,
_config: &StreamConfig,
sample_format: SampleFormat,
data_callback: D,
error_callback: E,

@@ -190,20 +178,11 @@ impl DeviceTrait for Device {
D: FnMut(&mut Data, &OutputCallbackInfo) + Send + 'static,
E: FnMut(StreamError) + Send + 'static,
{
if !valid_config(config, sample_format) {
return Err(BuildStreamError::StreamConfigNotSupported);
}

let buffer_size_frames = match config.buffer_size {
BufferSize::Fixed(v) => {
if v == 0 {
return Err(BuildStreamError::StreamConfigNotSupported);
} else {
v as usize
}
}
BufferSize::Default => DEFAULT_BUFFER_SIZE,
};
assert_eq!(
sample_format,
SampleFormat::F32,
"emscripten backend currently only supports `f32` data",
);

// Create the stream.
let audio_ctxt_ref = js!(return new AudioContext()).into_reference().unwrap();

@@ -220,14 +199,7 @@ impl DeviceTrait for Device {
// See also: The call to `set_timeout` at the end of the `audio_callback_fn` which creates
// the loop.
set_timeout(
|| {
audio_callback_fn::<D, E>(
user_data_ptr as *mut c_void,
config,
sample_format,
buffer_size_frames,
)
},
|| audio_callback_fn::<D, E>(user_data_ptr as *mut c_void),
10,
);

@@ -251,18 +223,12 @@ impl StreamTrait for Stream {

// The first argument of the callback function (a `void*`) is a casted pointer to `self`
// and to the `callback` parameter that was passed to `run`.
fn audio_callback_fn<D, E>(
user_data_ptr: *mut c_void,
config: &StreamConfig,
sample_format: SampleFormat,
buffer_size_frames: usize,
) where
fn audio_callback_fn<D, E>(user_data_ptr: *mut c_void)
where
D: FnMut(&mut Data, &OutputCallbackInfo) + Send + 'static,
E: FnMut(StreamError) + Send + 'static,
{
let num_channels = config.channels as usize;
let sample_rate = config.sample_rate.0;
let buffer_size_samples = buffer_size_frames * num_channels;
const SAMPLE_RATE: usize = 44100;

unsafe {
let user_data_ptr2 = user_data_ptr as *mut (&Stream, D, E);

@@ -271,11 +237,12 @@ fn audio_callback_fn<D, E>(
let audio_ctxt = &stream.audio_ctxt_ref;

// TODO: We should be re-using a buffer.
let mut temporary_buffer = vec![0f32; buffer_size_samples];
let mut temporary_buffer = vec![0.0; SAMPLE_RATE * 2 / 3];

{
let len = temporary_buffer.len();
let data = temporary_buffer.as_mut_ptr() as *mut ();
let sample_format = SampleFormat::F32;
let mut data = Data::from_parts(data, len, sample_format);

let now_secs: f64 = js!(@{audio_ctxt}.getOutputTimestamp().currentTime)

@@ -286,7 +253,7 @@ fn audio_callback_fn<D, E>(
// we estimate based on buffer size instead. Probably should use this, but it's only
// supported by firefox (2020-04-28).
// let latency_secs: f64 = js!(@{audio_ctxt}.outputLatency).try_into().unwrap();
let buffer_duration = frames_to_duration(len, sample_rate as usize);
let buffer_duration = frames_to_duration(len, SAMPLE_RATE);
let playback = callback
.add(buffer_duration)
.expect("`playback` occurs beyond representation supported by `StreamInstant`");

@@ -306,19 +273,19 @@ fn audio_callback_fn<D, E>(
typed_array
};

let num_channels = 2u32; // TODO: correct value
debug_assert_eq!(temporary_buffer.len() % num_channels as usize, 0);

js!(
var src_buffer = new Float32Array(@{typed_array}.buffer);
var context = @{audio_ctxt};
var buffer_size_frames = @{buffer_size_frames as u32};
var num_channels = @{num_channels as u32};
var sample_rate = sample_rate;
var buf_len = @{temporary_buffer.len() as u32};
var num_channels = @{num_channels};

var buffer = context.createBuffer(num_channels, buffer_size_frames, sample_rate);
var buffer = context.createBuffer(num_channels, buf_len / num_channels, 44100);
for (var channel = 0; channel < num_channels; ++channel) {
var buffer_content = buffer.getChannelData(channel);
for (var i = 0; i < buffer_size_frames; ++i) {
for (var i = 0; i < buf_len / num_channels; ++i) {
buffer_content[i] = src_buffer[i * num_channels + channel];
}
}

@@ -332,10 +299,7 @@ fn audio_callback_fn<D, E>(
// TODO: handle latency better ; right now we just use setInterval with the amount of sound
// data that is in each buffer ; this is obviously bad, and also the schedule is too tight
// and there may be underflows
set_timeout(
|| audio_callback_fn::<D, E>(user_data_ptr, config, sample_format, buffer_size_frames),
buffer_size_frames as u32 * 1000 / sample_rate,
);
set_timeout(|| audio_callback_fn::<D, E>(user_data_ptr), 330);
}
}

@@ -384,15 +348,6 @@ fn is_webaudio_available() -> bool {
.unwrap()
}

// Whether or not the given stream configuration is valid for building a stream.
fn valid_config(conf: &StreamConfig, sample_format: SampleFormat) -> bool {
conf.channels <= MAX_CHANNELS
&& conf.channels >= MIN_CHANNELS
&& conf.sample_rate <= MAX_SAMPLE_RATE
&& conf.sample_rate >= MIN_SAMPLE_RATE
&& sample_format == SUPPORTED_SAMPLE_FORMAT
}

// Convert the given duration in frames at the given sample rate to a `std::time::Duration`.
fn frames_to_duration(frames: usize, rate: usize) -> std::time::Duration {
let secsf = frames as f64 / rate as f64;
@@ -1,8 +1,8 @@
use crate::{
BackendSpecificError, BufferSize, Data, DefaultStreamConfigError, DeviceNameError,
DevicesError, InputCallbackInfo, OutputCallbackInfo, SampleFormat, SampleRate, StreamConfig,
SupportedBufferSize, SupportedStreamConfig, SupportedStreamConfigRange,
SupportedStreamConfigsError, COMMON_SAMPLE_RATES,
BackendSpecificError, Data, DefaultStreamConfigError, DeviceNameError, DevicesError,
InputCallbackInfo, OutputCallbackInfo, SampleFormat, SampleRate, StreamConfig,
SupportedStreamConfig, SupportedStreamConfigRange, SupportedStreamConfigsError,
COMMON_SAMPLE_RATES,
};
use std;
use std::ffi::OsString;

@@ -27,7 +27,6 @@ use super::winapi::shared::mmreg;
use super::winapi::shared::winerror;
use super::winapi::shared::wtypes;
use super::winapi::Interface;

// https://msdn.microsoft.com/en-us/library/cc230355.aspx
use super::winapi::um::audioclient::{
self, IAudioClient, IID_IAudioClient, AUDCLNT_E_DEVICE_INVALIDATED,

@@ -319,11 +318,9 @@ unsafe fn format_from_waveformatex_ptr(
// Unknown data format returned by GetMixFormat.
_ => return None,
};

let format = SupportedStreamConfig {
channels: (*waveformatex_ptr).nChannels as _,
sample_rate: SampleRate((*waveformatex_ptr).nSamplesPerSec),
buffer_size: SupportedBufferSize::Unknown,
sample_format,
};
Some(format)

@@ -516,7 +513,7 @@ impl Device {
// TODO: Test the different sample formats?

// Create the supported formats.
let format = match format_from_waveformatex_ptr(default_waveformatex_ptr.0) {
let mut format = match format_from_waveformatex_ptr(default_waveformatex_ptr.0) {
Some(fmt) => fmt,
None => {
let description =

@@ -528,13 +525,8 @@ impl Device {
};
let mut supported_formats = Vec::with_capacity(supported_sample_rates.len());
for rate in supported_sample_rates {
supported_formats.push(SupportedStreamConfigRange {
channels: format.channels.clone(),
min_sample_rate: SampleRate(rate as _),
max_sample_rate: SampleRate(rate as _),
buffer_size: format.buffer_size.clone(),
sample_format: format.sample_format.clone(),
})
format.sample_rate = SampleRate(rate as _);
supported_formats.push(SupportedStreamConfigRange::from(format.clone()));
}
Ok(supported_formats.into_iter())
}

@@ -647,16 +639,6 @@ impl Device {
}
};

match config.buffer_size {
BufferSize::Fixed(_) => {
// TO DO: We need IAudioClient3 to get buffersize ranges first
// Otherwise the supported ranges are unknown. In the mean time
// the smallest buffersize is selected and used.
return Err(BuildStreamError::StreamConfigNotSupported);
}
BufferSize::Default => (),
};

// Computing the format and initializing the device.
let waveformatex = {
let format_attempt = config_to_waveformatextensible(config, sample_format)

@@ -809,16 +791,6 @@ impl Device {
}
};

match config.buffer_size {
BufferSize::Fixed(_) => {
// TO DO: We need IAudioClient3 to get buffersize ranges first
// Otherwise the supported ranges are unknown. In the mean time
// the smallest buffersize is selected and used.
return Err(BuildStreamError::StreamConfigNotSupported);
}
BufferSize::Default => (),
};

// Computing the format and initializing the device.
let waveformatex = {
let format_attempt = config_to_waveformatextensible(config, sample_format)

@@ -841,7 +813,6 @@ impl Device {
&format_attempt.Format,
ptr::null(),
);

match check_result(hresult) {
Err(ref e) if e.raw_os_error() == Some(AUDCLNT_E_DEVICE_INVALIDATED) => {
(*audio_client).Release();
@@ -7,10 +7,10 @@ use self::wasm_bindgen::prelude::*;
use self::wasm_bindgen::JsCast;
use self::web_sys::{AudioContext, AudioContextOptions};
use crate::{
BackendSpecificError, BufferSize, BuildStreamError, Data, DefaultStreamConfigError,
DeviceNameError, DevicesError, InputCallbackInfo, OutputCallbackInfo, PauseStreamError,
PlayStreamError, SampleFormat, SampleRate, StreamConfig, StreamError, SupportedBufferSize,
SupportedStreamConfig, SupportedStreamConfigRange, SupportedStreamConfigsError,
BackendSpecificError, BuildStreamError, Data, DefaultStreamConfigError, DeviceNameError,
DevicesError, InputCallbackInfo, OutputCallbackInfo, PauseStreamError, PlayStreamError,
SampleFormat, SampleRate, StreamConfig, StreamError, SupportedStreamConfig,
SupportedStreamConfigRange, SupportedStreamConfigsError,
};
use std::ops::DerefMut;
use std::sync::{Arc, Mutex, RwLock};

@@ -39,9 +39,6 @@ const MAX_CHANNELS: u16 = 32;
const MIN_SAMPLE_RATE: SampleRate = SampleRate(8_000);
const MAX_SAMPLE_RATE: SampleRate = SampleRate(96_000);
const DEFAULT_SAMPLE_RATE: SampleRate = SampleRate(44_100);
const MIN_BUFFER_SIZE: u32 = 1;
const MAX_BUFFER_SIZE: u32 = std::u32::MAX;
const DEFAULT_BUFFER_SIZE: usize = 2048;
const SUPPORTED_SAMPLE_FORMAT: SampleFormat = SampleFormat::F32;

impl Host {

@@ -96,16 +93,11 @@ impl Device {
fn supported_output_configs(
&self,
) -> Result<SupportedOutputConfigs, SupportedStreamConfigsError> {
let buffer_size = SupportedBufferSize::Range {
min: MIN_BUFFER_SIZE,
max: MAX_BUFFER_SIZE,
};
let configs: Vec<_> = (MIN_CHANNELS..=MAX_CHANNELS)
.map(|channels| SupportedStreamConfigRange {
channels,
min_sample_rate: MIN_SAMPLE_RATE,
max_sample_rate: MAX_SAMPLE_RATE,
buffer_size: buffer_size.clone(),
sample_format: SUPPORTED_SAMPLE_FORMAT,
})
.collect();

@@ -125,7 +117,7 @@ impl Device {
configs.sort_by(|a, b| a.cmp_default_heuristics(b));
let config = configs
.into_iter()
.last()
.next()
.expect(EXPECT)
.with_sample_rate(DEFAULT_SAMPLE_RATE);
Ok(config)

@@ -198,20 +190,11 @@ impl DeviceTrait for Device {
}

let n_channels = config.channels as usize;

let buffer_size_frames = match config.buffer_size {
BufferSize::Fixed(v) => {
if v == 0 {
return Err(BuildStreamError::StreamConfigNotSupported);
} else {
v as usize
}
}
BufferSize::Default => DEFAULT_BUFFER_SIZE,
};
// Use a buffer period of 1/3s for this early proof of concept.
// TODO: Change this to the requested buffer size when updating for the buffer size API.
let buffer_size_frames = (config.sample_rate.0 as f64 / 3.0).round() as usize;
let buffer_size_samples = buffer_size_frames * n_channels;
let buffer_time_step_secs = buffer_time_step_secs(buffer_size_frames, config.sample_rate);

let data_callback = Arc::new(Mutex::new(Box::new(data_callback)));

// Create the WebAudio stream.
src/lib.rs (64 lines changed)
@@ -180,22 +180,6 @@ pub type ChannelCount = u16;
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub struct SampleRate(pub u32);

/// The desired number of frames for the hardware buffer.
pub type FrameCount = u32;

/// The buffer size used by the device.
///
/// Default is used when no specific buffer size is set and uses the default
/// behavior of the given host. Note, the default buffer size may be surprisingly
/// large, leading to latency issues. If low latency is desired, Fixed(BufferSize)
/// should be used in accordance with the SupportedBufferSize range produced by
/// the SupportedStreamConfig API.
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum BufferSize {
Default,
Fixed(FrameCount),
}

/// The set of parameters used to describe how to open a stream.
///
/// The sample format is omitted in favour of using a sample type.

@@ -203,19 +187,6 @@ pub enum BufferSize {
pub struct StreamConfig {
pub channels: ChannelCount,
pub sample_rate: SampleRate,
pub buffer_size: BufferSize,
}

/// Describes the minimum and maximum supported buffer size for the device
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum SupportedBufferSize {
Range {
min: FrameCount,
max: FrameCount,
},
/// In the case that the platform provides no way of getting the default
/// buffersize before starting a stream.
Unknown,
}

/// Describes a range of supported stream configurations, retrieved via the

@@ -227,8 +198,6 @@ pub struct SupportedStreamConfigRange {
pub(crate) min_sample_rate: SampleRate,
/// Maximum value for the samples rate of the supported formats.
pub(crate) max_sample_rate: SampleRate,
/// Buffersize ranges supported by the device
pub(crate) buffer_size: SupportedBufferSize,
/// Type of data expected by the device.
pub(crate) sample_format: SampleFormat,
}

@@ -239,7 +208,6 @@ pub struct SupportedStreamConfigRange {
pub struct SupportedStreamConfig {
channels: ChannelCount,
sample_rate: SampleRate,
buffer_size: SupportedBufferSize,
sample_format: SampleFormat,
}

@@ -321,10 +289,6 @@ impl SupportedStreamConfig {
self.sample_rate
}

pub fn buffer_size(&self) -> &SupportedBufferSize {
&self.buffer_size
}

pub fn sample_format(&self) -> SampleFormat {
self.sample_format
}

@@ -333,7 +297,6 @@ impl SupportedStreamConfig {
StreamConfig {
channels: self.channels,
sample_rate: self.sample_rate,
buffer_size: BufferSize::Default,
}
}
}

@@ -529,15 +492,11 @@ impl SupportedStreamConfigRange {
self.max_sample_rate
}

pub fn buffer_size(&self) -> &SupportedBufferSize {
&self.buffer_size
}

pub fn sample_format(&self) -> SampleFormat {
self.sample_format
}

/// Retrieve a `SupportedStreamConfig` with the given sample rate and buffer size.
/// Retrieve a `SupportedStreamConfig` with the given sample rate.
///
/// **panic!**s if the given `sample_rate` is outside the range specified within this
/// `SupportedStreamConfigRange` instance.

@@ -545,9 +504,8 @@ impl SupportedStreamConfigRange {
assert!(self.min_sample_rate <= sample_rate && sample_rate <= self.max_sample_rate);
SupportedStreamConfig {
channels: self.channels,
sample_rate,
sample_format: self.sample_format,
buffer_size: self.buffer_size,
sample_rate,
}
}

@@ -558,7 +516,6 @@ impl SupportedStreamConfigRange {
channels: self.channels,
sample_rate: self.max_sample_rate,
sample_format: self.sample_format,
buffer_size: self.buffer_size,
}
}

@@ -637,35 +594,30 @@ impl SupportedStreamConfigRange {
fn test_cmp_default_heuristics() {
let mut formats = vec![
SupportedStreamConfigRange {
buffer_size: SupportedBufferSize::Range { min: 256, max: 512 },
channels: 2,
min_sample_rate: SampleRate(1),
max_sample_rate: SampleRate(96000),
sample_format: SampleFormat::F32,
},
SupportedStreamConfigRange {
buffer_size: SupportedBufferSize::Range { min: 256, max: 512 },
channels: 1,
min_sample_rate: SampleRate(1),
max_sample_rate: SampleRate(96000),
sample_format: SampleFormat::F32,
},
SupportedStreamConfigRange {
buffer_size: SupportedBufferSize::Range { min: 256, max: 512 },
channels: 2,
min_sample_rate: SampleRate(1),
max_sample_rate: SampleRate(96000),
sample_format: SampleFormat::I16,
},
SupportedStreamConfigRange {
buffer_size: SupportedBufferSize::Range { min: 256, max: 512 },
channels: 2,
min_sample_rate: SampleRate(1),
max_sample_rate: SampleRate(96000),
sample_format: SampleFormat::U16,
},
SupportedStreamConfigRange {
buffer_size: SupportedBufferSize::Range { min: 256, max: 512 },
channels: 2,
min_sample_rate: SampleRate(1),
max_sample_rate: SampleRate(22050),

@@ -708,6 +660,18 @@ impl From<SupportedStreamConfig> for StreamConfig {
}
}

impl From<SupportedStreamConfig> for SupportedStreamConfigRange {
#[inline]
fn from(format: SupportedStreamConfig) -> SupportedStreamConfigRange {
SupportedStreamConfigRange {
channels: format.channels,
min_sample_rate: format.sample_rate,
max_sample_rate: format.sample_rate,
sample_format: format.sample_format,
}
}
}

// If a backend does not provide an API for retrieving supported formats, we query it with a bunch
// of commonly used rates. This is always the case for wasapi and is sometimes the case for alsa.
//
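Editor's note: the src/lib.rs hunks above define the `BufferSize` enum and the `buffer_size` field on `StreamConfig` (on the side of the comparison that includes the buffer-size API). As a minimal sketch of how those public types compose, not taken from this diff, the concrete values below (stereo, 44.1 kHz, 256 frames) are illustrative assumptions only:

use cpal::{BufferSize, SampleRate, StreamConfig};

// Request a stereo, 44.1 kHz stream with an explicit 256-frame buffer.
// BufferSize::Default would instead leave the choice to the host, which,
// as the doc comment above warns, may pick a surprisingly large buffer.
fn example_config() -> StreamConfig {
    StreamConfig {
        channels: 2,
        sample_rate: SampleRate(44_100),
        buffer_size: BufferSize::Fixed(256),
    }
}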
@@ -1,7 +1,7 @@
//! Platform-specific items.
//!
//! This module also contains the implementation of the platform's dynamically dispatched `Host`
//! type and its associated `Device`, `StreamId` and other associated types. These
//! type and its associated `EventLoop`, `Device`, `StreamId` and other associated types. These
//! types are useful in the case that users require switching between audio host APIs at runtime.

#[doc(inline)]

@@ -27,7 +27,7 @@ pub use self::platform_impl::*;
// }
// ```
//
// And so on for Device, Devices, Host, StreamId, SupportedInputConfigs,
// And so on for Device, Devices, EventLoop, Host, StreamId, SupportedInputConfigs,
// SupportedOutputConfigs and all their necessary trait implementations.
// ```
macro_rules! impl_platform_host {

@@ -558,8 +558,8 @@ mod platform_impl {
)))]
mod platform_impl {
pub use crate::host::null::{
Device as NullDevice, Devices as NullDevices, Host as NullHost,
SupportedInputConfigs as NullSupportedInputConfigs,
Device as NullDevice, Devices as NullDevices, EventLoop as NullEventLoop, Host as NullHost,
StreamId as NullStreamId, SupportedInputConfigs as NullSupportedInputConfigs,
SupportedOutputConfigs as NullSupportedOutputConfigs,
};