Compare commits

...

43 Commits

Author SHA1 Message Date
Rob Watson 8789312826 webaudio: Fix default_output_config() 2020-09-10 15:50:35 +02:00
est31 3d2fddc13e
Merge pull request #469 from exrook/master
Fix compilation error when using null host
2020-08-30 18:57:56 +02:00
Jacob Hughes 5ca70fe6fe Fix compilation error when using null host
Also remove other references to EventLoop

Add CI run for wasm32-wasi platform to test null host
2020-08-30 12:19:50 -04:00
mitchmindtree 65584037fe
Merge pull request #449 from JoshuaBatty/develop
adding in a link to the CPAL WASM guide into the Readme
2020-08-02 17:44:43 +02:00
mitchmindtree bfffbb5241
Merge pull request #462 from AlexanderHarrison/master
Fix issue #461
2020-08-02 16:54:41 +02:00
mitchmindtree f120f8979d
Merge pull request #451 from RustAudio/dependabot/npm_and_yarn/examples/wasm-beep/lodash-4.17.19
Bump lodash from 4.17.15 to 4.17.19 in /examples/wasm-beep
2020-08-02 16:52:27 +02:00
AlexanderHarrison 104f04c9a1
Fix issue #461 2020-07-29 17:43:17 -06:00
est31 ecbb026e2b
Merge pull request #457 from richardmitic/raspberry-pi-c-long
Use alsa::pcm::Frames in alsa API calls instead of fixed-width integers.
2020-07-27 16:50:33 +02:00
Richard Mitic cb49e4446c Use alsa::pcm::Frames in alsa API calls instead of fixed-width integers.
`alsa::pcm::Frames` resolves to `c_long` which is 32 bits when compiling
for raspberry pi (armv7l).
2020-07-27 14:18:41 +02:00
est31 7a8f017b1d
Release 0.12.1 (#456) 2020-07-23 01:28:42 +02:00
est31 7021c717a2
Merge pull request #455 from est31/asio-sys-release
Release asio-sys 0.2.0
2020-07-23 01:16:01 +02:00
est31 42ee11affc Depend on version 0.2 of asio-sys 2020-07-23 00:55:40 +02:00
est31 0b185514b8 Release asio-sys 0.2.0 2020-07-23 00:55:09 +02:00
est31 822d9da643 Update repo url of asio-sys 2020-07-22 23:38:54 +02:00
est31 13041efbfd
Update bindgen dep of asio-sys to 0.54.0 and num-derive to 0.3 (#454) 2020-07-22 23:36:50 +02:00
dependabot[bot] 518d2ea519
Bump lodash from 4.17.15 to 4.17.19 in /examples/wasm-beep
Bumps [lodash](https://github.com/lodash/lodash) from 4.17.15 to 4.17.19.
- [Release notes](https://github.com/lodash/lodash/releases)
- [Commits](https://github.com/lodash/lodash/compare/4.17.15...4.17.19)

Signed-off-by: dependabot[bot] <support@github.com>
2020-07-20 21:07:35 +00:00
JoshuaBatty ce9a6c4fc3 adding in a link to the CPAL WASM guide into the Readme 2020-07-15 12:55:33 +02:00
mitchmindtree 997fb9fa8a
Merge pull request #438 from rfwatson/add-cmp-tests
Add test coverage for cmp_default_heuristics
2020-07-14 22:15:01 +02:00
Rob Watson 8a3dc611fa Fix compilation warnings 2020-07-14 22:05:55 +02:00
Rob Watson c8fc1002cb Add test for cmp_default_heuristics 2020-07-14 22:05:55 +02:00
mitchmindtree b78ff83c03
Merge pull request #401 from JoshuaBatty/buffersize_range
Prototyping an API for getting the supported min and max buffersizes
2020-07-14 14:35:46 +02:00
JoshuaBatty e739dbf32d cargo fmt 2020-07-14 14:11:35 +02:00
JoshuaBatty e854c4e54c added in sniperrifle2004 code from PR 431 2020-07-14 14:10:00 +02:00
JoshuaBatty f285389002 fixed mismatched types in emscipten set_timeout 2020-07-14 13:51:49 +02:00
JoshuaBatty 4ce12a69b9 adds docs for BufferSize and fixes buffersize bugs in emscripten and alsa 2020-07-14 13:19:26 +02:00
est31 da40ea723e
Reenable the publish github workflow (#445)
The manual release of 0.12.0 is done.
Hopefully the issue is now resolved and in the future the CI can do it automatically.
2020-07-09 08:41:45 +02:00
est31 6ec1953424
Disable cargo publish invocation (#444)
To allow pushing to master after the 0.12.0 release.

The 0.12.0 release will be uploaded manually.
2020-07-09 07:41:20 +02:00
est31 62ba9a96b0
Publish version 0.12.0 (#443) 2020-07-09 06:40:17 +02:00
est31 ae0fc33b17
lazy_static and num-traits are only used by windows (#442) 2020-07-09 06:08:22 +02:00
JoshuaBatty 271cc34f3f cargo fmt fix for coreaudio 2020-05-27 13:28:18 +02:00
JoshuaBatty 10599ac826 fixed coreaudio buffersize errors 2020-05-27 13:15:44 +02:00
JoshuaBatty 9c85eecb59 cargo fmt fix 2020-05-27 12:15:31 +02:00
JoshuaBatty 7c1adce330 adds support for buffersizes in webaudio and emscripten 2020-05-27 12:05:55 +02:00
JoshuaBatty cf1a928b84 reset examples 2020-05-26 14:55:55 +02:00
JoshuaBatty 935fa280d3 buffersizes api for wasapi 2020-05-26 14:55:55 +02:00
JoshuaBatty 4cd9d0bcec finished implementing buffersizes for coreaudio 2020-05-26 14:55:55 +02:00
JoshuaBatty 9e660da433 implemented buffersizes for asio 2020-05-26 14:55:55 +02:00
JoshuaBatty 6edee6c6c2 coreaudio buffersize wip 2020-05-26 14:55:55 +02:00
JoshuaBatty d72b546dca gets buffersize range and allows to set buffersize for alsa 2020-05-26 14:55:55 +02:00
JoshuaBatty 7081c3bbd6 wip alsa buffersizes 2020-05-26 14:55:55 +02:00
JoshuaBatty 7eb45bca6f cargo fmt 2020-05-26 14:55:55 +02:00
JoshuaBatty 919722a785 with_sample_rate_and_buffer_size sample_rate is set to self.max_sample_rate 2020-05-26 14:55:55 +02:00
JoshuaBatty f762e5dd8f prototyping an API for getting min and max buffersize 2020-05-26 14:55:55 +02:00
18 changed files with 522 additions and 125 deletions

View File

@ -127,6 +127,25 @@ jobs:
- name: Build beep example
run: cargo build --example beep --target ${{ matrix.target }} --features=wasm-bindgen
wasm32-wasi-test:
strategy:
matrix:
target: [wasm32-wasi]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
- name: Install stable
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
target: ${{ matrix.target }}
- name: Build beep example
run: cargo build --example beep --target ${{ matrix.target }}
windows-test:
strategy:
matrix:

View File

@ -1,4 +1,8 @@
# Unreleased
# Version 0.12.1 (2020-07-23)
- Bugfix release to get the asio feature working again.
# Version 0.12.0 (2020-07-09)
- Large refactor removing the blocking EventLoop API.
- Rename many `Format` types to `StreamConfig`:

View File

@ -1,6 +1,6 @@
[package]
name = "cpal"
version = "0.11.0"
version = "0.12.1"
authors = ["The CPAL contributors", "Pierre Krieger <pierre.krieger1708@gmail.com>"]
description = "Low-level cross-platform audio I/O library in pure Rust."
repository = "https://github.com/rustaudio/cpal"
@ -9,12 +9,10 @@ license = "Apache-2.0"
keywords = ["audio", "sound"]
[features]
asio = ["asio-sys"] # Only available on Windows. See README for setup instructions.
asio = ["asio-sys", "num-traits"] # Only available on Windows. See README for setup instructions.
[dependencies]
thiserror = "1.0.2"
lazy_static = "1.3"
num-traits = "0.2.6"
[dev-dependencies]
anyhow = "1.0.12"
@ -23,8 +21,10 @@ ringbuf = "0.1.6"
[target.'cfg(target_os = "windows")'.dependencies]
winapi = { version = "0.3", features = ["audiosessiontypes", "audioclient", "coml2api", "combaseapi", "debug", "devpkey", "handleapi", "ksmedia", "mmdeviceapi", "objbase", "profileapi", "std", "synchapi", "winbase", "winuser"] }
asio-sys = { version = "0.1", path = "asio-sys", optional = true }
asio-sys = { version = "0.2", path = "asio-sys", optional = true }
num-traits = { version = "0.2.6", optional = true }
parking_lot = "0.9"
lazy_static = "1.3"
[target.'cfg(any(target_os = "linux", target_os = "dragonfly", target_os = "freebsd"))'.dependencies]
alsa = "0.4.1"

View File

@ -26,6 +26,10 @@ Note that on Linux, the ALSA development files are required. These are provided
as part of the `libasound2-dev` package on Debian and Ubuntu distributions and
`alsa-lib-devel` on Fedora.
## Compiling for Web Assembly
If you are interested in using CPAL with WASM, please see [this guide](https://github.com/RustAudio/cpal/wiki/Setting-up-a-new-CPAL-WASM-project) in our Wiki which walks through setting up a new project from scratch.
## ASIO on Windows
[ASIO](https://en.wikipedia.org/wiki/Audio_Stream_Input/Output) is an audio

View File

@ -1,20 +1,20 @@
[package]
name = "asio-sys"
version = "0.1.0"
version = "0.2.0"
authors = ["Tom Gowan <tomrgowan@gmail.com>"]
description = "Low-level interface and binding generation for the steinberg ASIO SDK."
repository = "https://github.com/tomaka/cpal"
repository = "https://github.com/RustAudio/cpal/"
documentation = "https://docs.rs/asio-sys"
license = "Apache-2.0"
keywords = ["audio", "sound", "asio", "steinberg"]
build = "build.rs"
[target.'cfg(any(target_os = "windows"))'.build-dependencies]
bindgen = "0.51.0"
bindgen = "0.54.0"
walkdir = "2"
cc = "1.0.25"
[dependencies]
lazy_static = "1.0.0"
num-derive = "0.2"
num-derive = "0.3"
num-traits = "0.2"

View File

@ -19,6 +19,7 @@ pub enum AsioError {
HardwareStuck,
NoRate,
ASE_NoMemory,
InvalidBufferSize,
UnknownError,
}
@ -63,6 +64,7 @@ impl fmt::Display for AsioError {
"sample clock or rate cannot be determined or is not present"
),
AsioError::ASE_NoMemory => write!(f, "not enough memory for completing the request"),
AsioError::InvalidBufferSize => write!(f, "buffersize out of range for device"),
AsioError::UnknownError => write!(f, "Error not in SDK"),
}
}
@ -94,6 +96,7 @@ impl Error for AsioError {
AsioError::HardwareStuck => "hardware is not running when sample position is inquired",
AsioError::NoRate => "sample clock or rate cannot be determined or is not present",
AsioError::ASE_NoMemory => "not enough memory for completing the request",
AsioError::InvalidBufferSize => "buffersize out of range for device",
AsioError::UnknownError => "Error not in SDK",
}
}

View File

@ -385,6 +385,14 @@ impl Driver {
Ok(channel)
}
/// Get the min and max supported buffersize of the driver.
pub fn buffersize_range(&self) -> Result<(c_long, c_long), AsioError> {
let buffer_sizes = asio_get_buffer_sizes()?;
let min = buffer_sizes.min;
let max = buffer_sizes.max;
Ok((min, max))
}
/// Get current sample rate of the driver.
pub fn sample_rate(&self) -> Result<c_double, AsioError> {
let mut rate: c_double = 0.0;
@ -431,8 +439,14 @@ impl Driver {
///
/// This will destroy any already allocated buffers.
///
/// The preferred buffer size from ASIO is used.
fn create_buffers(&self, buffer_infos: &mut [AsioBufferInfo]) -> Result<c_long, AsioError> {
/// If `buffer_size` is `None` then the preferred buffer size from ASIO is used,
/// otherwise the desired buffer size is used if the requested size is within
/// the range of accepted buffer sizes for the device.
fn create_buffers(
&self,
buffer_infos: &mut [AsioBufferInfo],
buffer_size: Option<i32>,
) -> Result<c_long, AsioError> {
let num_channels = buffer_infos.len();
// To pass as ai::ASIOCallbacks
@ -449,6 +463,17 @@ impl Driver {
);
}
let buffer_size = match buffer_size {
Some(v) => {
if v <= buffer_sizes.max {
v
} else {
return Err(AsioError::InvalidBufferSize);
}
}
None => buffer_sizes.pref,
};
// Ensure the driver is in the `Initialized` state.
if let DriverState::Running = *state {
state.stop()?;
@ -460,23 +485,27 @@ impl Driver {
asio_result!(ai::ASIOCreateBuffers(
buffer_infos.as_mut_ptr() as *mut _,
num_channels as i32,
buffer_sizes.pref,
buffer_size,
&mut callbacks as *mut _ as *mut _,
))?;
}
*state = DriverState::Prepared;
Ok(buffer_sizes.pref)
Ok(buffer_size)
}
/// Creates the streams.
///
/// `buffer_size` sets the desired buffer size. If `None` is passed in, then the
/// default buffer size for the device is used.
///
/// Both input and output streams need to be created together as a single slice of
/// `ASIOBufferInfo`.
fn create_streams(
&self,
mut input_buffer_infos: Vec<AsioBufferInfo>,
mut output_buffer_infos: Vec<AsioBufferInfo>,
buffer_size: Option<i32>,
) -> Result<AsioStreams, AsioError> {
let (input, output) = match (
input_buffer_infos.is_empty(),
@ -489,7 +518,7 @@ impl Driver {
let mut all_buffer_infos = input_buffer_infos;
all_buffer_infos.append(&mut output_buffer_infos);
// Create the buffers. On success, split the output and input again.
let buffer_size = self.create_buffers(&mut all_buffer_infos)?;
let buffer_size = self.create_buffers(&mut all_buffer_infos, buffer_size)?;
let output_buffer_infos = all_buffer_infos.split_off(split_point);
let input_buffer_infos = all_buffer_infos;
let input = Some(AsioStream {
@ -504,7 +533,7 @@ impl Driver {
}
// Just input
(false, true) => {
let buffer_size = self.create_buffers(&mut input_buffer_infos)?;
let buffer_size = self.create_buffers(&mut input_buffer_infos, buffer_size)?;
let input = Some(AsioStream {
buffer_infos: input_buffer_infos,
buffer_size,
@ -514,7 +543,7 @@ impl Driver {
}
// Just output
(true, false) => {
let buffer_size = self.create_buffers(&mut output_buffer_infos)?;
let buffer_size = self.create_buffers(&mut output_buffer_infos, buffer_size)?;
let input = None;
let output = Some(AsioStream {
buffer_infos: output_buffer_infos,
@ -537,17 +566,21 @@ impl Driver {
///
/// `num_channels` is the desired number of input channels.
///
/// `buffer_size` sets the desired buffer size. If `None` is passed in, then the
/// default buffer size for the device is used.
///
/// This returns a full AsioStreams with both input and output if output was active.
pub fn prepare_input_stream(
&self,
output: Option<AsioStream>,
num_channels: usize,
buffer_size: Option<i32>,
) -> Result<AsioStreams, AsioError> {
let input_buffer_infos = prepare_buffer_infos(true, num_channels);
let output_buffer_infos = output
.map(|output| output.buffer_infos)
.unwrap_or_else(Vec::new);
self.create_streams(input_buffer_infos, output_buffer_infos)
self.create_streams(input_buffer_infos, output_buffer_infos, buffer_size)
}
/// Prepare the output stream.
@ -559,17 +592,21 @@ impl Driver {
///
/// `num_channels` is the desired number of output channels.
///
/// `buffer_size` sets the desired buffer size. If `None` is passed in, then the
/// default buffer size for the device is used.
///
/// This returns a full AsioStreams with both input and output if input was active.
pub fn prepare_output_stream(
&self,
input: Option<AsioStream>,
num_channels: usize,
buffer_size: Option<i32>,
) -> Result<AsioStreams, AsioError> {
let input_buffer_infos = input
.map(|input| input.buffer_infos)
.unwrap_or_else(Vec::new);
let output_buffer_infos = prepare_buffer_infos(false, num_channels);
self.create_streams(input_buffer_infos, output_buffer_infos)
self.create_streams(input_buffer_infos, output_buffer_infos, buffer_size)
}
/// Releases buffers allocations.
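For reference, a small sketch (not part of this diff) of the rule the `create_buffers` doc comment above describes: keep a requested size only if it fits the driver-reported range, fall back to ASIO's preferred size when no request was made, and reject anything out of range. Note that the hunk above only checks the upper bound; this hypothetical `resolve_buffer_size` helper (with a local `InvalidBufferSize` standing in for `AsioError::InvalidBufferSize`) also checks the minimum.

use std::os::raw::c_long;

// Hypothetical stand-in for asio-sys's AsioError::InvalidBufferSize.
#[derive(Debug)]
struct InvalidBufferSize;

// Keep the caller's request only when it lies inside the driver-reported frame range,
// fall back to the ASIO-preferred size when no request was made, and reject anything
// out of range (mirroring the match on `buffer_size` in create_buffers above).
fn resolve_buffer_size(
    requested: Option<i32>,
    min: c_long,
    max: c_long,
    preferred: c_long,
) -> Result<c_long, InvalidBufferSize> {
    match requested {
        Some(v) if c_long::from(v) >= min && c_long::from(v) <= max => Ok(c_long::from(v)),
        Some(_) => Err(InvalidBufferSize),
        None => Ok(preferred),
    }
}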

View File

@ -11,6 +11,7 @@ fn main() -> Result<(), anyhow::Error> {
for host_id in available_hosts {
println!("{}", host_id.name());
let host = cpal::host_from_id(host_id)?;
let default_in = host.default_input_device().map(|e| e.name().unwrap());
let default_out = host.default_output_device().map(|e| e.name().unwrap());
println!(" Default Input Device:\n {:?}", default_in);

View File

@ -3417,9 +3417,9 @@
}
},
"lodash": {
"version": "4.17.15",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz",
"integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A=="
"version": "4.17.19",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.19.tgz",
"integrity": "sha512-JNvd8XER9GQX0v2qJgsaN/mzFCNA5BRe/j8JN9d+tWyGLSodKQHKFicdwNYzWwI3wjRnaKPsGj1XkBjx/F96DQ=="
},
"loglevel": {
"version": "1.6.7",

View File

@ -3,16 +3,17 @@ extern crate libc;
use self::alsa::poll::Descriptors;
use crate::{
BackendSpecificError, BuildStreamError, ChannelCount, Data, DefaultStreamConfigError,
DeviceNameError, DevicesError, InputCallbackInfo, OutputCallbackInfo, PauseStreamError,
PlayStreamError, SampleFormat, SampleRate, StreamConfig, StreamError, SupportedStreamConfig,
SupportedStreamConfigRange, SupportedStreamConfigsError,
BackendSpecificError, BufferSize, BuildStreamError, ChannelCount, Data,
DefaultStreamConfigError, DeviceNameError, DevicesError, InputCallbackInfo, OutputCallbackInfo,
PauseStreamError, PlayStreamError, SampleFormat, SampleRate, StreamConfig, StreamError,
SupportedBufferSize, SupportedStreamConfig, SupportedStreamConfigRange,
SupportedStreamConfigsError,
};
use std::cmp;
use std::convert::TryInto;
use std::sync::Arc;
use std::thread::{self, JoinHandle};
use std::vec::IntoIter as VecIntoIter;
use std::{cmp, mem};
use traits::{DeviceTrait, HostTrait, StreamTrait};
pub use self::enumerate::{default_input_device, default_output_device, Devices};
@ -339,6 +340,14 @@ impl Device {
})
.collect::<Vec<_>>();
let min_buffer_size = hw_params.get_buffer_size_min()?;
let max_buffer_size = hw_params.get_buffer_size_max()?;
let buffer_size_range = SupportedBufferSize::Range {
min: min_buffer_size as u32,
max: max_buffer_size as u32,
};
let mut output = Vec::with_capacity(
supported_formats.len() * supported_channels.len() * sample_rates.len(),
);
@ -349,6 +358,7 @@ impl Device {
channels: channels.clone(),
min_sample_rate: SampleRate(min_rate as u32),
max_sample_rate: SampleRate(max_rate as u32),
buffer_size: buffer_size_range.clone(),
sample_format: sample_format,
});
}
@ -436,7 +446,9 @@ struct StreamInner {
// Minimum number of samples to put in the buffer.
period_len: usize,
#[allow(dead_code)]
// Whether or not the hardware supports pausing the stream.
// TODO: We need an API to expose this. See #197, #284.
can_pause: bool,
// In the case that the device does not return valid timestamps via `get_htstamp`, this field
@ -869,7 +881,7 @@ fn set_hw_params_from_format<'a>(
config: &StreamConfig,
sample_format: SampleFormat,
) -> Result<alsa::pcm::HwParams<'a>, BackendSpecificError> {
let mut hw_params = alsa::pcm::HwParams::any(pcm_handle)?;
let hw_params = alsa::pcm::HwParams::any(pcm_handle)?;
hw_params.set_access(alsa::pcm::Access::RWInterleaved)?;
let sample_format = if cfg!(target_endian = "big") {
@ -890,11 +902,14 @@ fn set_hw_params_from_format<'a>(
hw_params.set_rate(config.sample_rate.0, alsa::ValueOr::Nearest)?;
hw_params.set_channels(config.channels as u32)?;
// If this isn't set manually, an overlarge buffer may be used, causing audio delay
let mut hw_params_copy = hw_params.clone();
if let Err(_) = hw_params.set_buffer_time_near(100_000, alsa::ValueOr::Nearest) {
// Swap out the params with errors for a snapshot taken before the error was introduced.
mem::swap(&mut hw_params_copy, &mut hw_params);
match config.buffer_size {
BufferSize::Fixed(v) => hw_params.set_buffer_size(v as alsa::pcm::Frames)?,
BufferSize::Default => {
// These values together represent a moderate latency and wakeup interval.
// Without them we are at the mercy of the device
hw_params.set_period_time_near(25_000, alsa::ValueOr::Nearest)?;
hw_params.set_buffer_time_near(100_000, alsa::ValueOr::Nearest)?;
}
}
pcm_handle.hw_params(&hw_params)?;
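As an aside, the `BufferSize::Fixed` arm above casts cpal's `u32` frame count straight to `alsa::pcm::Frames`, which (per #457 above) is an alias for `c_long` and therefore only 32 bits wide on armv7 targets such as the Raspberry Pi. A hedged sketch of a checked conversion, with a local `Frames` alias standing in for `alsa::pcm::Frames`:

use std::convert::TryFrom;
use std::os::raw::c_long;

// Local alias standing in for alsa::pcm::Frames, which is itself an alias for c_long.
type Frames = c_long;

// Convert cpal's u32 frame count into ALSA frames, returning None instead of silently
// truncating on targets where c_long is only 32 bits (e.g. armv7 / Raspberry Pi).
fn frame_count_to_alsa_frames(frames: u32) -> Option<Frames> {
    Frames::try_from(frames).ok()
}

fn main() {
    assert_eq!(frame_count_to_alsa_frames(1024), Some(1024));
}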

View File

@ -12,6 +12,7 @@ use DeviceNameError;
use DevicesError;
use SampleFormat;
use SampleRate;
use SupportedBufferSize;
use SupportedStreamConfig;
use SupportedStreamConfigRange;
use SupportedStreamConfigsError;
@ -77,9 +78,13 @@ impl Device {
continue;
}
for channels in 1..f.channels + 1 {
f.channels = channels;
f.sample_rate = rate;
supported_configs.push(SupportedStreamConfigRange::from(f.clone()));
supported_configs.push(SupportedStreamConfigRange {
channels,
min_sample_rate: rate,
max_sample_rate: rate,
buffer_size: f.buffer_size.clone(),
sample_format: f.sample_format.clone(),
})
}
}
Ok(supported_configs.into_iter())
@ -110,9 +115,13 @@ impl Device {
continue;
}
for channels in 1..f.channels + 1 {
f.channels = channels;
f.sample_rate = rate;
supported_configs.push(SupportedStreamConfigRange::from(f.clone()));
supported_configs.push(SupportedStreamConfigRange {
channels,
min_sample_rate: rate,
max_sample_rate: rate,
buffer_size: f.buffer_size.clone(),
sample_format: f.sample_format.clone(),
})
}
}
Ok(supported_configs.into_iter())
@ -122,6 +131,11 @@ impl Device {
pub fn default_input_config(&self) -> Result<SupportedStreamConfig, DefaultStreamConfigError> {
let channels = self.driver.channels().map_err(default_config_err)?.ins as u16;
let sample_rate = SampleRate(self.driver.sample_rate().map_err(default_config_err)? as _);
let (min, max) = self.driver.buffersize_range().map_err(default_config_err)?;
let buffer_size = SupportedBufferSize::Range {
min: min as u32,
max: max as u32,
};
// Map the ASIO sample type to a CPAL sample type
let data_type = self.driver.input_data_type().map_err(default_config_err)?;
let sample_format = convert_data_type(&data_type)
@ -129,6 +143,7 @@ impl Device {
Ok(SupportedStreamConfig {
channels,
sample_rate,
buffer_size,
sample_format,
})
}
@ -137,12 +152,18 @@ impl Device {
pub fn default_output_config(&self) -> Result<SupportedStreamConfig, DefaultStreamConfigError> {
let channels = self.driver.channels().map_err(default_config_err)?.outs as u16;
let sample_rate = SampleRate(self.driver.sample_rate().map_err(default_config_err)? as _);
let (min, max) = self.driver.buffersize_range().map_err(default_config_err)?;
let buffer_size = SupportedBufferSize::Range {
min: min as u32,
max: max as u32,
};
let data_type = self.driver.output_data_type().map_err(default_config_err)?;
let sample_format = convert_data_type(&data_type)
.ok_or(DefaultStreamConfigError::StreamTypeNotSupported)?;
Ok(SupportedStreamConfig {
channels,
sample_rate,
buffer_size,
sample_format,
})
}

View File

@ -5,9 +5,9 @@ use self::num_traits::PrimInt;
use super::parking_lot::Mutex;
use super::Device;
use crate::{
BackendSpecificError, BuildStreamError, Data, InputCallbackInfo, OutputCallbackInfo,
PauseStreamError, PlayStreamError, Sample, SampleFormat, StreamConfig, StreamError,
SupportedStreamConfig,
BackendSpecificError, BufferSize, BuildStreamError, Data, InputCallbackInfo,
OutputCallbackInfo, PauseStreamError, PlayStreamError, Sample, SampleFormat, StreamConfig,
StreamError,
};
use std;
use std::sync::atomic::{AtomicBool, Ordering};
@ -482,6 +482,12 @@ impl Device {
}?;
let num_channels = config.channels as usize;
let ref mut streams = *self.asio_streams.lock();
let buffer_size = match config.buffer_size {
BufferSize::Fixed(v) => Some(v as i32),
BufferSize::Default => None,
};
// Either create a stream if there's none or hand back the
// size of the current one.
match streams.input {
@ -489,7 +495,7 @@ impl Device {
None => {
let output = streams.output.take();
self.driver
.prepare_input_stream(output, num_channels)
.prepare_input_stream(output, num_channels, buffer_size)
.map(|new_streams| {
let bs = match new_streams.input {
Some(ref inp) => inp.buffer_size as usize,
@ -523,6 +529,12 @@ impl Device {
}?;
let num_channels = config.channels as usize;
let ref mut streams = *self.asio_streams.lock();
let buffer_size = match config.buffer_size {
BufferSize::Fixed(v) => Some(v as i32),
BufferSize::Default => None,
};
// Either create a stream if there's none or hand back the
// size of the current one.
match streams.output {
@ -530,7 +542,7 @@ impl Device {
None => {
let output = streams.output.take();
self.driver
.prepare_output_stream(output, num_channels)
.prepare_output_stream(output, num_channels, buffer_size)
.map(|new_streams| {
let bs = match new_streams.output {
Some(ref out) => out.buffer_size as usize,
@ -645,6 +657,7 @@ fn check_config(
let StreamConfig {
channels,
sample_rate,
buffer_size,
} = config;
// Try and set the sample rate to what the user selected.
let sample_rate = sample_rate.0.into();

View File

@ -5,7 +5,8 @@ use self::core_foundation_sys::string::{CFStringGetCString, CFStringGetCStringPt
use self::coreaudio::audio_unit::render_callback::{self, data};
use self::coreaudio::audio_unit::{AudioUnit, Element, Scope};
use self::coreaudio::sys::{
kAudioDevicePropertyAvailableNominalSampleRates, kAudioDevicePropertyDeviceNameCFString,
kAudioDevicePropertyAvailableNominalSampleRates, kAudioDevicePropertyBufferFrameSize,
kAudioDevicePropertyBufferFrameSizeRange, kAudioDevicePropertyDeviceNameCFString,
kAudioDevicePropertyNominalSampleRate, kAudioDevicePropertyScopeOutput,
kAudioDevicePropertyStreamConfiguration, kAudioDevicePropertyStreamFormat,
kAudioFormatFlagIsFloat, kAudioFormatFlagIsPacked, kAudioFormatLinearPCM,
@ -20,10 +21,11 @@ use self::coreaudio::sys::{
};
use crate::traits::{DeviceTrait, HostTrait, StreamTrait};
use crate::{
BackendSpecificError, BuildStreamError, ChannelCount, Data, DefaultStreamConfigError,
DeviceNameError, DevicesError, InputCallbackInfo, OutputCallbackInfo, PauseStreamError,
PlayStreamError, SampleFormat, SampleRate, StreamConfig, StreamError, SupportedStreamConfig,
SupportedStreamConfigRange, SupportedStreamConfigsError,
BackendSpecificError, BufferSize, BuildStreamError, ChannelCount, Data,
DefaultStreamConfigError, DeviceNameError, DevicesError, InputCallbackInfo, OutputCallbackInfo,
PauseStreamError, PlayStreamError, SampleFormat, SampleRate, StreamConfig, StreamError,
SupportedBufferSize, SupportedStreamConfig, SupportedStreamConfigRange,
SupportedStreamConfigsError,
};
use std::cell::RefCell;
use std::ffi::CStr;
@ -276,6 +278,9 @@ impl Device {
let ranges: *mut AudioValueRange = ranges.as_mut_ptr() as *mut _;
let ranges: &'static [AudioValueRange] = slice::from_raw_parts(ranges, n_ranges);
let audio_unit = audio_unit_from_device(self, true)?;
let buffer_size = get_io_buffer_frame_size_range(&audio_unit)?;
// Collect the supported formats for the device.
let mut fmts = vec![];
for range in ranges {
@ -283,6 +288,7 @@ impl Device {
channels: n_channels as ChannelCount,
min_sample_rate: SampleRate(range.mMinimum as _),
max_sample_rate: SampleRate(range.mMaximum as _),
buffer_size: buffer_size.clone(),
sample_format: sample_format,
};
fmts.push(fmt);
@ -374,9 +380,13 @@ impl Device {
}
};
let audio_unit = audio_unit_from_device(self, true)?;
let buffer_size = get_io_buffer_frame_size_range(&audio_unit)?;
let config = SupportedStreamConfig {
sample_rate: SampleRate(asbd.mSampleRate as _),
channels: asbd.mChannelsPerFrame as _,
buffer_size: buffer_size,
sample_format: sample_format,
};
Ok(config)
@ -426,6 +436,24 @@ impl From<coreaudio::Error> for BuildStreamError {
}
}
impl From<coreaudio::Error> for SupportedStreamConfigsError {
fn from(err: coreaudio::Error) -> SupportedStreamConfigsError {
let description = format!("{}", err);
let err = BackendSpecificError { description };
// Check for possible DeviceNotAvailable variant
SupportedStreamConfigsError::BackendSpecific { err }
}
}
impl From<coreaudio::Error> for DefaultStreamConfigError {
fn from(err: coreaudio::Error) -> DefaultStreamConfigError {
let description = format!("{}", err);
let err = BackendSpecificError { description };
// Check for possible DeviceNotAvailable variant
DefaultStreamConfigError::BackendSpecific { err }
}
}
// Create a coreaudio AudioStreamBasicDescription from a CPAL Format.
fn asbd_from_config(
config: &StreamConfig,
@ -656,6 +684,29 @@ impl Device {
let asbd = asbd_from_config(config, sample_format);
audio_unit.set_property(kAudioUnitProperty_StreamFormat, scope, element, Some(&asbd))?;
// Set the buffersize
match config.buffer_size {
BufferSize::Fixed(v) => {
let buffer_size_range = get_io_buffer_frame_size_range(&audio_unit)?;
match buffer_size_range {
SupportedBufferSize::Range { min, max } => {
if v >= min && v <= max {
audio_unit.set_property(
kAudioDevicePropertyBufferFrameSize,
scope,
element,
Some(&v),
)?
} else {
return Err(BuildStreamError::StreamConfigNotSupported);
}
}
SupportedBufferSize::Unknown => (),
}
}
BufferSize::Default => (),
}
// Register the callback that is being called by coreaudio whenever it needs data to be
// fed to the audio buffer.
let bytes_per_channel = sample_format.sample_size();
@ -727,6 +778,29 @@ impl Device {
let asbd = asbd_from_config(config, sample_format);
audio_unit.set_property(kAudioUnitProperty_StreamFormat, scope, element, Some(&asbd))?;
// Set the buffersize
match config.buffer_size {
BufferSize::Fixed(v) => {
let buffer_size_range = get_io_buffer_frame_size_range(&audio_unit)?;
match buffer_size_range {
SupportedBufferSize::Range { min, max } => {
if v >= min && v <= max {
audio_unit.set_property(
kAudioDevicePropertyBufferFrameSize,
scope,
element,
Some(&v),
)?
} else {
return Err(BuildStreamError::StreamConfigNotSupported);
}
}
SupportedBufferSize::Unknown => (),
}
}
BufferSize::Default => (),
}
// Register the callback that is being called by coreaudio whenever it needs data to be
// fed to the audio buffer.
let bytes_per_channel = sample_format.sample_size();
@ -848,3 +922,18 @@ fn check_os_status(os_status: OSStatus) -> Result<(), BackendSpecificError> {
}
}
}
fn get_io_buffer_frame_size_range(
audio_unit: &AudioUnit,
) -> Result<SupportedBufferSize, coreaudio::Error> {
let buffer_size_range: AudioValueRange = audio_unit.get_property(
kAudioDevicePropertyBufferFrameSizeRange,
Scope::Global,
Element::Output,
)?;
Ok(SupportedBufferSize::Range {
min: buffer_size_range.mMinimum as u32,
max: buffer_size_range.mMaximum as u32,
})
}
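
The same range check appears verbatim in both the input- and output-stream builders above. As a hedged aside (not part of this diff), it could be shared through a small helper built on the `SupportedBufferSize` and `BuildStreamError` types introduced in this changeset; `Ok(Some(v))` means set `kAudioDevicePropertyBufferFrameSize` to `v`, while `Ok(None)` means leave the device default when the range is `Unknown`.

use cpal::{BuildStreamError, SupportedBufferSize};

// Hypothetical shared helper for the duplicated check above: returns the frame count to
// apply, None when the supported range is unknown (keep the device default), or an error
// when the request falls outside the reported range.
fn checked_buffer_frames(
    requested: u32,
    supported: &SupportedBufferSize,
) -> Result<Option<u32>, BuildStreamError> {
    match *supported {
        SupportedBufferSize::Range { min, max } if (min..=max).contains(&requested) => {
            Ok(Some(requested))
        }
        SupportedBufferSize::Range { .. } => Err(BuildStreamError::StreamConfigNotSupported),
        SupportedBufferSize::Unknown => Ok(None),
    }
}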

View File

@ -8,10 +8,10 @@ use stdweb::web::TypedArray;
use stdweb::Reference;
use crate::{
BuildStreamError, Data, DefaultStreamConfigError, DeviceNameError, DevicesError,
BufferSize, BuildStreamError, Data, DefaultStreamConfigError, DeviceNameError, DevicesError,
InputCallbackInfo, OutputCallbackInfo, PauseStreamError, PlayStreamError, SampleFormat,
StreamConfig, StreamError, SupportedStreamConfig, SupportedStreamConfigRange,
SupportedStreamConfigsError,
SampleRate, StreamConfig, StreamError, SupportedBufferSize, SupportedStreamConfig,
SupportedStreamConfigRange, SupportedStreamConfigsError,
};
use traits::{DeviceTrait, HostTrait, StreamTrait};
@ -41,6 +41,16 @@ pub struct StreamId(usize);
pub type SupportedInputConfigs = ::std::vec::IntoIter<SupportedStreamConfigRange>;
pub type SupportedOutputConfigs = ::std::vec::IntoIter<SupportedStreamConfigRange>;
const MIN_CHANNELS: u16 = 1;
const MAX_CHANNELS: u16 = 32;
const MIN_SAMPLE_RATE: SampleRate = SampleRate(8_000);
const MAX_SAMPLE_RATE: SampleRate = SampleRate(96_000);
const DEFAULT_SAMPLE_RATE: SampleRate = SampleRate(44_100);
const MIN_BUFFER_SIZE: u32 = 1;
const MAX_BUFFER_SIZE: u32 = std::u32::MAX;
const DEFAULT_BUFFER_SIZE: usize = 2048;
const SUPPORTED_SAMPLE_FORMAT: SampleFormat = SampleFormat::F32;
impl Host {
pub fn new() -> Result<Self, crate::HostUnavailable> {
stdweb::initialize();
@ -71,21 +81,20 @@ impl Device {
fn supported_output_configs(
&self,
) -> Result<SupportedOutputConfigs, SupportedStreamConfigsError> {
// TODO: right now cpal's API doesn't allow flexibility here
// "44100" and "2" (channels) have also been hard-coded in the rest of the code ; if
// this ever becomes more flexible, don't forget to change that
// According to https://developer.mozilla.org/en-US/docs/Web/API/BaseAudioContext/createBuffer
// browsers must support at least 1 to 32 channels and 8,000 Hz to 96,000 Hz.
//
// UPDATE: We can do this now. Might be best to use `crate::COMMON_SAMPLE_RATES` and
// filter out those that lie outside the range specified above.
Ok(vec![SupportedStreamConfigRange {
channels: 2,
min_sample_rate: ::SampleRate(44100),
max_sample_rate: ::SampleRate(44100),
sample_format: ::SampleFormat::F32,
}]
.into_iter())
let buffer_size = SupportedBufferSize::Range {
min: MIN_BUFFER_SIZE,
max: MAX_BUFFER_SIZE,
};
let configs: Vec<_> = (MIN_CHANNELS..=MAX_CHANNELS)
.map(|channels| SupportedStreamConfigRange {
channels,
min_sample_rate: MIN_SAMPLE_RATE,
max_sample_rate: MAX_SAMPLE_RATE,
buffer_size: buffer_size.clone(),
sample_format: SUPPORTED_SAMPLE_FORMAT,
})
.collect();
Ok(configs.into_iter())
}
fn default_input_config(&self) -> Result<SupportedStreamConfig, DefaultStreamConfigError> {
@ -93,12 +102,15 @@ impl Device {
}
fn default_output_config(&self) -> Result<SupportedStreamConfig, DefaultStreamConfigError> {
// TODO: because it is hard coded, see supported_output_configs.
Ok(SupportedStreamConfig {
channels: 2,
sample_rate: ::SampleRate(44100),
sample_format: ::SampleFormat::F32,
})
const EXPECT: &str = "expected at least one valid webaudio stream config";
let mut configs: Vec<_> = self.supported_output_configs().expect(EXPECT).collect();
configs.sort_by(|a, b| a.cmp_default_heuristics(b));
let config = configs
.into_iter()
.next()
.expect(EXPECT)
.with_sample_rate(DEFAULT_SAMPLE_RATE);
Ok(config)
}
}
@ -169,7 +181,7 @@ impl DeviceTrait for Device {
fn build_output_stream_raw<D, E>(
&self,
_config: &StreamConfig,
config: &StreamConfig,
sample_format: SampleFormat,
data_callback: D,
error_callback: E,
@ -178,11 +190,20 @@ impl DeviceTrait for Device {
D: FnMut(&mut Data, &OutputCallbackInfo) + Send + 'static,
E: FnMut(StreamError) + Send + 'static,
{
assert_eq!(
sample_format,
SampleFormat::F32,
"emscripten backend currently only supports `f32` data",
);
if !valid_config(config, sample_format) {
return Err(BuildStreamError::StreamConfigNotSupported);
}
let buffer_size_frames = match config.buffer_size {
BufferSize::Fixed(v) => {
if v == 0 {
return Err(BuildStreamError::StreamConfigNotSupported);
} else {
v as usize
}
}
BufferSize::Default => DEFAULT_BUFFER_SIZE,
};
// Create the stream.
let audio_ctxt_ref = js!(return new AudioContext()).into_reference().unwrap();
@ -199,7 +220,14 @@ impl DeviceTrait for Device {
// See also: The call to `set_timeout` at the end of the `audio_callback_fn` which creates
// the loop.
set_timeout(
|| audio_callback_fn::<D, E>(user_data_ptr as *mut c_void),
|| {
audio_callback_fn::<D, E>(
user_data_ptr as *mut c_void,
config,
sample_format,
buffer_size_frames,
)
},
10,
);
@ -223,12 +251,18 @@ impl StreamTrait for Stream {
// The first argument of the callback function (a `void*`) is a casted pointer to `self`
// and to the `callback` parameter that was passed to `run`.
fn audio_callback_fn<D, E>(user_data_ptr: *mut c_void)
where
fn audio_callback_fn<D, E>(
user_data_ptr: *mut c_void,
config: &StreamConfig,
sample_format: SampleFormat,
buffer_size_frames: usize,
) where
D: FnMut(&mut Data, &OutputCallbackInfo) + Send + 'static,
E: FnMut(StreamError) + Send + 'static,
{
const SAMPLE_RATE: usize = 44100;
let num_channels = config.channels as usize;
let sample_rate = config.sample_rate.0;
let buffer_size_samples = buffer_size_frames * num_channels;
unsafe {
let user_data_ptr2 = user_data_ptr as *mut (&Stream, D, E);
@ -237,12 +271,11 @@ where
let audio_ctxt = &stream.audio_ctxt_ref;
// TODO: We should be re-using a buffer.
let mut temporary_buffer = vec![0.0; SAMPLE_RATE * 2 / 3];
let mut temporary_buffer = vec![0f32; buffer_size_samples];
{
let len = temporary_buffer.len();
let data = temporary_buffer.as_mut_ptr() as *mut ();
let sample_format = SampleFormat::F32;
let mut data = Data::from_parts(data, len, sample_format);
let now_secs: f64 = js!(@{audio_ctxt}.getOutputTimestamp().currentTime)
@ -253,7 +286,7 @@ where
// we estimate based on buffer size instead. Probably should use this, but it's only
// supported by firefox (2020-04-28).
// let latency_secs: f64 = js!(@{audio_ctxt}.outputLatency).try_into().unwrap();
let buffer_duration = frames_to_duration(len, SAMPLE_RATE);
let buffer_duration = frames_to_duration(len, sample_rate as usize);
let playback = callback
.add(buffer_duration)
.expect("`playback` occurs beyond representation supported by `StreamInstant`");
@ -273,19 +306,19 @@ where
typed_array
};
let num_channels = 2u32; // TODO: correct value
debug_assert_eq!(temporary_buffer.len() % num_channels as usize, 0);
js!(
var src_buffer = new Float32Array(@{typed_array}.buffer);
var context = @{audio_ctxt};
var buf_len = @{temporary_buffer.len() as u32};
var num_channels = @{num_channels};
var buffer_size_frames = @{buffer_size_frames as u32};
var num_channels = @{num_channels as u32};
var sample_rate = sample_rate;
var buffer = context.createBuffer(num_channels, buf_len / num_channels, 44100);
var buffer = context.createBuffer(num_channels, buffer_size_frames, sample_rate);
for (var channel = 0; channel < num_channels; ++channel) {
var buffer_content = buffer.getChannelData(channel);
for (var i = 0; i < buf_len / num_channels; ++i) {
for (var i = 0; i < buffer_size_frames; ++i) {
buffer_content[i] = src_buffer[i * num_channels + channel];
}
}
@ -299,7 +332,10 @@ where
// TODO: handle latency better ; right now we just use setInterval with the amount of sound
// data that is in each buffer ; this is obviously bad, and also the schedule is too tight
// and there may be underflows
set_timeout(|| audio_callback_fn::<D, E>(user_data_ptr), 330);
set_timeout(
|| audio_callback_fn::<D, E>(user_data_ptr, config, sample_format, buffer_size_frames),
buffer_size_frames as u32 * 1000 / sample_rate,
);
}
}
@ -348,6 +384,15 @@ fn is_webaudio_available() -> bool {
.unwrap()
}
// Whether or not the given stream configuration is valid for building a stream.
fn valid_config(conf: &StreamConfig, sample_format: SampleFormat) -> bool {
conf.channels <= MAX_CHANNELS
&& conf.channels >= MIN_CHANNELS
&& conf.sample_rate <= MAX_SAMPLE_RATE
&& conf.sample_rate >= MIN_SAMPLE_RATE
&& sample_format == SUPPORTED_SAMPLE_FORMAT
}
// Convert the given duration in frames at the given sample rate to a `std::time::Duration`.
fn frames_to_duration(frames: usize, rate: usize) -> std::time::Duration {
let secsf = frames as f64 / rate as f64;
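
For clarity, the frame/time arithmetic the callback scheduling above relies on, as a hedged sketch: `frames_to_millis` below is hypothetical but computes the same value as the `set_timeout` argument `buffer_size_frames as u32 * 1000 / sample_rate`, e.g. 2048 frames at 44,100 Hz last roughly 46.4 ms.

// Hypothetical helper computing the same interval as the set_timeout argument above:
// a buffer of `frames` frames at `rate` Hz lasts frames / rate seconds, expressed here
// in whole milliseconds via the same integer division used in the diff.
fn frames_to_millis(frames: u32, rate: u32) -> u32 {
    frames * 1000 / rate
}

fn main() {
    // 2048 frames at 44.1 kHz ≈ 46.4 ms; integer division yields 46.
    assert_eq!(frames_to_millis(2048, 44_100), 46);
}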

View File

@ -1,8 +1,8 @@
use crate::{
BackendSpecificError, Data, DefaultStreamConfigError, DeviceNameError, DevicesError,
InputCallbackInfo, OutputCallbackInfo, SampleFormat, SampleRate, StreamConfig,
SupportedStreamConfig, SupportedStreamConfigRange, SupportedStreamConfigsError,
COMMON_SAMPLE_RATES,
BackendSpecificError, BufferSize, Data, DefaultStreamConfigError, DeviceNameError,
DevicesError, InputCallbackInfo, OutputCallbackInfo, SampleFormat, SampleRate, StreamConfig,
SupportedBufferSize, SupportedStreamConfig, SupportedStreamConfigRange,
SupportedStreamConfigsError, COMMON_SAMPLE_RATES,
};
use std;
use std::ffi::OsString;
@ -27,6 +27,7 @@ use super::winapi::shared::mmreg;
use super::winapi::shared::winerror;
use super::winapi::shared::wtypes;
use super::winapi::Interface;
// https://msdn.microsoft.com/en-us/library/cc230355.aspx
use super::winapi::um::audioclient::{
self, IAudioClient, IID_IAudioClient, AUDCLNT_E_DEVICE_INVALIDATED,
@ -318,9 +319,11 @@ unsafe fn format_from_waveformatex_ptr(
// Unknown data format returned by GetMixFormat.
_ => return None,
};
let format = SupportedStreamConfig {
channels: (*waveformatex_ptr).nChannels as _,
sample_rate: SampleRate((*waveformatex_ptr).nSamplesPerSec),
buffer_size: SupportedBufferSize::Unknown,
sample_format,
};
Some(format)
@ -513,7 +516,7 @@ impl Device {
// TODO: Test the different sample formats?
// Create the supported formats.
let mut format = match format_from_waveformatex_ptr(default_waveformatex_ptr.0) {
let format = match format_from_waveformatex_ptr(default_waveformatex_ptr.0) {
Some(fmt) => fmt,
None => {
let description =
@ -525,8 +528,13 @@ impl Device {
};
let mut supported_formats = Vec::with_capacity(supported_sample_rates.len());
for rate in supported_sample_rates {
format.sample_rate = SampleRate(rate as _);
supported_formats.push(SupportedStreamConfigRange::from(format.clone()));
supported_formats.push(SupportedStreamConfigRange {
channels: format.channels.clone(),
min_sample_rate: SampleRate(rate as _),
max_sample_rate: SampleRate(rate as _),
buffer_size: format.buffer_size.clone(),
sample_format: format.sample_format.clone(),
})
}
Ok(supported_formats.into_iter())
}
@ -639,6 +647,16 @@ impl Device {
}
};
match config.buffer_size {
BufferSize::Fixed(_) => {
// TODO: We need IAudioClient3 to get buffer size ranges first.
// Otherwise the supported ranges are unknown. In the meantime
// the smallest buffer size is selected and used.
return Err(BuildStreamError::StreamConfigNotSupported);
}
BufferSize::Default => (),
};
// Computing the format and initializing the device.
let waveformatex = {
let format_attempt = config_to_waveformatextensible(config, sample_format)
@ -791,6 +809,16 @@ impl Device {
}
};
match config.buffer_size {
BufferSize::Fixed(_) => {
// TODO: We need IAudioClient3 to get buffer size ranges first.
// Otherwise the supported ranges are unknown. In the meantime
// the smallest buffer size is selected and used.
return Err(BuildStreamError::StreamConfigNotSupported);
}
BufferSize::Default => (),
};
// Computing the format and initializing the device.
let waveformatex = {
let format_attempt = config_to_waveformatextensible(config, sample_format)
@ -813,6 +841,7 @@ impl Device {
&format_attempt.Format,
ptr::null(),
);
match check_result(hresult) {
Err(ref e) if e.raw_os_error() == Some(AUDCLNT_E_DEVICE_INVALIDATED) => {
(*audio_client).Release();

View File

@ -7,10 +7,10 @@ use self::wasm_bindgen::prelude::*;
use self::wasm_bindgen::JsCast;
use self::web_sys::{AudioContext, AudioContextOptions};
use crate::{
BackendSpecificError, BuildStreamError, Data, DefaultStreamConfigError, DeviceNameError,
DevicesError, InputCallbackInfo, OutputCallbackInfo, PauseStreamError, PlayStreamError,
SampleFormat, SampleRate, StreamConfig, StreamError, SupportedStreamConfig,
SupportedStreamConfigRange, SupportedStreamConfigsError,
BackendSpecificError, BufferSize, BuildStreamError, Data, DefaultStreamConfigError,
DeviceNameError, DevicesError, InputCallbackInfo, OutputCallbackInfo, PauseStreamError,
PlayStreamError, SampleFormat, SampleRate, StreamConfig, StreamError, SupportedBufferSize,
SupportedStreamConfig, SupportedStreamConfigRange, SupportedStreamConfigsError,
};
use std::ops::DerefMut;
use std::sync::{Arc, Mutex, RwLock};
@ -39,6 +39,9 @@ const MAX_CHANNELS: u16 = 32;
const MIN_SAMPLE_RATE: SampleRate = SampleRate(8_000);
const MAX_SAMPLE_RATE: SampleRate = SampleRate(96_000);
const DEFAULT_SAMPLE_RATE: SampleRate = SampleRate(44_100);
const MIN_BUFFER_SIZE: u32 = 1;
const MAX_BUFFER_SIZE: u32 = std::u32::MAX;
const DEFAULT_BUFFER_SIZE: usize = 2048;
const SUPPORTED_SAMPLE_FORMAT: SampleFormat = SampleFormat::F32;
impl Host {
@ -93,11 +96,16 @@ impl Device {
fn supported_output_configs(
&self,
) -> Result<SupportedOutputConfigs, SupportedStreamConfigsError> {
let buffer_size = SupportedBufferSize::Range {
min: MIN_BUFFER_SIZE,
max: MAX_BUFFER_SIZE,
};
let configs: Vec<_> = (MIN_CHANNELS..=MAX_CHANNELS)
.map(|channels| SupportedStreamConfigRange {
channels,
min_sample_rate: MIN_SAMPLE_RATE,
max_sample_rate: MAX_SAMPLE_RATE,
buffer_size: buffer_size.clone(),
sample_format: SUPPORTED_SAMPLE_FORMAT,
})
.collect();
@ -117,7 +125,7 @@ impl Device {
configs.sort_by(|a, b| a.cmp_default_heuristics(b));
let config = configs
.into_iter()
.next()
.last()
.expect(EXPECT)
.with_sample_rate(DEFAULT_SAMPLE_RATE);
Ok(config)
@ -190,11 +198,20 @@ impl DeviceTrait for Device {
}
let n_channels = config.channels as usize;
// Use a buffer period of 1/3s for this early proof of concept.
// TODO: Change this to the requested buffer size when updating for the buffer size API.
let buffer_size_frames = (config.sample_rate.0 as f64 / 3.0).round() as usize;
let buffer_size_frames = match config.buffer_size {
BufferSize::Fixed(v) => {
if v == 0 {
return Err(BuildStreamError::StreamConfigNotSupported);
} else {
v as usize
}
}
BufferSize::Default => DEFAULT_BUFFER_SIZE,
};
let buffer_size_samples = buffer_size_frames * n_channels;
let buffer_time_step_secs = buffer_time_step_secs(buffer_size_frames, config.sample_rate);
let data_callback = Arc::new(Mutex::new(Box::new(data_callback)));
// Create the WebAudio stream.

View File

@ -180,6 +180,22 @@ pub type ChannelCount = u16;
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub struct SampleRate(pub u32);
/// The desired number of frames for the hardware buffer.
pub type FrameCount = u32;
/// The buffer size used by the device.
///
/// `Default` is used when no specific buffer size is set and uses the default
/// behavior of the given host. Note that the default buffer size may be surprisingly
/// large, leading to latency issues. If low latency is desired, `Fixed(FrameCount)`
/// should be used in accordance with the `SupportedBufferSize` range produced by
/// the `SupportedStreamConfig` API.
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum BufferSize {
Default,
Fixed(FrameCount),
}
/// The set of parameters used to describe how to open a stream.
///
/// The sample format is omitted in favour of using a sample type.
@ -187,6 +203,19 @@ pub struct SampleRate(pub u32);
pub struct StreamConfig {
pub channels: ChannelCount,
pub sample_rate: SampleRate,
pub buffer_size: BufferSize,
}
/// Describes the minimum and maximum supported buffer size for the device
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum SupportedBufferSize {
Range {
min: FrameCount,
max: FrameCount,
},
/// In the case that the platform provides no way of getting the default
/// buffersize before starting a stream.
Unknown,
}
/// Describes a range of supported stream configurations, retrieved via the
@ -198,6 +227,8 @@ pub struct SupportedStreamConfigRange {
pub(crate) min_sample_rate: SampleRate,
/// Maximum value for the samples rate of the supported formats.
pub(crate) max_sample_rate: SampleRate,
/// Buffersize ranges supported by the device
pub(crate) buffer_size: SupportedBufferSize,
/// Type of data expected by the device.
pub(crate) sample_format: SampleFormat,
}
@ -208,6 +239,7 @@ pub struct SupportedStreamConfigRange {
pub struct SupportedStreamConfig {
channels: ChannelCount,
sample_rate: SampleRate,
buffer_size: SupportedBufferSize,
sample_format: SampleFormat,
}
@ -289,6 +321,10 @@ impl SupportedStreamConfig {
self.sample_rate
}
pub fn buffer_size(&self) -> &SupportedBufferSize {
&self.buffer_size
}
pub fn sample_format(&self) -> SampleFormat {
self.sample_format
}
@ -297,6 +333,7 @@ impl SupportedStreamConfig {
StreamConfig {
channels: self.channels,
sample_rate: self.sample_rate,
buffer_size: BufferSize::Default,
}
}
}
@ -492,11 +529,15 @@ impl SupportedStreamConfigRange {
self.max_sample_rate
}
pub fn buffer_size(&self) -> &SupportedBufferSize {
&self.buffer_size
}
pub fn sample_format(&self) -> SampleFormat {
self.sample_format
}
/// Retrieve a `SupportedStreamConfig` with the given sample rate.
/// Retrieve a `SupportedStreamConfig` with the given sample rate and buffer size.
///
/// **panic!**s if the given `sample_rate` is outside the range specified within this
/// `SupportedStreamConfigRange` instance.
@ -504,8 +545,9 @@ impl SupportedStreamConfigRange {
assert!(self.min_sample_rate <= sample_rate && sample_rate <= self.max_sample_rate);
SupportedStreamConfig {
channels: self.channels,
sample_format: self.sample_format,
sample_rate,
sample_format: self.sample_format,
buffer_size: self.buffer_size,
}
}
@ -516,6 +558,7 @@ impl SupportedStreamConfigRange {
channels: self.channels,
sample_rate: self.max_sample_rate,
sample_format: self.sample_format,
buffer_size: self.buffer_size,
}
}
@ -590,24 +633,81 @@ impl SupportedStreamConfigRange {
}
}
#[test]
fn test_cmp_default_heuristics() {
let mut formats = vec![
SupportedStreamConfigRange {
buffer_size: SupportedBufferSize::Range { min: 256, max: 512 },
channels: 2,
min_sample_rate: SampleRate(1),
max_sample_rate: SampleRate(96000),
sample_format: SampleFormat::F32,
},
SupportedStreamConfigRange {
buffer_size: SupportedBufferSize::Range { min: 256, max: 512 },
channels: 1,
min_sample_rate: SampleRate(1),
max_sample_rate: SampleRate(96000),
sample_format: SampleFormat::F32,
},
SupportedStreamConfigRange {
buffer_size: SupportedBufferSize::Range { min: 256, max: 512 },
channels: 2,
min_sample_rate: SampleRate(1),
max_sample_rate: SampleRate(96000),
sample_format: SampleFormat::I16,
},
SupportedStreamConfigRange {
buffer_size: SupportedBufferSize::Range { min: 256, max: 512 },
channels: 2,
min_sample_rate: SampleRate(1),
max_sample_rate: SampleRate(96000),
sample_format: SampleFormat::U16,
},
SupportedStreamConfigRange {
buffer_size: SupportedBufferSize::Range { min: 256, max: 512 },
channels: 2,
min_sample_rate: SampleRate(1),
max_sample_rate: SampleRate(22050),
sample_format: SampleFormat::F32,
},
];
formats.sort_by(|a, b| a.cmp_default_heuristics(b));
// lowest-priority first:
assert_eq!(formats[0].sample_format(), SampleFormat::F32);
assert_eq!(formats[0].min_sample_rate(), SampleRate(1));
assert_eq!(formats[0].max_sample_rate(), SampleRate(96000));
assert_eq!(formats[0].channels(), 1);
assert_eq!(formats[1].sample_format(), SampleFormat::U16);
assert_eq!(formats[1].min_sample_rate(), SampleRate(1));
assert_eq!(formats[1].max_sample_rate(), SampleRate(96000));
assert_eq!(formats[1].channels(), 2);
assert_eq!(formats[2].sample_format(), SampleFormat::I16);
assert_eq!(formats[2].min_sample_rate(), SampleRate(1));
assert_eq!(formats[2].max_sample_rate(), SampleRate(96000));
assert_eq!(formats[2].channels(), 2);
assert_eq!(formats[3].sample_format(), SampleFormat::F32);
assert_eq!(formats[3].min_sample_rate(), SampleRate(1));
assert_eq!(formats[3].max_sample_rate(), SampleRate(22050));
assert_eq!(formats[3].channels(), 2);
assert_eq!(formats[4].sample_format(), SampleFormat::F32);
assert_eq!(formats[4].min_sample_rate(), SampleRate(1));
assert_eq!(formats[4].max_sample_rate(), SampleRate(96000));
assert_eq!(formats[4].channels(), 2);
}
impl From<SupportedStreamConfig> for StreamConfig {
fn from(conf: SupportedStreamConfig) -> Self {
conf.config()
}
}
impl From<SupportedStreamConfig> for SupportedStreamConfigRange {
#[inline]
fn from(format: SupportedStreamConfig) -> SupportedStreamConfigRange {
SupportedStreamConfigRange {
channels: format.channels,
min_sample_rate: format.sample_rate,
max_sample_rate: format.sample_rate,
sample_format: format.sample_format,
}
}
}
// If a backend does not provide an API for retrieving supported formats, we query it with a bunch
// of commonly used rates. This is always the case for wasapi and is sometimes the case for alsa.
//
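
To round off the new API surface, a hedged end-to-end usage sketch (assuming the in-progress cpal from this changeset, i.e. post-0.12.1, and an available default output device): query the default output config, inspect its `SupportedBufferSize`, and request a fixed 256-frame buffer only when the reported range contains it. The resulting `StreamConfig` would then be passed to `DeviceTrait::build_output_stream`.

use cpal::traits::{DeviceTrait, HostTrait};
use cpal::{BufferSize, StreamConfig, SupportedBufferSize};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let host = cpal::default_host();
    let device = host
        .default_output_device()
        .ok_or("no output device available")?;
    let supported = device.default_output_config()?;

    // Only ask for a fixed 256-frame buffer when the device reports a range containing it.
    let buffer_size = match supported.buffer_size() {
        SupportedBufferSize::Range { min, max } if (*min..=*max).contains(&256) => {
            BufferSize::Fixed(256)
        }
        _ => BufferSize::Default,
    };

    let config = StreamConfig {
        channels: supported.channels(),
        sample_rate: supported.sample_rate(),
        buffer_size,
    };
    println!("requested buffer size: {:?}", config.buffer_size);
    Ok(())
}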

View File

@ -1,7 +1,7 @@
//! Platform-specific items.
//!
//! This module also contains the implementation of the platform's dynamically dispatched `Host`
//! type and its associated `EventLoop`, `Device`, `StreamId` and other associated types. These
//! type and its associated `Device`, `StreamId` and other associated types. These
//! types are useful in the case that users require switching between audio host APIs at runtime.
#[doc(inline)]
@ -27,7 +27,7 @@ pub use self::platform_impl::*;
// }
// ```
//
// And so on for Device, Devices, EventLoop, Host, StreamId, SupportedInputConfigs,
// And so on for Device, Devices, Host, StreamId, SupportedInputConfigs,
// SupportedOutputConfigs and all their necessary trait implementations.
// ```
macro_rules! impl_platform_host {
@ -558,8 +558,8 @@ mod platform_impl {
)))]
mod platform_impl {
pub use crate::host::null::{
Device as NullDevice, Devices as NullDevices, EventLoop as NullEventLoop, Host as NullHost,
StreamId as NullStreamId, SupportedInputConfigs as NullSupportedInputConfigs,
Device as NullDevice, Devices as NullDevices, Host as NullHost,
SupportedInputConfigs as NullSupportedInputConfigs,
SupportedOutputConfigs as NullSupportedOutputConfigs,
};