Implement basic playback with cpal

Rob Watson 2020-09-10 14:52:45 +02:00
parent 62fda0fe6c
commit ba54ab3890
3 changed files with 48 additions and 17 deletions

View File

@@ -12,7 +12,12 @@ use yewtil::future::LinkFuture;
 const FILE_LOAD_CHUNK_SIZE: usize = 4096;
 
-pub type AudioData = Arc<web_sys::AudioBuffer>;
+#[derive(Debug)]
+pub struct AudioData {
+    pub buffers: Vec<Vec<f32>>,
+    pub num_channels: u32,
+    pub sample_rate: f32,
+}
 
 #[derive(Debug)]
 pub struct AudioAgent {
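
The type alias over `web_sys::AudioBuffer` becomes a plain struct: each channel's samples are copied into a `Vec<f32>` (planar layout, one buffer per channel), so the decoded audio is ordinary Rust data that can be wrapped in an `Arc` and handed to the cpal output stream. A minimal sketch of how one frame could be read from this layout, assuming the `AudioData` struct above (`frame_at` is a hypothetical helper, not part of the commit):

```rust
// Hypothetical helper, not in the diff: collect one frame (one sample per
// channel) from the planar layout of AudioData. Assumes `frame` is within
// the length of every channel buffer.
fn frame_at(data: &AudioData, frame: usize) -> Vec<f32> {
    (0..data.num_channels as usize)
        .map(|ch| data.buffers[ch][frame])
        .collect()
}
```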
@@ -53,7 +58,7 @@ impl Agent for AudioAgent {
     type Reach = Context<Self>;
     type Message = Msg;
     type Input = Request;
-    type Output = Result<AudioData, String>;
+    type Output = Result<Arc<AudioData>, String>;
 
     fn create(link: AgentLink<Self>) -> Self {
         // TODO: where should the AudioContext be initialized and stored?
@@ -79,7 +84,18 @@
             }
             Msg::AudioDecoded(samples) => {
                 let audio_buffer = web_sys::AudioBuffer::from(samples);
-                let audio_data = Arc::new(audio_buffer);
+                let num_channels = audio_buffer.number_of_channels();
+                let sample_rate = audio_buffer.sample_rate();
+                let buffers: Vec<Vec<f32>> = (0..num_channels)
+                    .map(|i| audio_buffer.get_channel_data(i).unwrap())
+                    .collect();
+
+                let audio_data = AudioData {
+                    buffers,
+                    num_channels,
+                    sample_rate,
+                };
+                let audio_data = Arc::new(audio_data);
 
                 for subscriber in self.subscribers.iter() {
                     self.link.respond(*subscriber, Ok(audio_data.clone()));

View File

@@ -1,5 +1,6 @@
 use crate::agents::audio_agent::{AudioAgent, AudioData};
 use crate::utils;
+use std::sync::Arc;
 use wasm_bindgen::JsCast;
 use web_sys::HtmlCanvasElement;
 use yew::agent::Bridged;
@@ -10,13 +11,13 @@ use yew::Bridge;
 pub struct Canvas {
     _audio_agent: Box<dyn Bridge<AudioAgent>>,
     canvas_node: NodeRef,
-    audio_data: Option<AudioData>,
+    audio_data: Option<Arc<AudioData>>,
 }
 
 #[derive(Debug)]
 pub enum Msg {
     Reset,
-    AudioAgentMessage(Result<AudioData, String>),
+    AudioAgentMessage(Result<Arc<AudioData>, String>),
 }
 
 impl Component for Canvas {
@@ -55,7 +56,7 @@ impl Canvas {
         self.redraw_canvas();
     }
 
-    fn handle_samples_loaded(&mut self, audio_data: AudioData) {
+    fn handle_samples_loaded(&mut self, audio_data: Arc<AudioData>) {
        self.audio_data = Some(audio_data);
        self.redraw_canvas();
     }
@@ -85,10 +86,10 @@ impl Canvas {
         if self.audio_data.is_some() {
             let audio_data = self.audio_data.as_ref().unwrap();
-            let num_channels = audio_data.number_of_channels();
+            let num_channels = audio_data.num_channels;
 
             for chan in 0..num_channels {
-                let channel_data = audio_data.get_channel_data(chan).unwrap();
+                let channel_data = &audio_data.buffers[chan as usize];
                 let chunks = utils::chunks_fixed(&channel_data, canvas_element.width() as usize);
 
                 chunks.enumerate().for_each(|(i, chunk)| {
@@ -108,8 +109,6 @@ impl Canvas {
                     context.move_to(i as f64, mid - (len / 2.0));
                     context.line_to(i as f64, mid + (len / 2.0));
                     context.stroke();
-
-                    // ConsoleService::log(&format!("index {}: max {}, pc {}, len {}", i, max, pc, len));
                 });
             }
         }
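
The waveform drawing now reads `num_channels` and the per-channel `Vec<f32>` directly from the shared struct instead of going through `web_sys`. `utils::chunks_fixed` itself is not shown in this diff; judging from its call site it splits a channel's samples into one bucket per canvas column, roughly like this sketch (name and behaviour are assumptions, not part of the commit):

```rust
// Assumed stand-in for utils::chunks_fixed (not shown in this diff): split
// `data` into `num_chunks` roughly equal slices, one per canvas column.
fn chunks_fixed<'a>(data: &'a [f32], num_chunks: usize) -> impl Iterator<Item = &'a [f32]> + 'a {
    // Guard against empty input or zero columns so `chunks` never panics.
    let chunk_len = (data.len() / num_chunks.max(1)).max(1);
    data.chunks(chunk_len).take(num_chunks)
}
```

Each bucket then appears to be reduced to a length `len` and drawn as a vertical line around `mid`, as in the hunk above.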

View File

@@ -1,6 +1,7 @@
 use crate::agents::audio_agent::{self, AudioAgent, AudioData};
 use cpal::traits::{DeviceTrait, HostTrait, StreamTrait};
-use cpal::{Device, SampleFormat, SampleRate, Stream, StreamConfig};
+use cpal::{Device, Sample, SampleFormat, SampleRate, Stream, StreamConfig};
+use std::sync::Arc;
 use yew::agent::Dispatcher;
 use yew::prelude::*;
 use yew::services::ConsoleService;
@@ -15,12 +16,12 @@ pub struct Player {
     status: Status,
     stream: Option<Stream>,
     audio_agent: Box<dyn Bridge<AudioAgent>>,
-    audio_data: Option<AudioData>,
+    audio_data: Option<Arc<AudioData>>,
 }
 
 pub enum Msg {
     Play,
-    AudioAgentMessage(Result<AudioData, String>),
+    AudioAgentMessage(Result<Arc<AudioData>, String>),
 }
 
 impl Component for Player {
@@ -63,7 +64,7 @@
 }
 
 impl Player {
-    fn handle_samples_loaded(&mut self, audio_data: AudioData) -> ShouldRender {
+    fn handle_samples_loaded(&mut self, audio_data: Arc<AudioData>) -> ShouldRender {
         ConsoleService::log("Player: samples loaded");
         self.audio_data = Some(audio_data);
         true
@@ -84,7 +85,7 @@
         let config = device
             .supported_output_configs()
             .unwrap()
-            .nth(1)
+            .nth(0)
             .unwrap()
             .with_sample_rate(SampleRate(44100));
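
Switching from `.nth(1)` to `.nth(0)` simply takes the first config range the backend reports; there is no guarantee it supports f32 output, and `with_sample_rate(SampleRate(44100))` panics in cpal if 44.1 kHz falls outside that range. A more defensive selection might look like the sketch below (assumes a cpal 0.12-era API and that f32 output is wanted; `pick_config` is not part of the commit):

```rust
use cpal::traits::DeviceTrait;
use cpal::{Device, SampleFormat, SampleRate, SupportedStreamConfig};

// Sketch only: choose a config range that actually supports f32 at 44.1 kHz
// instead of relying on the iteration order of supported_output_configs().
fn pick_config(device: &Device) -> Option<SupportedStreamConfig> {
    let target = SampleRate(44100);
    device
        .supported_output_configs()
        .ok()?
        .find(|range| {
            range.sample_format() == SampleFormat::F32
                && range.min_sample_rate() <= target
                && range.max_sample_rate() >= target
        })
        .map(|range| range.with_sample_rate(target))
}
```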
@@ -106,14 +107,29 @@
     }
 }
 
-fn run<T>(device: &Device, config: &StreamConfig, audio_data: AudioData) -> Stream
+fn run<T>(device: &Device, config: &StreamConfig, audio_data: Arc<AudioData>) -> Stream
 where
     T: cpal::Sample,
 {
     let err_fn = |err| ConsoleService::warn(&format!("an error occurred on stream: {}", err));
+    let num_channels = audio_data.num_channels as usize;
+    let mut idx: usize = 0; // current playback frame
 
     let stream = device
-        .build_output_stream(config, move |data: &mut [T], _| {}, err_fn)
+        .build_output_stream(
+            config,
+            move |output: &mut [T], _| {
+                for (i, frame) in output.chunks_mut(num_channels).enumerate() {
+                    for (j, sample) in frame.iter_mut().enumerate() {
+                        let buffer = &audio_data.buffers[j];
+                        let value: T = Sample::from::<f32>(&buffer[idx]);
+                        *sample = value;
+                    }
+                    idx += 1;
+                }
+            },
+            err_fn,
+        )
         .unwrap();
 
     stream.play().unwrap();
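
The callback walks the output buffer frame by frame and converts each stored f32 sample to the stream's sample type `T`. As written it never checks `idx` against the decoded length, so the stream will panic with an out-of-bounds index once playback reaches the end of the clip, and the loop variable `i` is unused. A bounds-checked variant of the same body might look like this sketch (`fill_output` is a hypothetical helper, not part of the commit):

```rust
// Sketch only: the same per-frame copy with an end-of-buffer guard.
// Past the end of the decoded audio it writes silence instead of panicking.
fn fill_output<T: cpal::Sample>(output: &mut [T], audio_data: &AudioData, idx: &mut usize) {
    let num_channels = audio_data.num_channels as usize;
    for frame in output.chunks_mut(num_channels) {
        for (ch, sample) in frame.iter_mut().enumerate() {
            let value = audio_data.buffers[ch]
                .get(*idx)
                .copied()
                .unwrap_or(0.0); // silence once idx passes the end
            *sample = cpal::Sample::from::<f32>(&value);
        }
        *idx += 1;
    }
}
```

Inside `run<T>` the closure body could then become `move |output: &mut [T], _| fill_output(output, &audio_data, &mut idx)`, with `idx` still owned by the closure.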