Merge pull request #5 from tomaka/add-play-pause

Add play() and pause() functions
Committed by tomaka on 2014-12-23 15:28:43 +01:00
commit 73cd809636
5 changed files with 88 additions and 30 deletions
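
For orientation, a minimal sketch of the call pattern this PR introduces, condensed from the updated `examples/beep.rs` diff below. The `extern crate` line and the `0u16` sample value are placeholders (the real example writes a sine wave), and the snippet targets the cpal API as it stands at this commit rather than any later version:

```rust
extern crate cpal;

fn main() {
    let mut channel = cpal::Voice::new();

    loop {
        // Scope the buffer so it is released back to the device
        // before the voice is touched again.
        {
            let mut buffer = channel.append_data(1, cpal::SamplesRate(44100), 32768);
            for sample in buffer.iter_mut() {
                *sample = 0u16; // placeholder; beep.rs computes a sine wave here
            }
        }

        // New in this PR: explicitly tell the device to start playing
        // after data has been submitted.
        channel.play();
    }
}
```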

View File

@@ -18,6 +18,7 @@ fn main() {
     });
     loop {
+        {
         let mut buffer = channel.append_data(1, cpal::SamplesRate(44100), 32768);
         for sample in buffer.iter_mut() {
@@ -25,4 +26,7 @@ fn main() {
             *sample = value;
         }
     }
+        channel.play();
+    }
 }
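
A note on the new `{ }` scope in this example (the music example below gets the same treatment): as the examples suggest, `append_data` borrows the voice mutably and hands back a buffer that keeps that borrow alive until it is dropped, so the extra block is what frees the voice up for the following `channel.play()` call. A self-contained model of that shape, using stand-in types rather than cpal's real ones:

```rust
// Stand-in types, not cpal's actual API.
struct Voice;
struct Buffer<'a> {
    _voice: &'a mut Voice,
}

impl<'a> Drop for Buffer<'a> {
    // In cpal this is where the samples are handed over to the device.
    fn drop(&mut self) {}
}

impl Voice {
    fn append_data(&mut self) -> Buffer<'_> {
        Buffer { _voice: self }
    }
    fn play(&mut self) {}
}

fn main() {
    let mut voice = Voice;
    {
        let _buffer = voice.append_data();
        // fill the buffer here; `voice` stays mutably borrowed
        // until `_buffer` is dropped at the end of this block
    }
    voice.play(); // fine: the borrow ended with the block above
}
```

Without the inner block, the borrow checker rejects the `play` call while `_buffer` is still alive.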

View File

@@ -5,6 +5,7 @@ use std::io::BufReader;
 fn main() {
     let mut channel = cpal::Voice::new();
+    channel.play();
     let mut decoder = vorbis::Decoder::new(BufReader::new(include_bin!("music.ogg")))
         .unwrap();
@@ -20,7 +21,9 @@ fn main() {
             continue 'main;
         }
-        let mut buffer = channel.append_data(channels, cpal::SamplesRate(rate as u32), data.len());
+        {
+        let mut buffer = channel.append_data(channels, cpal::SamplesRate(rate as u32),
+                                              data.len());
         let mut buffer = buffer.iter_mut();
         loop {
@@ -37,5 +40,8 @@ fn main() {
             }
         }
     }
+        channel.play();
+    }
 }
 }

View File

@@ -63,6 +63,15 @@ impl Voice {
             buffer: Vec::from_elem(elements, unsafe { mem::uninitialized() })
         }
     }
+
+    pub fn play(&mut self) {
+        // already playing
+        //unimplemented!()
+    }
+
+    pub fn pause(&mut self) {
+        unimplemented!()
+    }
 }
 impl Drop for Voice {

View File

@@ -27,8 +27,15 @@ for e in buffer.iter_mut() {
 This is the case if the device doesn't have enough space available. **It happens very often**,
 this is not some obscure situation that can be ignored.
+
+After you have submitted data for the first time, call `play`:
+
+```no_run
+# let mut voice = cpal::Voice::new();
+voice.play();
+```
 The audio device of the user will read the buffer that you sent, and play it. If the audio device
-reaches the end of the data, it will stop playing. You **must** continuously fill the buffer by
+reaches the end of the data, it will stop playing. You must continuously fill the buffer by
 calling `append_data` repeatedly if you don't want the audio to stop playing.

 # Native format
@@ -212,6 +219,25 @@ impl Voice {
             }
         }
     }
+
+    /// Sends a command to the audio device that it should start playing.
+    ///
+    /// Has no effect if the voice was already playing.
+    ///
+    /// Only call this after you have submitted some data, otherwise you may hear
+    /// some glitches.
+    pub fn play(&mut self) {
+        self.0.play()
+    }
+
+    /// Sends a command to the audio device that it should stop playing.
+    ///
+    /// Has no effect if the voice was already paused.
+    ///
+    /// If you call `play` afterwards, the playback will resume exactly where it was.
+    pub fn pause(&mut self) {
+        self.0.pause()
+    }
 }
 impl<'a, T> Deref<[T]> for Buffer<'a, T> {
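
Taken together, the doc comments added above pin down the usage contract. In the same `no_run` doc style as the snippet added to the module docs, and assuming some data has already been queued with `append_data`:

```no_run
# let mut voice = cpal::Voice::new();
# // ... submit some data with append_data ...
voice.play();   // starts playback; no effect if the voice is already playing
voice.pause();  // stops playback; the position in the submitted data is kept
voice.play();   // resumes exactly where playback was paused
```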

View File

@@ -13,15 +13,13 @@ pub struct Voice {
     bytes_per_frame: winapi::WORD,
     samples_per_second: winapi::DWORD,
     bits_per_sample: winapi::WORD,
-    started: bool,
+    playing: bool,
 }
 pub struct Buffer<'a, T> {
-    audio_client: *mut winapi::IAudioClient,
     render_client: *mut winapi::IAudioRenderClient,
     buffer: CVec<T>,
     frames: winapi::UINT32,
-    start_on_drop: bool,
 }
 impl Voice {
@@ -83,18 +81,39 @@ impl Voice {
             };
             let buffer = Buffer {
-                audio_client: self.audio_client,
                 render_client: self.render_client,
                 buffer: buffer,
                 frames: frames_available,
-                start_on_drop: !self.started,
             };
-            self.started = true;
             return buffer;
             }
         }
     }
+
+    pub fn play(&mut self) {
+        if !self.playing {
+            unsafe {
+                let f = self.audio_client.as_mut().unwrap().lpVtbl.as_ref().unwrap().Start;
+                let hresult = f(self.audio_client);
+                check_result(hresult).unwrap();
+            }
+        }
+
+        self.playing = true;
+    }
+
+    pub fn pause(&mut self) {
+        if self.playing {
+            unsafe {
+                let f = self.audio_client.as_mut().unwrap().lpVtbl.as_ref().unwrap().Stop;
+                let hresult = f(self.audio_client);
+                check_result(hresult).unwrap();
+            }
+        }
+
+        self.playing = false;
+    }
 }
 impl Drop for Voice {
@@ -124,12 +143,6 @@ impl<'a, T> Buffer<'a, T> {
             let f = self.render_client.as_mut().unwrap().lpVtbl.as_ref().unwrap().ReleaseBuffer;
             let hresult = f(self.render_client, self.frames as u32, 0);
             check_result(hresult).unwrap();
-
-            if self.start_on_drop {
-                let f = self.audio_client.as_mut().unwrap().lpVtbl.as_ref().unwrap().Start;
-                let hresult = f(self.audio_client);
-                check_result(hresult).unwrap();
-            }
         };
     }
 }
@@ -237,7 +250,7 @@ fn init() -> Result<Voice, String> {
         bytes_per_frame: format.nBlockAlign,
         samples_per_second: format.nSamplesPerSec,
         bits_per_sample: format.wBitsPerSample,
-        started: false,
+        playing: false,
     })
 }
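
The win32 backend above keeps a `playing` flag so that `Start`/`Stop` are only sent to the device on real state transitions. A self-contained sketch of that guard pattern, with plain Rust stand-ins instead of winapi (the `println!` calls mark where the real code goes through the `IAudioClient` vtable and checks the `HRESULT` with `check_result`):

```rust
struct Voice {
    playing: bool,
}

impl Voice {
    fn play(&mut self) {
        if !self.playing {
            // Real backend: IAudioClient::Start, then check_result(hresult).unwrap()
            println!("Start");
        }
        self.playing = true;
    }

    fn pause(&mut self) {
        if self.playing {
            // Real backend: IAudioClient::Stop, then check_result(hresult).unwrap()
            println!("Stop");
        }
        self.playing = false;
    }
}

fn main() {
    let mut voice = Voice { playing: false };
    voice.play();   // issues Start
    voice.play();   // no-op: already playing
    voice.pause();  // issues Stop
    voice.pause();  // no-op: already paused
}
```

Calling `play` or `pause` twice in a row is therefore harmless, which matches the "has no effect" wording in the new lib.rs doc comments.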