diff --git a/CHANGELOG.md b/CHANGELOG.md
index bf88d2465..09c909953 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,6 +3,7 @@
 - Add `Sample::bits_per_sample` method.
 - Update `audio_thread_priority` to 0.34.
 - AAudio: Configure buffer to ensure consistent callback buffer sizes.
+- AAudio: Fix the buffer size range detection by querying the AudioService property correctly.
 - ALSA: Improve `BufferSize::Fixed` precision and audio callback performance.
 - ALSA: Change `BufferSize::Default` to use the device defaults.
 - ALSA: Change card enumeration to work like `aplay -L` does.
diff --git a/src/host/aaudio/android_media.rs b/src/host/aaudio/android_media.rs
deleted file mode 100644
index f2a958031..000000000
--- a/src/host/aaudio/android_media.rs
+++ /dev/null
@@ -1,62 +0,0 @@
-use std::sync::Arc;
-
-extern crate jni;
-
-use self::jni::Executor;
-use self::jni::{errors::Result as JResult, JNIEnv, JavaVM};
-
-// constants from android.media.AudioFormat
-pub const ENCODING_PCM_16BIT: i32 = 2;
-pub const ENCODING_PCM_FLOAT: i32 = 4;
-pub const CHANNEL_OUT_MONO: i32 = 4;
-pub const CHANNEL_OUT_STEREO: i32 = 12;
-
-fn with_attached<F, R>(closure: F) -> JResult<R>
-where
-    F: FnOnce(&mut JNIEnv) -> JResult<R>,
-{
-    let android_context = ndk_context::android_context();
-    let vm = Arc::new(unsafe { JavaVM::from_raw(android_context.vm().cast())? });
-    Executor::new(vm).with_attached(|env| closure(env))
-}
-
-fn get_min_buffer_size(
-    class: &'static str,
-    sample_rate: i32,
-    channel_mask: i32,
-    format: i32,
-) -> i32 {
-    // Unwrapping everything because these operations are not expected to fail
-    // or throw exceptions. Android returns negative values for invalid parameters,
-    // which is what we expect.
-    with_attached(|env| {
-        let class = env.find_class(class).unwrap();
-        env.call_static_method(
-            class,
-            "getMinBufferSize",
-            "(III)I",
-            &[sample_rate.into(), channel_mask.into(), format.into()],
-        )
-        .unwrap()
-        .i()
-    })
-    .unwrap()
-}
-
-pub fn get_audio_track_min_buffer_size(sample_rate: i32, channel_mask: i32, format: i32) -> i32 {
-    get_min_buffer_size(
-        "android/media/AudioTrack",
-        sample_rate,
-        channel_mask,
-        format,
-    )
-}
-
-pub fn get_audio_record_min_buffer_size(sample_rate: i32, channel_mask: i32, format: i32) -> i32 {
-    get_min_buffer_size(
-        "android/media/AudioRecord",
-        sample_rate,
-        channel_mask,
-        format,
-    )
-}
diff --git a/src/host/aaudio/java_interface.rs b/src/host/aaudio/java_interface.rs
index 508496f5e..ab778517e 100644
--- a/src/host/aaudio/java_interface.rs
+++ b/src/host/aaudio/java_interface.rs
@@ -1,4 +1,5 @@
 mod audio_features;
+mod audio_manager;
 mod definitions;
 mod devices_info;
 mod utils;
diff --git a/src/host/aaudio/java_interface/audio_manager.rs b/src/host/aaudio/java_interface/audio_manager.rs
new file mode 100644
index 000000000..2f8d386e7
--- /dev/null
+++ b/src/host/aaudio/java_interface/audio_manager.rs
@@ -0,0 +1,33 @@
+use super::{
+    utils::{
+        get_context, get_property, get_system_service, with_attached, JNIEnv, JObject, JResult,
+    },
+    AudioManager, Context,
+};
+
+impl AudioManager {
+    /// Get the frames per buffer using Android Java API
+    pub fn get_frames_per_buffer() -> Result<i32, String> {
+        let context = get_context();
+
+        with_attached(context, |env, context| get_frames_per_buffer(env, &context))
+            .map_err(|error| error.to_string())
+    }
+}
+
+fn get_frames_per_buffer<'j>(env: &mut JNIEnv<'j>, context: &JObject<'j>) -> JResult<i32> {
+    let audio_manager = get_system_service(env, context, Context::AUDIO_SERVICE)?;
+
+    let frames_per_buffer = get_property(
+        env,
+        &audio_manager,
+        AudioManager::PROPERTY_OUTPUT_FRAMES_PER_BUFFER,
+    )?;
+
+    let frames_per_buffer_string = String::from(env.get_string(&frames_per_buffer)?);
+
+    // TODO: Use jni::errors::Error::ParseFailed instead of jni::errors::Error::JniCall once jni > v0.21.1 is released
+    frames_per_buffer_string
+        .parse::<i32>()
+        .map_err(|e| jni::errors::Error::JniCall(jni::errors::JniError::Unknown))
+}
diff --git a/src/host/aaudio/java_interface/definitions.rs b/src/host/aaudio/java_interface/definitions.rs
index a7eaa6047..b2e35c522 100644
--- a/src/host/aaudio/java_interface/definitions.rs
+++ b/src/host/aaudio/java_interface/definitions.rs
@@ -21,8 +21,6 @@ impl PackageManager {
 pub(crate) struct AudioManager;
 
 impl AudioManager {
-    pub const PROPERTY_OUTPUT_SAMPLE_RATE: &'static str =
-        "android.media.property.OUTPUT_SAMPLE_RATE";
     pub const PROPERTY_OUTPUT_FRAMES_PER_BUFFER: &'static str =
         "android.media.property.OUTPUT_FRAMES_PER_BUFFER";
 
diff --git a/src/host/aaudio/mod.rs b/src/host/aaudio/mod.rs
index a1e48964b..5fbf4330a 100644
--- a/src/host/aaudio/mod.rs
+++ b/src/host/aaudio/mod.rs
@@ -1,4 +1,3 @@
-use std::cell::RefCell;
 use std::cmp;
 use std::convert::TryInto;
 use std::time::{Duration, Instant};
@@ -7,30 +6,30 @@ use std::vec::IntoIter as VecIntoIter;
 extern crate ndk;
 
 use convert::{stream_instant, to_stream_instant};
-use java_interface::{AudioDeviceDirection, AudioDeviceInfo};
+use java_interface::{AudioDeviceDirection, AudioDeviceInfo, AudioManager};
 
 use crate::traits::{DeviceTrait, HostTrait, StreamTrait};
 use crate::{
     BackendSpecificError, BufferSize, BuildStreamError, Data, DefaultStreamConfigError,
     DeviceNameError, DevicesError, InputCallbackInfo, InputStreamTimestamp, OutputCallbackInfo,
     OutputStreamTimestamp, PauseStreamError, PlayStreamError, SampleFormat, SampleRate,
-    SizedSample, StreamConfig, StreamError, SupportedBufferSize, SupportedStreamConfig,
+    StreamConfig, StreamError, SupportedBufferSize, SupportedStreamConfig,
     SupportedStreamConfigRange, SupportedStreamConfigsError,
 };
 
-mod android_media;
 mod convert;
 mod java_interface;
 
-use self::android_media::{get_audio_record_min_buffer_size, get_audio_track_min_buffer_size};
 use self::ndk::audio::AudioStream;
 
+// constants from android.media.AudioFormat
+const CHANNEL_OUT_MONO: i32 = 4;
+const CHANNEL_OUT_STEREO: i32 = 12;
+
 // Android Java API supports up to 8 channels
 // TODO: more channels available in native AAudio
-const CHANNEL_MASKS: [i32; 2] = [
-    android_media::CHANNEL_OUT_MONO,
-    android_media::CHANNEL_OUT_STEREO,
-];
+// Maps channel masks to their corresponding channel counts
+const CHANNEL_CONFIGS: [(i32, u16); 2] = [(CHANNEL_OUT_MONO, 1), (CHANNEL_OUT_STEREO, 2)];
 
 const SAMPLE_RATES: [i32; 13] = [
     5512, 8000, 11025, 16000, 22050, 32000, 44100, 48000, 64000, 88200, 96000, 176_400, 192_000,
@@ -91,18 +90,8 @@ impl HostTrait for Host {
     }
 }
 
-fn buffer_size_range_for_params(
-    is_output: bool,
-    sample_rate: i32,
-    channel_mask: i32,
-    android_format: i32,
-) -> SupportedBufferSize {
-    let min_buffer_size = if is_output {
-        get_audio_track_min_buffer_size(sample_rate, channel_mask, android_format)
-    } else {
-        get_audio_record_min_buffer_size(sample_rate, channel_mask, android_format)
-    };
-    if min_buffer_size > 0 {
+fn buffer_size_range() -> SupportedBufferSize {
+    if let Ok(min_buffer_size) = AudioManager::get_frames_per_buffer() {
         SupportedBufferSize::Range {
             min: min_buffer_size as u32,
             max: i32::MAX as u32,
@@ -112,34 +101,21 @@
     }
 }
 
-fn default_supported_configs(is_output: bool) -> VecIntoIter<SupportedStreamConfigRange> {
-    // Have to "brute force" the parameter combinations with getMinBufferSize
+fn default_supported_configs() -> VecIntoIter<SupportedStreamConfigRange> {
     const FORMATS: [SampleFormat; 2] = [SampleFormat::I16, SampleFormat::F32];
 
-    let mut output = Vec::with_capacity(SAMPLE_RATES.len() * CHANNEL_MASKS.len() * FORMATS.len());
+    let buffer_size = buffer_size_range();
+    let mut output = Vec::with_capacity(SAMPLE_RATES.len() * CHANNEL_CONFIGS.len() * FORMATS.len());
     for sample_format in &FORMATS {
-        let android_format = if *sample_format == SampleFormat::I16 {
-            android_media::ENCODING_PCM_16BIT
-        } else {
-            android_media::ENCODING_PCM_FLOAT
-        };
-        for (mask_idx, channel_mask) in CHANNEL_MASKS.iter().enumerate() {
-            let channel_count = mask_idx + 1;
+        for (_channel_mask, channel_count) in &CHANNEL_CONFIGS {
             for sample_rate in &SAMPLE_RATES {
-                if let SupportedBufferSize::Range { min, max } = buffer_size_range_for_params(
-                    is_output,
-                    *sample_rate,
-                    *channel_mask,
-                    android_format,
-                ) {
-                    output.push(SupportedStreamConfigRange {
-                        channels: channel_count as u16,
-                        min_sample_rate: SampleRate(*sample_rate as u32),
-                        max_sample_rate: SampleRate(*sample_rate as u32),
-                        buffer_size: SupportedBufferSize::Range { min, max },
-                        sample_format: *sample_format,
-                    });
-                }
+                output.push(SupportedStreamConfigRange {
+                    channels: *channel_count,
+                    min_sample_rate: SampleRate(*sample_rate as u32),
+                    max_sample_rate: SampleRate(*sample_rate as u32),
+                    buffer_size: buffer_size.clone(),
+                    sample_format: *sample_format,
+                });
             }
         }
     }
@@ -147,10 +123,7 @@ fn default_supported_configs(is_output: bool) -> VecIntoIter
     output.into_iter()
 }
 
-fn device_supported_configs(
-    device: &AudioDeviceInfo,
-    is_output: bool,
-) -> VecIntoIter<SupportedStreamConfigRange> {
+fn device_supported_configs(device: &AudioDeviceInfo) -> VecIntoIter<SupportedStreamConfigRange> {
     let sample_rates = if !device.sample_rates.is_empty() {
         device.sample_rates.as_slice()
     } else {
@@ -171,6 +144,7 @@ fn device_supported_configs(
         &ALL_FORMATS
     };
 
+    let buffer_size = buffer_size_range();
     let mut output = Vec::with_capacity(sample_rates.len() * channel_counts.len() * formats.len());
     for sample_rate in sample_rates {
         for channel_count in channel_counts {
@@ -180,25 +154,13 @@ fn device_supported_configs(
                 // TODO: more channels available in native AAudio
                 continue;
             }
-            let channel_mask = CHANNEL_MASKS[*channel_count as usize - 1];
             for format in formats {
-                let (android_format, sample_format) = match format {
-                    SampleFormat::I16 => (android_media::ENCODING_PCM_16BIT, SampleFormat::I16),
-                    SampleFormat::F32 => (android_media::ENCODING_PCM_FLOAT, SampleFormat::F32),
-                    _ => panic!("Unexpected format"),
-                };
-                let buffer_size = buffer_size_range_for_params(
-                    is_output,
-                    *sample_rate,
-                    channel_mask,
-                    android_format,
-                );
                 output.push(SupportedStreamConfigRange {
                     channels: cmp::min(*channel_count as u16, 2u16),
                     min_sample_rate: SampleRate(*sample_rate as u32),
                     max_sample_rate: SampleRate(*sample_rate as u32),
-                    buffer_size,
-                    sample_format,
+                    buffer_size: buffer_size.clone(),
+                    sample_format: *format,
                 });
             }
         }
@@ -339,9 +301,9 @@ impl DeviceTrait for Device {
         &self,
     ) -> Result<Self::SupportedInputConfigs, SupportedStreamConfigsError> {
         if let Some(info) = &self.0 {
-            Ok(device_supported_configs(info, false))
+            Ok(device_supported_configs(info))
         } else {
-            Ok(default_supported_configs(false))
+            Ok(default_supported_configs())
         }
     }
 
@@ -349,9 +311,9 @@ impl DeviceTrait for Device {
         &self,
     ) -> Result<Self::SupportedOutputConfigs, SupportedStreamConfigsError> {
         if let Some(info) = &self.0 {
-            Ok(device_supported_configs(info, true))
+            Ok(device_supported_configs(info))
         } else {
-            Ok(default_supported_configs(true))
+            Ok(default_supported_configs())
         }
     }
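
Illustrative note (not part of the patch): a minimal sketch of how the buffer size range produced by the new `buffer_size_range()` path surfaces through cpal's public API on Android. The default-device lookup and the fallback to `BufferSize::Default` are assumptions made for the example, not code from this change.

    // Assumes a cpal build that includes this change, running on an Android device.
    use cpal::traits::{DeviceTrait, HostTrait};
    use cpal::{BufferSize, SupportedBufferSize};

    fn request_min_buffer_size() -> Result<(), Box<dyn std::error::Error>> {
        let host = cpal::default_host();
        let device = host.default_output_device().ok_or("no output device")?;

        // With this change, each supported config reports a range whose minimum comes from
        // android.media.property.OUTPUT_FRAMES_PER_BUFFER instead of getMinBufferSize.
        let config_range = device
            .supported_output_configs()?
            .next()
            .ok_or("no supported configs")?;

        // Pick the smallest reported buffer size, falling back to the backend default.
        let buffer_size = match config_range.buffer_size() {
            SupportedBufferSize::Range { min, .. } => BufferSize::Fixed(*min),
            SupportedBufferSize::Unknown => BufferSize::Default,
        };

        let mut config = config_range.with_max_sample_rate().config();
        config.buffer_size = buffer_size;
        println!("requesting buffer size: {:?}", config.buffer_size);
        Ok(())
    }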