diff --git a/audio/AidlConversionXsdc.cpp b/audio/AidlConversionXsdc.cpp new file mode 100644 index 0000000..c404d67 --- /dev/null +++ b/audio/AidlConversionXsdc.cpp @@ -0,0 +1,56 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "AHAL_AidlXsdc" +#include +#include +#include +#include + +#include "core-impl/AidlConversionXsdc.h" + +using aidl::android::media::audio::common::AudioFormatDescription; + +namespace xsd = android::audio::policy::configuration; + +namespace aidl::android::hardware::audio::core::internal { + +ConversionResult xsdc2aidl_AudioFormatDescription(const std::string& xsdc) { + return legacy2aidl_audio_format_t_AudioFormatDescription(::android::formatFromString(xsdc)); +} + +ConversionResult xsdc2aidl_SurroundFormatFamily( + const ::xsd::SurroundFormats::Format& xsdc) { + SurroundSoundConfig::SurroundFormatFamily aidl; + aidl.primaryFormat = VALUE_OR_RETURN(xsdc2aidl_AudioFormatDescription(xsdc.getName())); + if (xsdc.hasSubformats()) { + aidl.subFormats = VALUE_OR_RETURN(convertContainer>( + xsdc.getSubformats(), xsdc2aidl_AudioFormatDescription)); + } + return aidl; +} + +ConversionResult xsdc2aidl_SurroundSoundConfig( + const ::xsd::SurroundSound& xsdc) { + SurroundSoundConfig aidl; + if (!xsdc.hasFormats() || !xsdc.getFirstFormats()->hasFormat()) return aidl; + aidl.formatFamilies = VALUE_OR_RETURN( + convertContainer>( + 
xsdc.getFirstFormats()->getFormat(), xsdc2aidl_SurroundFormatFamily)); + return aidl; +} + +} // namespace aidl::android::hardware::audio::core::internal diff --git a/audio/Android.bp b/audio/Android.bp new file mode 100644 index 0000000..a9ecdc2 --- /dev/null +++ b/audio/Android.bp @@ -0,0 +1,369 @@ +package { + default_team: "trendy_team_android_media_audio_framework", + // See: http://go/android-license-faq + // A large-scale-change added 'default_applicable_licenses' to import + // all of the 'license_kinds' from "hardware_interfaces_license" + // to get the below license kinds: + // SPDX-license-identifier-Apache-2.0 + default_applicable_licenses: ["hardware_interfaces_license"], +} + +cc_defaults { + name: "aidlaudioservice_defaults", + vendor: true, + shared_libs: [ + "libalsautilsv2", + "libaudio_aidl_conversion_common_ndk", + "libaudioaidlcommon", + "libaudioutils", + "libbase", + "libbinder_ndk", + "libcutils", + "libfmq", + "libnbaio_mono", + "liblog", + "libmedia_helper", + "libstagefright_foundation", + "libtinyalsav2", + "libutils", + "libxml2", + "android.hardware.common-V2-ndk", + "android.hardware.common.fmq-V1-ndk", + ], + header_libs: [ + "libaudio_system_headers", + "libaudioaidl_headers", + "libxsdc-utils", + ], + cflags: [ + "-DBACKEND_NDK", + ], +} + +cc_library { + name: "libaudioserviceexampleimpl", + defaults: [ + "aidlaudioservice_defaults", + "latest_android_media_audio_common_types_ndk_shared", + "latest_android_hardware_audio_core_ndk_shared", + "latest_android_hardware_audio_core_sounddose_ndk_shared", + "latest_android_hardware_bluetooth_audio_ndk_shared", + ], + export_include_dirs: ["include"], + srcs: [ + "AidlConversionXsdc.cpp", + "AudioPolicyConfigXmlConverter.cpp", + "Bluetooth.cpp", + "CapEngineConfigXmlConverter.cpp", + "Config.cpp", + "Configuration.cpp", + "EngineConfigXmlConverter.cpp", + "Module.cpp", + "ModulePrimary.cpp", + "SoundDose.cpp", + "Stream.cpp", + "Telephony.cpp", + "XsdcConversion.cpp", + "alsa/Mixer.cpp", 
+ "alsa/ModuleAlsa.cpp", + "alsa/StreamAlsa.cpp", + "alsa/Utils.cpp", + "bluetooth/DevicePortProxy.cpp", + "bluetooth/ModuleBluetooth.cpp", + "bluetooth/StreamBluetooth.cpp", + "deprecated/StreamSwitcher.cpp", + "primary/PrimaryMixer.cpp", + "primary/StreamPrimary.cpp", + "r_submix/ModuleRemoteSubmix.cpp", + "r_submix/SubmixRoute.cpp", + "r_submix/StreamRemoteSubmix.cpp", + "stub/ApeHeader.cpp", + "stub/DriverStubImpl.cpp", + "stub/ModuleStub.cpp", + "stub/StreamMmapStub.cpp", + "stub/StreamOffloadStub.cpp", + "stub/StreamStub.cpp", + "usb/ModuleUsb.cpp", + "usb/StreamUsb.cpp", + "usb/UsbAlsaMixerControl.cpp", + ], + generated_sources: [ + "audio_policy_capengine_configuration_aidl_default", + "audio_policy_configuration_aidl_default", + "audio_policy_engine_configuration_aidl_default", + ], + generated_headers: [ + "audio_policy_capengine_configuration_aidl_default", + "audio_policy_configuration_aidl_default", + "audio_policy_engine_configuration_aidl_default", + ], + export_generated_headers: [ + "audio_policy_capengine_configuration_aidl_default", + "audio_policy_configuration_aidl_default", + "audio_policy_engine_configuration_aidl_default", + ], + shared_libs: [ + "android.hardware.bluetooth.audio-impl", + "libaudio_aidl_conversion_common_ndk", + "libaudioutils", + "libbluetooth_audio_session_aidl", + "liblog", + "libmedia_helper", + "libmediautils_vendor", + "libstagefright_foundation", + ], + export_shared_lib_headers: [ + "libaudio_aidl_conversion_common_ndk", + ], + cflags: [ + "-Wall", + "-Wextra", + "-Werror", + "-Wthread-safety", + "-DBACKEND_NDK", + ], +} + +cc_library { + name: "libeffectconfig", + srcs: [ + "EffectConfig.cpp", + ], + defaults: [ + "latest_android_hardware_audio_effect_ndk_shared", + "latest_android_media_audio_common_types_ndk_shared", + ], + shared_libs: [ + "libaudioutils", + "libaudio_aidl_conversion_common_ndk", + "libbase", + "libbinder_ndk", + "liblog", + "libmedia_helper", + "libtinyxml2", + "libutils", + ], + header_libs: [ 
+ "libaudio_system_headers", + "libaudioaidl_headers", + ], + export_shared_lib_headers: [ + "libtinyxml2", + ], + export_include_dirs: [ + "include", + ], + cflags: [ + "-Wall", + "-Wextra", + "-Werror", + "-DBACKEND_NDK", + ], + vendor: true, + host_supported: true, + target: { + android: { + shared_libs: [ + "libapexsupport", + ], + }, + }, +} + +cc_binary { + name: "android.hardware.audio.service-aidl.example", + relative_install_path: "hw", + defaults: [ + "aidlaudioservice_defaults", + "latest_android_hardware_audio_core_sounddose_ndk_shared", + "latest_android_hardware_audio_core_ndk_shared", + "latest_android_hardware_bluetooth_audio_ndk_shared", + "latest_android_media_audio_common_types_ndk_shared", + ], + static_libs: [ + "libaudioserviceexampleimpl", + ], + shared_libs: [ + "android.hardware.bluetooth.audio-impl", + "libaudio_aidl_conversion_common_ndk", + "libbluetooth_audio_session_aidl", + "liblog", + "libmedia_helper", + "libstagefright_foundation", + ], + srcs: ["main.cpp"], + cflags: [ + "-Wall", + "-Wextra", + "-Werror", + "-Wthread-safety", + "-DBACKEND_NDK", + ], + installable: false, //installed in apex com.android.hardware.audio +} + +cc_test { + name: "audio_policy_config_xml_converter_tests", + vendor_available: true, + defaults: [ + "latest_android_media_audio_common_types_ndk_static", + "latest_android_hardware_audio_core_ndk_static", + ], + shared_libs: [ + "libaudio_aidl_conversion_common_ndk", + "libaudioaidlcommon", + "libaudioutils", + "libbase", + "libbinder_ndk", + "libcutils", + "libfmq", + "libmedia_helper", + "libstagefright_foundation", + "libutils", + "libxml2", + ], + header_libs: [ + "libaudio_system_headers", + "libaudioaidl_headers", + "libxsdc-utils", + ], + generated_sources: [ + "audio_policy_configuration_aidl_default", + "audio_policy_engine_configuration_aidl_default", + ], + generated_headers: [ + "audio_policy_configuration_aidl_default", + "audio_policy_engine_configuration_aidl_default", + ], + srcs: [ + 
"AudioPolicyConfigXmlConverter.cpp", + "tests/AudioPolicyConfigXmlConverterTest.cpp", + ], + cflags: [ + "-Wall", + "-Wextra", + "-Werror", + "-Wthread-safety", + "-DBACKEND_NDK", + ], + test_suites: ["general-tests"], +} + +cc_test { + name: "audio_alsa_utils_tests", + vendor_available: true, + defaults: [ + "latest_android_media_audio_common_types_ndk_static", + "latest_android_hardware_audio_core_ndk_static", + ], + static_libs: [ + "libalsautilsv2", + "libtinyalsav2", + ], + shared_libs: [ + "libaudio_aidl_conversion_common_ndk", + "libaudioaidlcommon", + "libaudioutils", + "libbase", + "libbinder_ndk", + "libcutils", + "libfmq", + "libmedia_helper", + "libstagefright_foundation", + "libutils", + ], + header_libs: [ + "libaudio_system_headers", + "libaudioaidl_headers", + ], + srcs: [ + "alsa/Utils.cpp", + "tests/AlsaUtilsTest.cpp", + ], + cflags: [ + "-Wall", + "-Wextra", + "-Werror", + "-Wthread-safety", + "-DBACKEND_NDK", + ], + test_suites: ["general-tests"], +} + +cc_defaults { + name: "aidlaudioeffectservice_defaults", + defaults: [ + "latest_android_media_audio_common_types_ndk_shared", + "latest_android_hardware_audio_effect_ndk_shared", + ], + vendor: true, + shared_libs: [ + "libaudio_aidl_conversion_common_ndk", + "libaudioaidlcommon", + "libaudioutils", + "libbase", + "libbinder_ndk", + "libcutils", + "libfmq", + "liblog", + "libutils", + "android.hardware.common-V2-ndk", + "android.hardware.common.fmq-V1-ndk", + ], + header_libs: [ + "libaudioaidl_headers", + "libaudio_system_headers", + "libsystem_headers", + ], + cflags: [ + "-Wall", + "-Wextra", + "-Werror", + "-Wthread-safety", + "-DBACKEND_NDK", + ], +} + +filegroup { + name: "effectCommonFile", + srcs: [ + "EffectContext.cpp", + "EffectThread.cpp", + "EffectImpl.cpp", + ], +} + +cc_binary { + name: "android.hardware.audio.effect.service-aidl.example", + relative_install_path: "hw", + defaults: ["aidlaudioeffectservice_defaults"], + shared_libs: [ + "libapexsupport", + "libeffectconfig", + ], 
+ srcs: [ + "EffectFactory.cpp", + "EffectMain.cpp", + ], + installable: false, //installed in apex com.android.hardware.audio +} + +cc_library_headers { + name: "libaudioaidl_headers", + export_include_dirs: ["include"], + vendor_available: true, + host_supported: true, +} + +prebuilt_etc { + name: "android.hardware.audio.service-aidl.example.rc", + src: "android.hardware.audio.service-aidl.example.rc", + installable: false, +} + +prebuilt_etc { + name: "android.hardware.audio.service-aidl.xml", + src: "android.hardware.audio.service-aidl.xml", + sub_dir: "vintf", + installable: false, +} diff --git a/audio/AudioPolicyConfigXmlConverter.cpp b/audio/AudioPolicyConfigXmlConverter.cpp new file mode 100644 index 0000000..2f1282a --- /dev/null +++ b/audio/AudioPolicyConfigXmlConverter.cpp @@ -0,0 +1,198 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include + +#include +#include + +#define LOG_TAG "AHAL_ApmXmlConverter" +#include + +#include +#include +#include + +#include "core-impl/AidlConversionXsdc.h" +#include "core-impl/AudioPolicyConfigXmlConverter.h" +#include "core-impl/XsdcConversion.h" + +using aidl::android::media::audio::common::AudioFormatDescription; +using aidl::android::media::audio::common::AudioHalEngineConfig; +using aidl::android::media::audio::common::AudioHalVolumeCurve; +using aidl::android::media::audio::common::AudioHalVolumeGroup; +using aidl::android::media::audio::common::AudioStreamType; + +namespace ap_xsd = android::audio::policy::configuration; + +namespace aidl::android::hardware::audio::core::internal { + +static const int kDefaultVolumeIndexMin = 0; +static const int kDefaultVolumeIndexMax = 100; +static const int KVolumeIndexDeferredToAudioService = -1; + +ConversionResult AudioPolicyConfigXmlConverter::convertVolumeCurveToAidl( + const ap_xsd::Volume& xsdcVolumeCurve) { + AudioHalVolumeCurve aidlVolumeCurve; + aidlVolumeCurve.deviceCategory = + static_cast(xsdcVolumeCurve.getDeviceCategory()); + if (xsdcVolumeCurve.hasRef()) { + if (mVolumesReferenceMap.empty()) { + mVolumesReferenceMap = generateReferenceMap( + getXsdcConfig()->getVolumes()); + } + aidlVolumeCurve.curvePoints = VALUE_OR_FATAL( + (convertCollectionToAidl( + mVolumesReferenceMap.at(xsdcVolumeCurve.getRef()).getPoint(), + &convertCurvePointToAidl))); + } else { + aidlVolumeCurve.curvePoints = VALUE_OR_FATAL( + (convertCollectionToAidl( + xsdcVolumeCurve.getPoint(), &convertCurvePointToAidl))); + } + return aidlVolumeCurve; +} + +void AudioPolicyConfigXmlConverter::mapStreamToVolumeCurve(const ap_xsd::Volume& xsdcVolumeCurve) { + mStreamToVolumeCurvesMap[xsdcVolumeCurve.getStream()].push_back( + VALUE_OR_FATAL(convertVolumeCurveToAidl(xsdcVolumeCurve))); +} + +const SurroundSoundConfig& AudioPolicyConfigXmlConverter::getSurroundSoundConfig() { + static const SurroundSoundConfig 
aidlSurroundSoundConfig = [this]() { + if (auto xsdcConfig = getXsdcConfig(); xsdcConfig && xsdcConfig->hasSurroundSound()) { + auto configConv = xsdc2aidl_SurroundSoundConfig(*xsdcConfig->getFirstSurroundSound()); + if (configConv.ok()) { + return configConv.value(); + } + LOG(ERROR) << "There was an error converting surround formats to AIDL: " + << configConv.error(); + } + LOG(WARNING) << "Audio policy config does not have section, using default"; + return getDefaultSurroundSoundConfig(); + }(); + return aidlSurroundSoundConfig; +} + +std::unique_ptr +AudioPolicyConfigXmlConverter::releaseModuleConfigs() { + return std::move(mModuleConfigurations); +} + +const AudioHalEngineConfig& AudioPolicyConfigXmlConverter::getAidlEngineConfig() { + if (mAidlEngineConfig.volumeGroups.empty() && getXsdcConfig() && + getXsdcConfig()->hasVolumes()) { + parseVolumes(); + } + return mAidlEngineConfig; +} + +// static +const SurroundSoundConfig& AudioPolicyConfigXmlConverter::getDefaultSurroundSoundConfig() { + // Provide a config similar to the one used by the framework by default + // (see AudioPolicyConfig::setDefaultSurroundFormats). 
+#define ENCODED_FORMAT(format) \ + AudioFormatDescription { \ + .encoding = ::android::format \ + } +#define SIMPLE_FORMAT(format) \ + SurroundSoundConfig::SurroundFormatFamily { \ + .primaryFormat = ENCODED_FORMAT(format) \ + } + + static const SurroundSoundConfig defaultConfig = { + .formatFamilies = { + SIMPLE_FORMAT(MEDIA_MIMETYPE_AUDIO_AC3), + SIMPLE_FORMAT(MEDIA_MIMETYPE_AUDIO_EAC3), + SIMPLE_FORMAT(MEDIA_MIMETYPE_AUDIO_DTS), + SIMPLE_FORMAT(MEDIA_MIMETYPE_AUDIO_DTS_HD), + SIMPLE_FORMAT(MEDIA_MIMETYPE_AUDIO_DTS_HD_MA), + SIMPLE_FORMAT(MEDIA_MIMETYPE_AUDIO_DTS_UHD_P1), + SIMPLE_FORMAT(MEDIA_MIMETYPE_AUDIO_DTS_UHD_P2), + SIMPLE_FORMAT(MEDIA_MIMETYPE_AUDIO_DOLBY_TRUEHD), + SIMPLE_FORMAT(MEDIA_MIMETYPE_AUDIO_EAC3_JOC), + SurroundSoundConfig::SurroundFormatFamily{ + .primaryFormat = ENCODED_FORMAT(MEDIA_MIMETYPE_AUDIO_AAC_LC), + .subFormats = + { + ENCODED_FORMAT(MEDIA_MIMETYPE_AUDIO_AAC_HE_V1), + ENCODED_FORMAT(MEDIA_MIMETYPE_AUDIO_AAC_HE_V2), + ENCODED_FORMAT(MEDIA_MIMETYPE_AUDIO_AAC_ELD), + ENCODED_FORMAT(MEDIA_MIMETYPE_AUDIO_AAC_XHE), + }}, + SIMPLE_FORMAT(MEDIA_MIMETYPE_AUDIO_AC4), + }}; +#undef SIMPLE_FORMAT +#undef ENCODED_FORMAT + + return defaultConfig; +} + +void AudioPolicyConfigXmlConverter::mapStreamsToVolumeCurves() { + if (getXsdcConfig()->hasVolumes()) { + for (const ap_xsd::Volumes& xsdcWrapperType : getXsdcConfig()->getVolumes()) { + for (const ap_xsd::Volume& xsdcVolume : xsdcWrapperType.getVolume()) { + mapStreamToVolumeCurve(xsdcVolume); + } + } + } +} + +void AudioPolicyConfigXmlConverter::addVolumeGroupstoEngineConfig() { + for (const auto& [xsdcStream, volumeCurves] : mStreamToVolumeCurvesMap) { + AudioHalVolumeGroup volumeGroup; + volumeGroup.name = ap_xsd::toString(xsdcStream); + if (static_cast(xsdcStream) >= AUDIO_STREAM_PUBLIC_CNT) { + volumeGroup.minIndex = kDefaultVolumeIndexMin; + volumeGroup.maxIndex = kDefaultVolumeIndexMax; + } else { + volumeGroup.minIndex = KVolumeIndexDeferredToAudioService; + volumeGroup.maxIndex = 
KVolumeIndexDeferredToAudioService; + } + volumeGroup.volumeCurves = volumeCurves; + mAidlEngineConfig.volumeGroups.push_back(std::move(volumeGroup)); + } +} + +void AudioPolicyConfigXmlConverter::parseVolumes() { + if (mStreamToVolumeCurvesMap.empty() && getXsdcConfig()->hasVolumes()) { + mapStreamsToVolumeCurves(); + addVolumeGroupstoEngineConfig(); + } +} + +void AudioPolicyConfigXmlConverter::init() { + if (!getXsdcConfig()->hasModules()) return; + for (const ap_xsd::Modules& xsdcModulesType : getXsdcConfig()->getModules()) { + if (!xsdcModulesType.has_module()) continue; + for (const ap_xsd::Modules::Module& xsdcModule : xsdcModulesType.get_module()) { + // 'primary' in the XML schema used by HIDL is equivalent to 'default' module. + const std::string name = + xsdcModule.getName() != "primary" ? xsdcModule.getName() : "default"; + if (name != "r_submix") { + mModuleConfigurations->emplace_back( + name, VALUE_OR_FATAL(convertModuleConfigToAidl(xsdcModule))); + } else { + // See the note on the 'getRSubmixConfiguration' function. + mModuleConfigurations->emplace_back(name, nullptr); + } + } + } +} + +} // namespace aidl::android::hardware::audio::core::internal diff --git a/audio/Bluetooth.cpp b/audio/Bluetooth.cpp new file mode 100644 index 0000000..072b89f --- /dev/null +++ b/audio/Bluetooth.cpp @@ -0,0 +1,130 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "AHAL_Bluetooth" +#include + +#include "core-impl/Bluetooth.h" + +using aidl::android::hardware::audio::core::VendorParameter; +using aidl::android::media::audio::common::Boolean; +using aidl::android::media::audio::common::Float; +using aidl::android::media::audio::common::Int; + +namespace aidl::android::hardware::audio::core { + +Bluetooth::Bluetooth() { + mScoConfig.isEnabled = Boolean{false}; + mScoConfig.isNrecEnabled = Boolean{false}; + mScoConfig.mode = ScoConfig::Mode::SCO; + mHfpConfig.isEnabled = Boolean{false}; + mHfpConfig.sampleRate = Int{8000}; + mHfpConfig.volume = Float{HfpConfig::VOLUME_MAX}; +} + +ndk::ScopedAStatus Bluetooth::setScoConfig(const ScoConfig& in_config, ScoConfig* _aidl_return) { + if (in_config.isEnabled.has_value()) { + mScoConfig.isEnabled = in_config.isEnabled; + } + if (in_config.isNrecEnabled.has_value()) { + mScoConfig.isNrecEnabled = in_config.isNrecEnabled; + } + if (in_config.mode != ScoConfig::Mode::UNSPECIFIED) { + mScoConfig.mode = in_config.mode; + } + if (in_config.debugName.has_value()) { + mScoConfig.debugName = in_config.debugName; + } + *_aidl_return = mScoConfig; + LOG(DEBUG) << __func__ << ": received " << in_config.toString() << ", returning " + << _aidl_return->toString(); + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Bluetooth::setHfpConfig(const HfpConfig& in_config, HfpConfig* _aidl_return) { + if (in_config.sampleRate.has_value() && in_config.sampleRate.value().value <= 0) { + LOG(ERROR) << __func__ << ": invalid sample rate: " << in_config.sampleRate.value().value; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + if (in_config.volume.has_value() && (in_config.volume.value().value < HfpConfig::VOLUME_MIN || + in_config.volume.value().value > HfpConfig::VOLUME_MAX)) { + LOG(ERROR) << __func__ << ": invalid volume: " << in_config.volume.value().value; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + + if 
(in_config.isEnabled.has_value()) { + mHfpConfig.isEnabled = in_config.isEnabled; + } + if (in_config.sampleRate.has_value()) { + mHfpConfig.sampleRate = in_config.sampleRate; + } + if (in_config.volume.has_value()) { + mHfpConfig.volume = in_config.volume; + } + *_aidl_return = mHfpConfig; + LOG(DEBUG) << __func__ << ": received " << in_config.toString() << ", returning " + << _aidl_return->toString(); + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus BluetoothA2dp::isEnabled(bool* _aidl_return) { + *_aidl_return = mEnabled; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus BluetoothA2dp::setEnabled(bool in_enabled) { + mEnabled = in_enabled; + LOG(DEBUG) << __func__ << ": " << mEnabled; + if (mHandler) return mHandler(); + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus BluetoothA2dp::supportsOffloadReconfiguration(bool* _aidl_return) { + *_aidl_return = false; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus BluetoothA2dp::reconfigureOffload( + const std::vector<::aidl::android::hardware::audio::core::VendorParameter>& in_parameters + __unused) { + LOG(DEBUG) << __func__ << ": " << ::android::internal::ToString(in_parameters); + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +ndk::ScopedAStatus BluetoothLe::isEnabled(bool* _aidl_return) { + *_aidl_return = mEnabled; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus BluetoothLe::setEnabled(bool in_enabled) { + mEnabled = in_enabled; + if (mHandler) return mHandler(); + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus BluetoothLe::supportsOffloadReconfiguration(bool* _aidl_return) { + *_aidl_return = false; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus BluetoothLe::reconfigureOffload( + const std::vector<::aidl::android::hardware::audio::core::VendorParameter>& in_parameters + __unused) { + LOG(DEBUG) << __func__ << ": " << ::android::internal::ToString(in_parameters); + return 
ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/CapEngineConfigXmlConverter.cpp b/audio/CapEngineConfigXmlConverter.cpp new file mode 100644 index 0000000..14d27f2 --- /dev/null +++ b/audio/CapEngineConfigXmlConverter.cpp @@ -0,0 +1,405 @@ +/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "AHAL_Config" + +#include +#include +#include +#include +#include +#include + +#include "core-impl/CapEngineConfigXmlConverter.h" +#include "core-impl/XsdcConversion.h" + +using aidl::android::hardware::audio::common::iequals; +using aidl::android::media::audio::common::AudioDeviceAddress; +using aidl::android::media::audio::common::AudioDeviceDescription; +using aidl::android::media::audio::common::AudioHalCapConfiguration; +using aidl::android::media::audio::common::AudioHalCapCriterionV2; +using aidl::android::media::audio::common::AudioHalCapDomain; +using aidl::android::media::audio::common::AudioHalCapParameter; +using aidl::android::media::audio::common::AudioHalCapRule; +using aidl::android::media::audio::common::AudioPolicyForceUse; +using aidl::android::media::audio::common::AudioSource; +using aidl::android::media::audio::common::AudioStreamType; + +using ::android::BAD_VALUE; +using ::android::base::unexpected; +using ::android::utilities::convertTo; + +namespace eng_xsd = 
android::audio::policy::capengine::configuration; + +namespace aidl::android::hardware::audio::core::internal { + +static constexpr const char* gStrategiesParameter = "product_strategies"; +static constexpr const char* gInputSourcesParameter = "input_sources"; +static constexpr const char* gStreamsParameter = "streams"; +static constexpr const char* gOutputDevicesParameter = "selected_output_devices"; +static constexpr const char* gOutputDeviceAddressParameter = "device_address"; +static constexpr const char* gStrategyPrefix = "vx_"; +static constexpr const char* gLegacyStrategyPrefix = "STRATEGY_"; +static constexpr const char* gLegacyOutputDevicePrefix = "AUDIO_DEVICE_OUT_"; +static constexpr const char* gLegacyInputDevicePrefix = "AUDIO_DEVICE_IN_"; +static constexpr const char* gLegacyStreamPrefix = "AUDIO_STREAM_"; +static constexpr const char* gLegacySourcePrefix = "AUDIO_SOURCE_"; + +std::optional>>& +CapEngineConfigXmlConverter::getAidlCapEngineConfig() { + return mAidlCapDomains; +} + +ConversionResult convertCriterionRuleToAidl( + const eng_xsd::SelectionCriterionRuleType& xsdcRule) { + using Tag = AudioHalCapCriterionV2::Tag; + AudioHalCapRule::CriterionRule rule{}; + std::string criterionName = xsdcRule.getSelectionCriterion(); + std::string criterionValue = xsdcRule.getValue(); + if (iequals(criterionName, toString(Tag::availableInputDevices))) { + AudioHalCapCriterionV2::AvailableDevices value; + value.values.emplace_back(VALUE_OR_RETURN( + convertDeviceTypeToAidl(gLegacyInputDevicePrefix + criterionValue))); + rule.criterionAndValue = AudioHalCapCriterionV2::make(value); + + } else if (iequals(criterionName, toString(Tag::availableOutputDevices))) { + AudioHalCapCriterionV2::AvailableDevices value; + value.values.emplace_back(VALUE_OR_RETURN( + convertDeviceTypeToAidl(gLegacyOutputDevicePrefix + criterionValue))); + rule.criterionAndValue = AudioHalCapCriterionV2::make(value); + } else if (iequals(criterionName, 
toString(Tag::availableInputDevicesAddresses))) { + AudioHalCapCriterionV2::AvailableDevicesAddresses value; + value.values.emplace_back(criterionValue); + rule.criterionAndValue = + AudioHalCapCriterionV2::make(value); + } else if (iequals(criterionName, toString(Tag::availableOutputDevicesAddresses))) { + AudioHalCapCriterionV2::AvailableDevicesAddresses value; + value.values.emplace_back(criterionValue); + rule.criterionAndValue = + AudioHalCapCriterionV2::make(value); + } else if (iequals(criterionName, toString(Tag::telephonyMode))) { + AudioHalCapCriterionV2::TelephonyMode value; + value.values.emplace_back(VALUE_OR_RETURN(convertTelephonyModeToAidl(criterionValue))); + rule.criterionAndValue = AudioHalCapCriterionV2::make(value); + } else if (!fastcmp(criterionName.c_str(), kXsdcForceConfigForUse, + strlen(kXsdcForceConfigForUse))) { + AudioHalCapCriterionV2::ForceConfigForUse value; + value.values.emplace_back( + VALUE_OR_RETURN(convertForceUseToAidl(criterionName, criterionValue))); + rule.criterionAndValue = AudioHalCapCriterionV2::make(value); + } else { + LOG(ERROR) << __func__ << " unrecognized criterion " << criterionName; + return unexpected(BAD_VALUE); + } + if (xsdcRule.getMatchesWhen() == eng_xsd::MatchesWhenEnum::Excludes) { + rule.matchingRule = AudioHalCapRule::MatchingRule::EXCLUDES; + } else if (xsdcRule.getMatchesWhen() == eng_xsd::MatchesWhenEnum::Includes) { + rule.matchingRule = AudioHalCapRule::MatchingRule::INCLUDES; + } else if (xsdcRule.getMatchesWhen() == eng_xsd::MatchesWhenEnum::Is) { + rule.matchingRule = AudioHalCapRule::MatchingRule::IS; + } else if (xsdcRule.getMatchesWhen() == eng_xsd::MatchesWhenEnum::IsNot) { + rule.matchingRule = AudioHalCapRule::MatchingRule::IS_NOT; + } else { + LOG(ERROR) << "Unsupported match when rule."; + return unexpected(BAD_VALUE); + } + return rule; +} + +ConversionResult convertRule(const eng_xsd::CompoundRuleType& xsdcCompoundRule) { + AudioHalCapRule rule{}; + bool isPreviousCompoundRule = 
true; + if (xsdcCompoundRule.getType() == eng_xsd::TypeEnum::Any) { + rule.compoundRule = AudioHalCapRule::CompoundRule::ANY; + } else if (xsdcCompoundRule.getType() == eng_xsd::TypeEnum::All) { + rule.compoundRule = AudioHalCapRule::CompoundRule::ALL; + } else { + LOG(ERROR) << "Unsupported compound rule type."; + return unexpected(BAD_VALUE); + } + for (const auto& childXsdcCoumpoundRule : xsdcCompoundRule.getCompoundRule_optional()) { + if (childXsdcCoumpoundRule.hasCompoundRule_optional()) { + rule.nestedRules.push_back(VALUE_OR_FATAL(convertRule(childXsdcCoumpoundRule))); + } else if (childXsdcCoumpoundRule.hasSelectionCriterionRule_optional()) { + rule.nestedRules.push_back(VALUE_OR_FATAL(convertRule(childXsdcCoumpoundRule))); + } + } + if (xsdcCompoundRule.hasSelectionCriterionRule_optional()) { + for (const auto& xsdcRule : xsdcCompoundRule.getSelectionCriterionRule_optional()) { + rule.criterionRules.push_back(VALUE_OR_FATAL(convertCriterionRuleToAidl(xsdcRule))); + } + } + return rule; +} + +ConversionResult getAudioProductStrategyId(const std::string& path) { + std::vector strings; + std::istringstream pathStream(path); + std::string stringToken; + while (getline(pathStream, stringToken, '/')) { + std::size_t pos = stringToken.find(gStrategyPrefix); + if (pos != std::string::npos) { + std::string strategyIdLiteral = stringToken.substr(pos + std::strlen(gStrategyPrefix)); + int strategyId; + if (!convertTo(strategyIdLiteral, strategyId)) { + LOG(ERROR) << "Invalid strategy " << stringToken << " from path " << path; + return unexpected(BAD_VALUE); + } + return strategyId; + } + pos = stringToken.find(gLegacyStrategyPrefix); + if (pos != std::string::npos) { + std::string legacyStrategyIdLiteral = stringToken.substr(pos); + const auto legacyStrategies = getLegacyProductStrategyMap(); + if (const auto& it = legacyStrategies.find(legacyStrategyIdLiteral); + it != legacyStrategies.end()) { + return it->second; + } + LOG(ERROR) << "Invalid legacy strategy " << 
stringToken << " from path " << path; + return unexpected(BAD_VALUE); + } + } + return unexpected(BAD_VALUE); +} + +ConversionResult getAudioSource(const std::string& path) { + std::vector strings; + std::istringstream pathStream(path); + std::string stringToken; + while (getline(pathStream, stringToken, '/')) { + if (stringToken.find(gInputSourcesParameter) != std::string::npos) { + getline(pathStream, stringToken, '/'); + std::transform(stringToken.begin(), stringToken.end(), stringToken.begin(), + [](char c) { return std::toupper(c); }); + std::string legacySourceLiteral = "AUDIO_SOURCE_" + stringToken; + audio_source_t legacySource; + if (!::android::SourceTypeConverter::fromString(legacySourceLiteral, legacySource)) { + LOG(ERROR) << "Invalid source " << stringToken << " from path " << path; + return unexpected(BAD_VALUE); + } + return legacy2aidl_audio_source_t_AudioSource(legacySource); + } + } + return unexpected(BAD_VALUE); +} + +ConversionResult getAudioStreamType(const std::string& path) { + std::vector strings; + std::istringstream pathStream(path); + std::string stringToken; + + while (getline(pathStream, stringToken, '/')) { + if (stringToken.find(gStreamsParameter) != std::string::npos) { + getline(pathStream, stringToken, '/'); + std::transform(stringToken.begin(), stringToken.end(), stringToken.begin(), + [](char c) { return std::toupper(c); }); + std::string legacyStreamLiteral = std::string(gLegacyStreamPrefix) + stringToken; + audio_stream_type_t legacyStream; + if (!::android::StreamTypeConverter::fromString(legacyStreamLiteral, legacyStream)) { + LOG(ERROR) << "Invalid stream " << stringToken << " from path " << path; + return unexpected(BAD_VALUE); + } + return legacy2aidl_audio_stream_type_t_AudioStreamType(legacyStream); + } + } + return unexpected(BAD_VALUE); +} + +ConversionResult toUpperAndAppendPrefix(const std::string& capName, + const std::string& legacyPrefix) { + std::string legacyName = capName; + std::transform(legacyName.begin(), 
legacyName.end(), legacyName.begin(), + [](char c) { return std::toupper(c); }); + return legacyPrefix + legacyName; +} + +ConversionResult CapEngineConfigXmlConverter::convertParamToAidl( + const eng_xsd::ConfigurableElementSettingsType& element) { + const auto& path = element.getPath(); + + AudioHalCapParameter parameterSetting; + if (path.find(gStrategiesParameter) != std::string::npos) { + int strategyId = VALUE_OR_FATAL(getAudioProductStrategyId(path)); + if (path.find(gOutputDevicesParameter) != std::string::npos) { + // Value is 1 or 0 + if (!element.hasBitParameter_optional()) { + LOG(ERROR) << "Invalid strategy value type"; + return unexpected(BAD_VALUE); + } + // Convert name to output device type + const auto* xsdcParam = element.getFirstBitParameter_optional(); + std::string outputDevice = VALUE_OR_FATAL(toUpperAndAppendPrefix( + eng_xsd::toString(xsdcParam->getName()), gLegacyOutputDevicePrefix)); + audio_devices_t legacyType; + if (!::android::OutputDeviceConverter::fromString(outputDevice, legacyType)) { + LOG(ERROR) << "Invalid strategy device type " << outputDevice; + return unexpected(BAD_VALUE); + } + AudioDeviceDescription aidlDevice = + VALUE_OR_FATAL(legacy2aidl_audio_devices_t_AudioDeviceDescription(legacyType)); + bool isSelected; + if (!convertTo(xsdcParam->getValue(), isSelected)) { + LOG(ERROR) << "Invalid strategy device selection value " << xsdcParam->getValue(); + return unexpected(BAD_VALUE); + } + parameterSetting = + AudioHalCapParameter::StrategyDevice(aidlDevice, strategyId, isSelected); + } else if (path.find(gOutputDeviceAddressParameter) != std::string::npos) { + // Value is the address + if (!element.hasStringParameter_optional()) { + return unexpected(BAD_VALUE); + } + std::string address = element.getFirstStringParameter_optional()->getValue(); + parameterSetting = AudioHalCapParameter::StrategyDeviceAddress( + AudioDeviceAddress(address), strategyId); + } + } else if (path.find(gInputSourcesParameter) != std::string::npos) 
{ + // Value is 1 or 0 + if (!element.hasBitParameter_optional()) { + LOG(ERROR) << "Invalid source value type"; + return unexpected(BAD_VALUE); + } + AudioSource audioSourceAidl = VALUE_OR_FATAL(getAudioSource(path)); + const auto* xsdcParam = element.getFirstBitParameter_optional(); + std::string inputDeviceLiteral = VALUE_OR_FATAL(toUpperAndAppendPrefix( + eng_xsd::toString(xsdcParam->getName()), gLegacyInputDevicePrefix)); + audio_devices_t inputDeviceType; + if (!::android::InputDeviceConverter::fromString(inputDeviceLiteral, inputDeviceType)) { + LOG(ERROR) << "Invalid source device type " << inputDeviceLiteral; + return unexpected(BAD_VALUE); + } + AudioDeviceDescription aidlDevice = + VALUE_OR_FATAL(legacy2aidl_audio_devices_t_AudioDeviceDescription(inputDeviceType)); + + bool isSelected; + if (!convertTo(xsdcParam->getValue(), isSelected)) { + LOG(ERROR) << "Invalid source value type " << xsdcParam->getValue(); + return unexpected(BAD_VALUE); + } + parameterSetting = + AudioHalCapParameter::InputSourceDevice(aidlDevice, audioSourceAidl, isSelected); + } else if (path.find(gStreamsParameter) != std::string::npos) { + AudioStreamType audioStreamAidl = VALUE_OR_FATAL(getAudioStreamType(path)); + if (!element.hasEnumParameter_optional()) { + LOG(ERROR) << "Invalid stream value type"; + return unexpected(BAD_VALUE); + } + const auto* xsdcParam = element.getFirstEnumParameter_optional(); + std::string profileLiteral = + VALUE_OR_FATAL(toUpperAndAppendPrefix(xsdcParam->getValue(), gLegacyStreamPrefix)); + audio_stream_type_t profileLegacyStream; + if (!::android::StreamTypeConverter::fromString(profileLiteral, profileLegacyStream)) { + LOG(ERROR) << "Invalid stream value " << profileLiteral; + return unexpected(BAD_VALUE); + } + AudioStreamType profileStreamAidl = VALUE_OR_FATAL( + legacy2aidl_audio_stream_type_t_AudioStreamType(profileLegacyStream)); + parameterSetting = + AudioHalCapParameter::StreamVolumeProfile(audioStreamAidl, profileStreamAidl); + } + 
return parameterSetting; +} + +ConversionResult> +CapEngineConfigXmlConverter::convertSettingToAidl( + const eng_xsd::SettingsType::Configuration& xsdcSetting) { + std::vector aidlCapParameterSettings; + for (const auto& element : xsdcSetting.getConfigurableElement()) { + aidlCapParameterSettings.push_back(VALUE_OR_FATAL(convertParamToAidl(element))); + } + return aidlCapParameterSettings; +} + +ConversionResult CapEngineConfigXmlConverter::convertConfigurationToAidl( + const eng_xsd::ConfigurationsType::Configuration& xsdcConfiguration, + const eng_xsd::SettingsType::Configuration& xsdcSettingConfiguration) { + AudioHalCapConfiguration aidlCapConfiguration; + aidlCapConfiguration.name = xsdcConfiguration.getName(); + if (xsdcConfiguration.hasCompoundRule()) { + if (xsdcConfiguration.getCompoundRule().size() != 1) { + return unexpected(BAD_VALUE); + } + aidlCapConfiguration.rule = + VALUE_OR_FATAL(convertRule(xsdcConfiguration.getCompoundRule()[0])); + aidlCapConfiguration.parameterSettings = + VALUE_OR_FATAL(convertSettingToAidl(xsdcSettingConfiguration)); + } + return aidlCapConfiguration; +} + +ConversionResult getConfigurationByName( + const std::string& name, const std::vector& xsdcSettingsVec) { + for (const auto& xsdcSettings : xsdcSettingsVec) { + for (const auto& xsdcConfiguration : xsdcSettings.getConfiguration()) { + if (xsdcConfiguration.getName() == name) { + return xsdcConfiguration; + } + } + } + LOG(ERROR) << __func__ << " failed to find configuration " << name; + return unexpected(BAD_VALUE); +} + +ConversionResult> +CapEngineConfigXmlConverter::convertConfigurationsToAidl( + const std::vector& xsdcConfigurationsVec, + const std::vector& xsdcSettingsVec) { + if (xsdcConfigurationsVec.empty() || xsdcSettingsVec.empty()) { + LOG(ERROR) << __func__ << " empty configurations/settings"; + return unexpected(BAD_VALUE); + } + std::vector aidlConfigurations; + for (const auto& xsdcConfigurations : xsdcConfigurationsVec) { + for (const auto& 
xsdcConfiguration : xsdcConfigurations.getConfiguration()) { + auto xsdcSettingConfiguration = VALUE_OR_FATAL( + getConfigurationByName(xsdcConfiguration.getName(), xsdcSettingsVec)); + aidlConfigurations.push_back(VALUE_OR_FATAL( + convertConfigurationToAidl(xsdcConfiguration, xsdcSettingConfiguration))); + } + } + return aidlConfigurations; +} + +ConversionResult CapEngineConfigXmlConverter::convertConfigurableDomainToAidl( + const eng_xsd::ConfigurableDomainType& xsdcConfigurableDomain) { + AudioHalCapDomain aidlConfigurableDomain; + + aidlConfigurableDomain.name = xsdcConfigurableDomain.getName(); + if (xsdcConfigurableDomain.hasSequenceAware() && xsdcConfigurableDomain.getSequenceAware()) { + LOG(ERROR) << "sequence aware not supported."; + return unexpected(BAD_VALUE); + } + if (xsdcConfigurableDomain.hasConfigurations() && xsdcConfigurableDomain.hasSettings()) { + aidlConfigurableDomain.configurations = VALUE_OR_FATAL(convertConfigurationsToAidl( + xsdcConfigurableDomain.getConfigurations(), xsdcConfigurableDomain.getSettings())); + } + return aidlConfigurableDomain; +} + +void CapEngineConfigXmlConverter::init() { + if (getXsdcConfig()->hasConfigurableDomain()) { + mAidlCapDomains = std::make_optional<>(VALUE_OR_FATAL( + (convertCollectionToAidlOptionalValues( + getXsdcConfig()->getConfigurableDomain(), + std::bind(&CapEngineConfigXmlConverter::convertConfigurableDomainToAidl, + this, std::placeholders::_1))))); + } else { + mAidlCapDomains = std::nullopt; + } +} + +} // namespace aidl::android::hardware::audio::core::internal diff --git a/audio/Config.cpp b/audio/Config.cpp new file mode 100644 index 0000000..308200a --- /dev/null +++ b/audio/Config.cpp @@ -0,0 +1,67 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "AHAL_Config" +#include + +#include + +#include "core-impl/AudioPolicyConfigXmlConverter.h" +#include "core-impl/Config.h" +#include "core-impl/EngineConfigXmlConverter.h" + +using aidl::android::media::audio::common::AudioHalEngineConfig; + +namespace aidl::android::hardware::audio::core { +ndk::ScopedAStatus Config::getSurroundSoundConfig(SurroundSoundConfig* _aidl_return) { + static const auto& func = __func__; + static const SurroundSoundConfig surroundSoundConfig = [this]() { + SurroundSoundConfig surroundCfg = mAudioPolicyConverter.getSurroundSoundConfig(); + if (mAudioPolicyConverter.getStatus() != ::android::OK) { + LOG(WARNING) << func << ": " << mAudioPolicyConverter.getError(); + } + return surroundCfg; + }(); + *_aidl_return = surroundSoundConfig; + LOG(DEBUG) << __func__ << ": returning " << _aidl_return->toString(); + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Config::getEngineConfig(AudioHalEngineConfig* _aidl_return) { + static const auto& func = __func__; + static const AudioHalEngineConfig returnEngCfg = [this]() { + AudioHalEngineConfig engConfig; + if (mEngConfigConverter.getStatus() == ::android::OK) { + engConfig = mEngConfigConverter.getAidlEngineConfig(); + } else { + LOG(INFO) << func << ": " << mEngConfigConverter.getError(); + if (mAudioPolicyConverter.getStatus() == ::android::OK) { + engConfig = mAudioPolicyConverter.getAidlEngineConfig(); + } else { + LOG(WARNING) << func << ": " << mAudioPolicyConverter.getError(); + } + } + // Logging full contents of the config is an overkill, 
just provide statistics. + LOG(DEBUG) << func + << ": number of strategies parsed: " << engConfig.productStrategies.size() + << ", default strategy: " << engConfig.defaultProductStrategyId + << ", number of volume groups parsed: " << engConfig.volumeGroups.size(); + return engConfig; + }(); + *_aidl_return = returnEngCfg; + return ndk::ScopedAStatus::ok(); +} +} // namespace aidl::android::hardware::audio::core diff --git a/audio/Configuration.cpp b/audio/Configuration.cpp new file mode 100644 index 0000000..0ff8eb4 --- /dev/null +++ b/audio/Configuration.cpp @@ -0,0 +1,707 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include +#include +#include +#include +#include +#include + +#include "core-impl/Configuration.h" + +using aidl::android::hardware::audio::common::makeBitPositionFlagMask; +using aidl::android::media::audio::common::AudioChannelLayout; +using aidl::android::media::audio::common::AudioDeviceDescription; +using aidl::android::media::audio::common::AudioDeviceType; +using aidl::android::media::audio::common::AudioFormatDescription; +using aidl::android::media::audio::common::AudioFormatType; +using aidl::android::media::audio::common::AudioGainConfig; +using aidl::android::media::audio::common::AudioIoFlags; +using aidl::android::media::audio::common::AudioOutputFlags; +using aidl::android::media::audio::common::AudioPort; +using aidl::android::media::audio::common::AudioPortConfig; +using aidl::android::media::audio::common::AudioPortDeviceExt; +using aidl::android::media::audio::common::AudioPortExt; +using aidl::android::media::audio::common::AudioPortMixExt; +using aidl::android::media::audio::common::AudioProfile; +using aidl::android::media::audio::common::Int; +using aidl::android::media::audio::common::PcmType; +using Configuration = aidl::android::hardware::audio::core::Module::Configuration; + +namespace aidl::android::hardware::audio::core::internal { + +static void fillProfile(AudioProfile* profile, const std::vector& channelLayouts, + const std::vector& sampleRates) { + for (auto layout : channelLayouts) { + profile->channelMasks.push_back( + AudioChannelLayout::make(layout)); + } + profile->sampleRates.insert(profile->sampleRates.end(), sampleRates.begin(), sampleRates.end()); +} + +static AudioProfile createProfile(PcmType pcmType, const std::vector& channelLayouts, + const std::vector& sampleRates) { + AudioProfile profile; + profile.format.type = AudioFormatType::PCM; + profile.format.pcm = pcmType; + fillProfile(&profile, channelLayouts, sampleRates); + return profile; +} + +static AudioProfile createProfile(const 
std::string& encodingType, + const std::vector& channelLayouts, + const std::vector& sampleRates) { + AudioProfile profile; + profile.format.encoding = encodingType; + fillProfile(&profile, channelLayouts, sampleRates); + return profile; +} + +static AudioPortExt createDeviceExt(AudioDeviceType devType, int32_t flags, + std::string connection = "") { + AudioPortDeviceExt deviceExt; + deviceExt.device.type.type = devType; + if (devType == AudioDeviceType::IN_MICROPHONE && connection.empty()) { + deviceExt.device.address = "bottom"; + } else if (devType == AudioDeviceType::IN_MICROPHONE_BACK && connection.empty()) { + deviceExt.device.address = "back"; + } + deviceExt.device.type.connection = std::move(connection); + deviceExt.flags = flags; + return AudioPortExt::make(deviceExt); +} + +static AudioPortExt createPortMixExt(int32_t maxOpenStreamCount, int32_t maxActiveStreamCount) { + AudioPortMixExt mixExt; + mixExt.maxOpenStreamCount = maxOpenStreamCount; + mixExt.maxActiveStreamCount = maxActiveStreamCount; + return AudioPortExt::make(mixExt); +} + +static AudioPort createPort(int32_t id, const std::string& name, int32_t flags, bool isInput, + const AudioPortExt& ext) { + AudioPort port; + port.id = id; + port.name = name; + port.flags = isInput ? AudioIoFlags::make(flags) + : AudioIoFlags::make(flags); + port.ext = ext; + return port; +} + +static AudioPortConfig createDynamicPortConfig(int32_t id, int32_t portId, int32_t flags, + bool isInput, const AudioPortExt& ext) { + AudioPortConfig config; + config.id = id; + config.portId = portId; + config.format = AudioFormatDescription{}; + config.channelMask = AudioChannelLayout{}; + config.sampleRate = Int{.value = 0}; + config.gain = AudioGainConfig(); + config.flags = isInput ? 
AudioIoFlags::make(flags) + : AudioIoFlags::make(flags); + config.ext = ext; + return config; +} + +static AudioPortConfig createPortConfig(int32_t id, int32_t portId, PcmType pcmType, int32_t layout, + int32_t sampleRate, int32_t flags, bool isInput, + const AudioPortExt& ext) { + AudioPortConfig config = createDynamicPortConfig(id, portId, flags, isInput, ext); + config.sampleRate = Int{.value = sampleRate}; + config.channelMask = AudioChannelLayout::make(layout); + config.format = AudioFormatDescription{.type = AudioFormatType::PCM, .pcm = pcmType}; + return config; +} + +static AudioRoute createRoute(const std::vector& sources, const AudioPort& sink) { + AudioRoute route; + route.sinkPortId = sink.id; + std::transform(sources.begin(), sources.end(), std::back_inserter(route.sourcePortIds), + [](const auto& port) { return port.id; }); + return route; +} + +std::vector getStandard16And24BitPcmAudioProfiles() { + auto createStdPcmAudioProfile = [](const PcmType& pcmType) { + return AudioProfile{ + .format = AudioFormatDescription{.type = AudioFormatType::PCM, .pcm = pcmType}, + .channelMasks = {AudioChannelLayout::make( + AudioChannelLayout::LAYOUT_MONO), + AudioChannelLayout::make( + AudioChannelLayout::LAYOUT_STEREO)}, + .sampleRates = {8000, 11025, 16000, 32000, 44100, 48000}}; + }; + return { + createStdPcmAudioProfile(PcmType::INT_16_BIT), + createStdPcmAudioProfile(PcmType::INT_24_BIT), + }; +} + +// Primary (default) configuration: +// +// Device ports: +// * "Speaker", OUT_SPEAKER, default +// - no profiles specified +// * "Built-In Mic", IN_MICROPHONE, default +// - no profiles specified +// * "Telephony Tx", OUT_TELEPHONY_TX +// - no profiles specified +// * "Telephony Rx", IN_TELEPHONY_RX +// - no profiles specified +// * "FM Tuner", IN_FM_TUNER +// - no profiles specified +// +// Mix ports: +// * "primary output", PRIMARY, 1 max open, 1 max active stream +// - profile PCM 16-bit; MONO, STEREO; 8000, 11025, 16000, 32000, 44100, 48000 +// * "primary 
input", 1 max open, 1 max active stream +// - profile PCM 16-bit; MONO, STEREO; 8000, 11025, 16000, 32000, 44100, 48000 +// * "telephony_tx", 1 max open, 1 max active stream +// - profile PCM 16-bit; MONO, STEREO; 8000, 11025, 16000, 32000, 44100, 48000 +// * "telephony_rx", 1 max open, 1 max active stream +// - profile PCM 16-bit; MONO, STEREO; 8000, 11025, 16000, 32000, 44100, 48000 +// * "fm_tuner", 1 max open, 1 max active stream +// - profile PCM 16-bit; MONO, STEREO; 8000, 11025, 16000, 32000, 44100, 48000 +// +// Routes: +// "primary out" -> "Speaker" +// "Built-In Mic" -> "primary input" +// "Telephony Rx" -> "telephony_rx" +// "telephony_tx" -> "Telephony Tx" +// "FM Tuner" -> "fm_tuner" +// +// Initial port configs: +// * "Speaker" device port: dynamic configuration +// * "Built-In Mic" device port: dynamic configuration +// * "Telephony Tx" device port: dynamic configuration +// * "Telephony Rx" device port: dynamic configuration +// * "FM Tuner" device port: dynamic configuration +// +std::unique_ptr getPrimaryConfiguration() { + static const Configuration configuration = []() { + const std::vector standardPcmAudioProfiles = { + createProfile(PcmType::INT_16_BIT, + {AudioChannelLayout::LAYOUT_MONO, AudioChannelLayout::LAYOUT_STEREO}, + {8000, 11025, 16000, 32000, 44100, 48000})}; + Configuration c; + + // Device ports + + AudioPort speakerOutDevice = + createPort(c.nextPortId++, "Speaker", 0, false, + createDeviceExt(AudioDeviceType::OUT_SPEAKER, + 1 << AudioPortDeviceExt::FLAG_INDEX_DEFAULT_DEVICE)); + c.ports.push_back(speakerOutDevice); + c.initialConfigs.push_back( + createDynamicPortConfig(speakerOutDevice.id, speakerOutDevice.id, 0, false, + createDeviceExt(AudioDeviceType::OUT_SPEAKER, 0))); + + AudioPort micInDevice = + createPort(c.nextPortId++, "Built-In Mic", 0, true, + createDeviceExt(AudioDeviceType::IN_MICROPHONE, + 1 << AudioPortDeviceExt::FLAG_INDEX_DEFAULT_DEVICE)); + c.ports.push_back(micInDevice); + c.initialConfigs.push_back( + 
createDynamicPortConfig(micInDevice.id, micInDevice.id, 0, true, + createDeviceExt(AudioDeviceType::IN_MICROPHONE, 0))); + + AudioPort telephonyTxOutDevice = + createPort(c.nextPortId++, "Telephony Tx", 0, false, + createDeviceExt(AudioDeviceType::OUT_TELEPHONY_TX, 0)); + c.ports.push_back(telephonyTxOutDevice); + c.initialConfigs.push_back( + createDynamicPortConfig(telephonyTxOutDevice.id, telephonyTxOutDevice.id, 0, false, + createDeviceExt(AudioDeviceType::OUT_TELEPHONY_TX, 0))); + + AudioPort telephonyRxInDevice = + createPort(c.nextPortId++, "Telephony Rx", 0, true, + createDeviceExt(AudioDeviceType::IN_TELEPHONY_RX, 0)); + c.ports.push_back(telephonyRxInDevice); + c.initialConfigs.push_back( + createDynamicPortConfig(telephonyRxInDevice.id, telephonyRxInDevice.id, 0, true, + createDeviceExt(AudioDeviceType::IN_TELEPHONY_RX, 0))); + + AudioPort fmTunerInDevice = createPort(c.nextPortId++, "FM Tuner", 0, true, + createDeviceExt(AudioDeviceType::IN_FM_TUNER, 0)); + c.ports.push_back(fmTunerInDevice); + c.initialConfigs.push_back( + createDynamicPortConfig(fmTunerInDevice.id, fmTunerInDevice.id, 0, true, + createDeviceExt(AudioDeviceType::IN_FM_TUNER, 0))); + + // Mix ports + + AudioPort primaryOutMix = createPort(c.nextPortId++, "primary output", + makeBitPositionFlagMask(AudioOutputFlags::PRIMARY), + false, createPortMixExt(0, 0)); + primaryOutMix.profiles.insert(primaryOutMix.profiles.begin(), + standardPcmAudioProfiles.begin(), + standardPcmAudioProfiles.end()); + c.ports.push_back(primaryOutMix); + + AudioPort primaryInMix = + createPort(c.nextPortId++, "primary input", 0, true, createPortMixExt(0, 1)); + primaryInMix.profiles.push_back( + createProfile(PcmType::INT_16_BIT, + {AudioChannelLayout::LAYOUT_MONO, AudioChannelLayout::LAYOUT_STEREO}, + {8000, 11025, 16000, 32000, 44100, 48000})); + c.ports.push_back(primaryInMix); + + AudioPort telephonyTxOutMix = + createPort(c.nextPortId++, "telephony_tx", 0, false, createPortMixExt(1, 1)); + 
telephonyTxOutMix.profiles.insert(telephonyTxOutMix.profiles.begin(), + standardPcmAudioProfiles.begin(), + standardPcmAudioProfiles.end()); + c.ports.push_back(telephonyTxOutMix); + + AudioPort telephonyRxInMix = + createPort(c.nextPortId++, "telephony_rx", 0, true, createPortMixExt(0, 1)); + telephonyRxInMix.profiles.insert(telephonyRxInMix.profiles.begin(), + standardPcmAudioProfiles.begin(), + standardPcmAudioProfiles.end()); + c.ports.push_back(telephonyRxInMix); + + AudioPort fmTunerInMix = + createPort(c.nextPortId++, "fm_tuner", 0, true, createPortMixExt(0, 1)); + fmTunerInMix.profiles.insert(fmTunerInMix.profiles.begin(), + standardPcmAudioProfiles.begin(), + standardPcmAudioProfiles.end()); + c.ports.push_back(fmTunerInMix); + + c.routes.push_back(createRoute({primaryOutMix}, speakerOutDevice)); + c.routes.push_back(createRoute({micInDevice}, primaryInMix)); + c.routes.push_back(createRoute({telephonyRxInDevice}, telephonyRxInMix)); + c.routes.push_back(createRoute({telephonyTxOutMix}, telephonyTxOutDevice)); + c.routes.push_back(createRoute({fmTunerInDevice}, fmTunerInMix)); + + c.portConfigs.insert(c.portConfigs.end(), c.initialConfigs.begin(), c.initialConfigs.end()); + + return c; + }(); + return std::make_unique(configuration); +} + +// Note: When transitioning to loading of XML configs, either keep the configuration +// of the remote submix sources from this static configuration, or update the XML +// config to match it. There are several reasons for that: +// 1. The "Remote Submix In" device is listed in the XML config as "attached", +// however in the AIDL scheme its device type has a "virtual" connection. +// 2. The canonical r_submix configuration only lists 'STEREO' and '48000', +// however the framework attempts to open streams for other sample rates +// as well. The legacy r_submix implementation allowed that, but libaudiohal@aidl +// will not find a mix port to use. 
Because of that, list all sample rates that +// the legacy implementation allowed (note that mono was not allowed, the framework +// is expected to upmix mono tracks into stereo if needed). +// 3. The legacy implementation had a hard limit on the number of routes (10), +// and this is checked indirectly by AudioPlaybackCaptureTest#testPlaybackCaptureDoS +// CTS test. Instead of hardcoding the number of routes, we can use +// "maxOpen/ActiveStreamCount" to enforce a similar limit. However, the canonical +// XML file lacks this specification. +// +// Remote Submix configuration: +// +// Device ports: +// * "Remote Submix Out", OUT_SUBMIX +// - no profiles specified +// * "Remote Submix In", IN_SUBMIX +// - no profiles specified +// +// Mix ports: +// * "r_submix output", maximum 10 opened streams, maximum 10 active streams +// - profile PCM 16-bit; STEREO; 8000, 11025, 16000, 32000, 44100, 48000, 192000 +// * "r_submix input", maximum 10 opened streams, maximum 10 active streams +// - profile PCM 16-bit; STEREO; 8000, 11025, 16000, 32000, 44100, 48000, 192000 +// +// Routes: +// "r_submix output" -> "Remote Submix Out" +// "Remote Submix In" -> "r_submix input" +// +std::unique_ptr getRSubmixConfiguration() { + static const Configuration configuration = []() { + Configuration c; + const std::vector remoteSubmixPcmAudioProfiles{ + createProfile(PcmType::INT_16_BIT, {AudioChannelLayout::LAYOUT_STEREO}, + {8000, 11025, 16000, 32000, 44100, 48000, 192000})}; + + // Device ports + + AudioPort rsubmixOutDevice = + createPort(c.nextPortId++, "Remote Submix Out", 0, false, + createDeviceExt(AudioDeviceType::OUT_SUBMIX, 0, + AudioDeviceDescription::CONNECTION_VIRTUAL)); + c.ports.push_back(rsubmixOutDevice); + c.connectedProfiles[rsubmixOutDevice.id] = remoteSubmixPcmAudioProfiles; + + AudioPort rsubmixInDevice = + createPort(c.nextPortId++, "Remote Submix In", 0, true, + createDeviceExt(AudioDeviceType::IN_SUBMIX, 0, + AudioDeviceDescription::CONNECTION_VIRTUAL)); + 
c.ports.push_back(rsubmixInDevice); + c.connectedProfiles[rsubmixInDevice.id] = remoteSubmixPcmAudioProfiles; + + // Mix ports + + AudioPort rsubmixOutMix = + createPort(c.nextPortId++, "r_submix output", 0, false, createPortMixExt(10, 10)); + rsubmixOutMix.profiles = remoteSubmixPcmAudioProfiles; + c.ports.push_back(rsubmixOutMix); + + AudioPort rsubmixInMix = + createPort(c.nextPortId++, "r_submix input", 0, true, createPortMixExt(10, 10)); + rsubmixInMix.profiles = remoteSubmixPcmAudioProfiles; + c.ports.push_back(rsubmixInMix); + + c.routes.push_back(createRoute({rsubmixOutMix}, rsubmixOutDevice)); + c.routes.push_back(createRoute({rsubmixInDevice}, rsubmixInMix)); + + return c; + }(); + return std::make_unique(configuration); +} + +// Usb configuration: +// +// Device ports: +// * "USB Device Out", OUT_DEVICE, CONNECTION_USB +// - no profiles specified +// * "USB Headset Out", OUT_HEADSET, CONNECTION_USB +// - no profiles specified +// * "USB Device In", IN_DEVICE, CONNECTION_USB +// - no profiles specified +// * "USB Headset In", IN_HEADSET, CONNECTION_USB +// - no profiles specified +// +// Mix ports: +// * "usb_device output", 1 max open, 1 max active stream +// - no profiles specified +// * "usb_device input", 1 max open, 1 max active stream +// - no profiles specified +// +// Routes: +// * "usb_device output" -> "USB Device Out" +// * "usb_device output" -> "USB Headset Out" +// * "USB Device In", "USB Headset In" -> "usb_device input" +// +// Profiles for device port connected state (when simulating connections): +// * "USB Device Out", "USB Headset Out": +// - profile PCM 16-bit; MONO, STEREO, INDEX_MASK_1, INDEX_MASK_2; 44100, 48000 +// - profile PCM 24-bit; MONO, STEREO, INDEX_MASK_1, INDEX_MASK_2; 44100, 48000 +// * "USB Device In", "USB Headset In": +// - profile PCM 16-bit; MONO, STEREO, INDEX_MASK_1, INDEX_MASK_2; 44100, 48000 +// - profile PCM 24-bit; MONO, STEREO, INDEX_MASK_1, INDEX_MASK_2; 44100, 48000 +// +std::unique_ptr 
getUsbConfiguration() { + static const Configuration configuration = []() { + const std::vector standardPcmAudioProfiles = { + createProfile(PcmType::INT_16_BIT, + {AudioChannelLayout::LAYOUT_MONO, AudioChannelLayout::LAYOUT_STEREO, + AudioChannelLayout::INDEX_MASK_1, AudioChannelLayout::INDEX_MASK_2}, + {44100, 48000}), + createProfile(PcmType::INT_24_BIT, + {AudioChannelLayout::LAYOUT_MONO, AudioChannelLayout::LAYOUT_STEREO, + AudioChannelLayout::INDEX_MASK_1, AudioChannelLayout::INDEX_MASK_2}, + {44100, 48000})}; + Configuration c; + + // Device ports + + AudioPort usbOutDevice = + createPort(c.nextPortId++, "USB Device Out", 0, false, + createDeviceExt(AudioDeviceType::OUT_DEVICE, 0, + AudioDeviceDescription::CONNECTION_USB)); + c.ports.push_back(usbOutDevice); + c.connectedProfiles[usbOutDevice.id] = standardPcmAudioProfiles; + + AudioPort usbOutHeadset = + createPort(c.nextPortId++, "USB Headset Out", 0, false, + createDeviceExt(AudioDeviceType::OUT_HEADSET, 0, + AudioDeviceDescription::CONNECTION_USB)); + c.ports.push_back(usbOutHeadset); + c.connectedProfiles[usbOutHeadset.id] = standardPcmAudioProfiles; + + AudioPort usbInDevice = createPort(c.nextPortId++, "USB Device In", 0, true, + createDeviceExt(AudioDeviceType::IN_DEVICE, 0, + AudioDeviceDescription::CONNECTION_USB)); + c.ports.push_back(usbInDevice); + c.connectedProfiles[usbInDevice.id] = standardPcmAudioProfiles; + + AudioPort usbInHeadset = + createPort(c.nextPortId++, "USB Headset In", 0, true, + createDeviceExt(AudioDeviceType::IN_HEADSET, 0, + AudioDeviceDescription::CONNECTION_USB)); + c.ports.push_back(usbInHeadset); + c.connectedProfiles[usbInHeadset.id] = standardPcmAudioProfiles; + + // Mix ports + + AudioPort usbDeviceOutMix = + createPort(c.nextPortId++, "usb_device output", 0, false, createPortMixExt(1, 1)); + c.ports.push_back(usbDeviceOutMix); + + AudioPort usbDeviceInMix = + createPort(c.nextPortId++, "usb_device input", 0, true, createPortMixExt(0, 1)); + 
c.ports.push_back(usbDeviceInMix); + + c.routes.push_back(createRoute({usbDeviceOutMix}, usbOutDevice)); + c.routes.push_back(createRoute({usbDeviceOutMix}, usbOutHeadset)); + c.routes.push_back(createRoute({usbInDevice, usbInHeadset}, usbDeviceInMix)); + + return c; + }(); + return std::make_unique(configuration); +} + +// Stub configuration: +// +// Device ports: +// * "Test Out", OUT_AFE_PROXY +// - no profiles specified +// * "Test In", IN_AFE_PROXY +// - no profiles specified +// * "Wired Headset", OUT_HEADSET +// - no profiles specified +// * "Wired Headset Mic", IN_HEADSET +// - no profiles specified +// +// Mix ports: +// * "test output", 1 max open, 1 max active stream +// - profile PCM 24-bit; MONO, STEREO; 8000, 11025, 16000, 32000, 44100, 48000 +// * "test fast output", 1 max open, 1 max active stream +// - profile PCM 24-bit; STEREO; 44100, 48000 +// * "test compressed offload", DIRECT|COMPRESS_OFFLOAD|NON_BLOCKING, 1 max open, 1 max active +// stream +// - profile MP3; MONO, STEREO; 44100, 48000 +// * "test input", 2 max open, 2 max active streams +// - profile PCM 24-bit; MONO, STEREO, FRONT_BACK; +// 8000, 11025, 16000, 22050, 32000, 44100, 48000 +// +// Routes: +// "test output", "test fast output", "test compressed offload" -> "Test Out" +// "test output" -> "Wired Headset" +// "Test In", "Wired Headset Mic" -> "test input" +// +// Initial port configs: +// * "Test Out" device port: PCM 24-bit; STEREO; 48000 +// * "Test In" device port: PCM 24-bit; MONO; 48000 +// +// Profiles for device port connected state (when simulating connections): +// * "Wired Headset": dynamic profiles +// * "Wired Headset Mic": dynamic profiles +// +std::unique_ptr getStubConfiguration() { + static const Configuration configuration = []() { + Configuration c; + + // Device ports + + AudioPort testOutDevice = createPort(c.nextPortId++, "Test Out", 0, false, + createDeviceExt(AudioDeviceType::OUT_AFE_PROXY, 0)); + c.ports.push_back(testOutDevice); + 
c.initialConfigs.push_back( + createPortConfig(testOutDevice.id, testOutDevice.id, PcmType::INT_24_BIT, + AudioChannelLayout::LAYOUT_STEREO, 48000, 0, false, + createDeviceExt(AudioDeviceType::OUT_AFE_PROXY, 0))); + + AudioPort headsetOutDevice = + createPort(c.nextPortId++, "Wired Headset", 0, false, + createDeviceExt(AudioDeviceType::OUT_HEADSET, 0, + AudioDeviceDescription::CONNECTION_ANALOG)); + c.ports.push_back(headsetOutDevice); + + AudioPort testInDevice = createPort(c.nextPortId++, "Test In", 0, true, + createDeviceExt(AudioDeviceType::IN_AFE_PROXY, 0)); + c.ports.push_back(testInDevice); + c.initialConfigs.push_back( + createPortConfig(testInDevice.id, testInDevice.id, PcmType::INT_24_BIT, + AudioChannelLayout::LAYOUT_MONO, 48000, 0, true, + createDeviceExt(AudioDeviceType::IN_AFE_PROXY, 0))); + + AudioPort headsetInDevice = + createPort(c.nextPortId++, "Wired Headset Mic", 0, true, + createDeviceExt(AudioDeviceType::IN_HEADSET, 0, + AudioDeviceDescription::CONNECTION_ANALOG)); + c.ports.push_back(headsetInDevice); + + // Mix ports + + AudioPort testOutMix = + createPort(c.nextPortId++, "test output", 0, false, createPortMixExt(1, 1)); + testOutMix.profiles.push_back( + createProfile(PcmType::INT_24_BIT, + {AudioChannelLayout::LAYOUT_MONO, AudioChannelLayout::LAYOUT_STEREO}, + {8000, 11025, 16000, 32000, 44100, 48000})); + c.ports.push_back(testOutMix); + + AudioPort testFastOutMix = createPort(c.nextPortId++, "test fast output", + makeBitPositionFlagMask({AudioOutputFlags::FAST}), + false, createPortMixExt(1, 1)); + testFastOutMix.profiles.push_back(createProfile( + PcmType::INT_24_BIT, {AudioChannelLayout::LAYOUT_STEREO}, {44100, 48000})); + c.ports.push_back(testFastOutMix); + + AudioPort compressedOffloadOutMix = + createPort(c.nextPortId++, "test compressed offload", + makeBitPositionFlagMask({AudioOutputFlags::DIRECT, + AudioOutputFlags::COMPRESS_OFFLOAD, + AudioOutputFlags::NON_BLOCKING}), + false, createPortMixExt(1, 1)); + 
compressedOffloadOutMix.profiles.push_back( + createProfile(::android::MEDIA_MIMETYPE_AUDIO_MPEG, + {AudioChannelLayout::LAYOUT_MONO, AudioChannelLayout::LAYOUT_STEREO}, + {44100, 48000})); + c.ports.push_back(compressedOffloadOutMix); + + AudioPort testInMix = + createPort(c.nextPortId++, "test input", 0, true, createPortMixExt(2, 2)); + testInMix.profiles.push_back( + createProfile(PcmType::INT_16_BIT, + {AudioChannelLayout::LAYOUT_MONO, AudioChannelLayout::LAYOUT_STEREO, + AudioChannelLayout::LAYOUT_FRONT_BACK}, + {8000, 11025, 16000, 22050, 32000, 44100, 48000})); + testInMix.profiles.push_back( + createProfile(PcmType::INT_24_BIT, + {AudioChannelLayout::LAYOUT_MONO, AudioChannelLayout::LAYOUT_STEREO, + AudioChannelLayout::LAYOUT_FRONT_BACK}, + {8000, 11025, 16000, 22050, 32000, 44100, 48000})); + c.ports.push_back(testInMix); + + c.routes.push_back( + createRoute({testOutMix, testFastOutMix, compressedOffloadOutMix}, testOutDevice)); + c.routes.push_back(createRoute({testOutMix}, headsetOutDevice)); + c.routes.push_back(createRoute({testInDevice, headsetInDevice}, testInMix)); + + c.portConfigs.insert(c.portConfigs.end(), c.initialConfigs.begin(), c.initialConfigs.end()); + + return c; + }(); + return std::make_unique(configuration); +} + +// Bluetooth configuration: +// +// Device ports: +// * "BT A2DP Out", OUT_DEVICE, CONNECTION_BT_A2DP +// - profile PCM 16-bit; STEREO; 44100, 48000, 88200, 96000 +// * "BT A2DP Headphones", OUT_HEADPHONE, CONNECTION_BT_A2DP +// - profile PCM 16-bit; STEREO; 44100, 48000, 88200, 96000 +// * "BT A2DP Speaker", OUT_SPEAKER, CONNECTION_BT_A2DP +// - profile PCM 16-bit; STEREO; 44100, 48000, 88200, 96000 +// * "BT Hearing Aid Out", OUT_HEARING_AID, CONNECTION_WIRELESS +// - no profiles specified +// +// Mix ports: +// * "a2dp output", 1 max open, 1 max active stream +// - no profiles specified +// * "hearing aid output", 1 max open, 1 max active stream +// - profile PCM 16-bit; STEREO; 16000, 24000 +// +// Routes: +// "a2dp 
output" -> "BT A2DP Out" +// "a2dp output" -> "BT A2DP Headphones" +// "a2dp output" -> "BT A2DP Speaker" +// "hearing aid output" -> "BT Hearing Aid Out" +// +// Profiles for device port connected state (when simulating connections): +// * "BT A2DP Out", "BT A2DP Headphones", "BT A2DP Speaker": +// - profile PCM 16-bit; STEREO; 44100, 48000, 88200, 96000 +// * "BT Hearing Aid Out": +// - profile PCM 16-bit; STEREO; 16000, 24000 +// +std::unique_ptr getBluetoothConfiguration() { + static const Configuration configuration = []() { + const std::vector standardPcmAudioProfiles = { + createProfile(PcmType::INT_16_BIT, {AudioChannelLayout::LAYOUT_STEREO}, + {44100, 48000, 88200, 96000})}; + const std::vector hearingAidAudioProfiles = {createProfile( + PcmType::INT_16_BIT, {AudioChannelLayout::LAYOUT_STEREO}, {16000, 24000})}; + Configuration c; + + // Device ports + AudioPort btOutDevice = + createPort(c.nextPortId++, "BT A2DP Out", 0, false, + createDeviceExt(AudioDeviceType::OUT_DEVICE, 0, + AudioDeviceDescription::CONNECTION_BT_A2DP)); + btOutDevice.profiles.insert(btOutDevice.profiles.begin(), standardPcmAudioProfiles.begin(), + standardPcmAudioProfiles.end()); + c.ports.push_back(btOutDevice); + c.connectedProfiles[btOutDevice.id] = standardPcmAudioProfiles; + + AudioPort btOutHeadphone = + createPort(c.nextPortId++, "BT A2DP Headphones", 0, false, + createDeviceExt(AudioDeviceType::OUT_HEADPHONE, 0, + AudioDeviceDescription::CONNECTION_BT_A2DP)); + btOutHeadphone.profiles.insert(btOutHeadphone.profiles.begin(), + standardPcmAudioProfiles.begin(), + standardPcmAudioProfiles.end()); + c.ports.push_back(btOutHeadphone); + c.connectedProfiles[btOutHeadphone.id] = standardPcmAudioProfiles; + + AudioPort btOutSpeaker = + createPort(c.nextPortId++, "BT A2DP Speaker", 0, false, + createDeviceExt(AudioDeviceType::OUT_SPEAKER, 0, + AudioDeviceDescription::CONNECTION_BT_A2DP)); + btOutSpeaker.profiles.insert(btOutSpeaker.profiles.begin(), + standardPcmAudioProfiles.begin(), 
+ standardPcmAudioProfiles.end()); + c.ports.push_back(btOutSpeaker); + c.connectedProfiles[btOutSpeaker.id] = standardPcmAudioProfiles; + + AudioPort btOutHearingAid = + createPort(c.nextPortId++, "BT Hearing Aid Out", 0, false, + createDeviceExt(AudioDeviceType::OUT_HEARING_AID, 0, + AudioDeviceDescription::CONNECTION_WIRELESS)); + c.ports.push_back(btOutHearingAid); + c.connectedProfiles[btOutHearingAid.id] = hearingAidAudioProfiles; + + // Mix ports + AudioPort btOutMix = + createPort(c.nextPortId++, "a2dp output", 0, false, createPortMixExt(1, 1)); + c.ports.push_back(btOutMix); + + AudioPort btHearingOutMix = + createPort(c.nextPortId++, "hearing aid output", 0, false, createPortMixExt(1, 1)); + btHearingOutMix.profiles = hearingAidAudioProfiles; + c.ports.push_back(btHearingOutMix); + + c.routes.push_back(createRoute({btOutMix}, btOutDevice)); + c.routes.push_back(createRoute({btOutMix}, btOutHeadphone)); + c.routes.push_back(createRoute({btOutMix}, btOutSpeaker)); + c.routes.push_back(createRoute({btHearingOutMix}, btOutHearingAid)); + + return c; + }(); + return std::make_unique(configuration); +} + +std::unique_ptr getConfiguration(Module::Type moduleType) { + switch (moduleType) { + case Module::Type::DEFAULT: + return getPrimaryConfiguration(); + case Module::Type::R_SUBMIX: + return getRSubmixConfiguration(); + case Module::Type::STUB: + return getStubConfiguration(); + case Module::Type::USB: + return getUsbConfiguration(); + case Module::Type::BLUETOOTH: + return getBluetoothConfiguration(); + } +} + +} // namespace aidl::android::hardware::audio::core::internal diff --git a/audio/EffectConfig.cpp b/audio/EffectConfig.cpp new file mode 100644 index 0000000..fa12056 --- /dev/null +++ b/audio/EffectConfig.cpp @@ -0,0 +1,357 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#define LOG_TAG "AHAL_EffectConfig" +#include +#include +#include +#include +#include +#include + +#include "effectFactory-impl/EffectConfig.h" + +#ifdef __ANDROID_APEX__ +#include +#endif + +using aidl::android::media::audio::common::AudioDevice; +using aidl::android::media::audio::common::AudioDeviceAddress; +using aidl::android::media::audio::common::AudioDeviceDescription; +using aidl::android::media::audio::common::AudioDeviceType; +using aidl::android::media::audio::common::AudioSource; +using aidl::android::media::audio::common::AudioStreamType; +using aidl::android::media::audio::common::AudioUuid; + +namespace aidl::android::hardware::audio::effect { + +EffectConfig::EffectConfig(const std::string& file) { + tinyxml2::XMLDocument doc; + doc.LoadFile(file.c_str()); + // parse the xml file into maps + if (doc.Error()) { + LOG(ERROR) << __func__ << " tinyxml2 failed to load " << file + << " error: " << doc.ErrorStr(); + return; + } + + auto registerFailure = [&](bool result) { mSkippedElements += result ? 
0 : 1; }; + + for (auto& xmlConfig : getChildren(doc, "audio_effects_conf")) { + // Parse library + for (auto& xmlLibraries : getChildren(xmlConfig, "libraries")) { + for (auto& xmlLibrary : getChildren(xmlLibraries, "library")) { + registerFailure(parseLibrary(xmlLibrary)); + } + } + + // Parse effects + for (auto& xmlEffects : getChildren(xmlConfig, "effects")) { + for (auto& xmlEffect : getChildren(xmlEffects)) { + registerFailure(parseEffect(xmlEffect)); + } + } + + // Parse pre processing chains + for (auto& xmlPreprocess : getChildren(xmlConfig, "preprocess")) { + for (auto& xmlStream : getChildren(xmlPreprocess, "stream")) { + // AudioSource + registerFailure(parseProcessing(Processing::Type::source, xmlStream)); + } + } + + // Parse post processing chains + for (auto& xmlPostprocess : getChildren(xmlConfig, "postprocess")) { + for (auto& xmlStream : getChildren(xmlPostprocess, "stream")) { + // AudioStreamType + registerFailure(parseProcessing(Processing::Type::streamType, xmlStream)); + } + } + + // Parse device effect chains + for (auto& xmlDeviceEffects : getChildren(xmlConfig, "deviceEffects")) { + for (auto& xmlDevice : getChildren(xmlDeviceEffects, "device")) { + // AudioDevice + registerFailure(parseProcessing(Processing::Type::device, xmlDevice)); + } + } + } + LOG(DEBUG) << __func__ << " successfully parsed " << file << ", skipping " << mSkippedElements + << " element(s)"; +} + +std::vector> EffectConfig::getChildren( + const tinyxml2::XMLNode& node, const char* childTag) { + std::vector> children; + for (auto* child = node.FirstChildElement(childTag); child != nullptr; + child = child->NextSiblingElement(childTag)) { + children.emplace_back(*child); + } + return children; +} + +bool EffectConfig::resolveLibrary(const std::string& path, std::string* resolvedPath) { +#ifdef __ANDROID_APEX__ + if constexpr (__ANDROID_VENDOR_API__ >= 202404) { + AApexInfo *apexInfo; + if (AApexInfo_create(&apexInfo) == AAPEXINFO_OK) { + std::string 
apexName(AApexInfo_getName(apexInfo)); + AApexInfo_destroy(apexInfo); + std::string candidatePath("/apex/"); + candidatePath.append(apexName).append(kEffectLibApexPath).append(path); + LOG(DEBUG) << __func__ << " effect lib path " << candidatePath; + if (access(candidatePath.c_str(), R_OK) == 0) { + *resolvedPath = std::move(candidatePath); + return true; + } + } + } else { + LOG(DEBUG) << __func__ << " libapexsupport is not supported"; + } +#endif + + // If audio effects libs are not in vendor apex, locate them in kEffectLibPath + for (auto* libraryDirectory : kEffectLibPath) { + std::string candidatePath = std::string(libraryDirectory) + '/' + path; + if (access(candidatePath.c_str(), R_OK) == 0) { + *resolvedPath = std::move(candidatePath); + return true; + } + } + return false; +} + +bool EffectConfig::parseLibrary(const tinyxml2::XMLElement& xml) { + const char* name = xml.Attribute("name"); + RETURN_VALUE_IF(!name, false, "noNameAttribute"); + const char* path = xml.Attribute("path"); + RETURN_VALUE_IF(!path, false, "noPathAttribute"); + + std::string resolvedPath; + if (!resolveLibrary(path, &resolvedPath)) { + LOG(ERROR) << __func__ << " can't find " << path; + return false; + } + mLibraryMap[name] = resolvedPath; + LOG(DEBUG) << __func__ << " " << name << " : " << resolvedPath; + return true; +} + +bool EffectConfig::parseEffect(const tinyxml2::XMLElement& xml) { + struct EffectLibraries effectLibraries; + std::vector libraries; + std::string name = xml.Attribute("name"); + RETURN_VALUE_IF(name == "", false, "effectsNoName"); + + LOG(VERBOSE) << __func__ << dump(xml); + struct Library library; + if (std::strcmp(xml.Name(), "effectProxy") == 0) { + // proxy lib and uuid + RETURN_VALUE_IF(!parseLibrary(xml, library, true), false, "parseProxyLibFailed"); + effectLibraries.proxyLibrary = library; + // proxy effect libs and UUID + auto xmlProxyLib = xml.FirstChildElement(); + RETURN_VALUE_IF(!xmlProxyLib, false, "noLibForProxy"); + while (xmlProxyLib) { + 
struct Library tempLibrary; + RETURN_VALUE_IF(!parseLibrary(*xmlProxyLib, tempLibrary), false, + "parseEffectLibFailed"); + libraries.push_back(std::move(tempLibrary)); + xmlProxyLib = xmlProxyLib->NextSiblingElement(); + } + } else { + // expect only one library if not proxy + RETURN_VALUE_IF(!parseLibrary(xml, library), false, "parseEffectLibFailed"); + libraries.push_back(std::move(library)); + } + + effectLibraries.libraries = std::move(libraries); + mEffectsMap[name] = std::move(effectLibraries); + return true; +} + +bool EffectConfig::parseLibrary(const tinyxml2::XMLElement& xml, struct Library& library, + bool isProxy) { + // Retrieve library name only if not effectProxy element + if (!isProxy) { + const char* name = xml.Attribute("library"); + RETURN_VALUE_IF(!name, false, "noLibraryAttribute"); + library.name = name; + } + + const char* uuidStr = xml.Attribute("uuid"); + RETURN_VALUE_IF(!uuidStr, false, "noUuidAttribute"); + library.uuid = stringToUuid(uuidStr); + if (const char* typeUuidStr = xml.Attribute("type")) { + library.type = stringToUuid(typeUuidStr); + } + RETURN_VALUE_IF((library.uuid == getEffectUuidZero()), false, "invalidUuidAttribute"); + + LOG(VERBOSE) << __func__ << (isProxy ? " proxy " : library.name) << " : uuid " + << ::android::audio::utils::toString(library.uuid) + << (library.type.has_value() + ? ::android::audio::utils::toString(library.type.value()) + : ""); + return true; +} + +std::optional EffectConfig::stringToProcessingType(Processing::Type::Tag typeTag, + const std::string& type, + const std::string& address) { + // see list of audio stream types in audio_stream_type_t: + // system/media/audio/include/system/audio_effects/audio_effects_conf.h + // AUDIO_STREAM_DEFAULT_TAG is not listed here because according to SYS_RESERVED_DEFAULT in + // AudioStreamType.aidl: "Value reserved for system use only. HALs must never return this value + // to the system or accept it from the system". 
+ static const std::map sAudioStreamTypeTable = { + {AUDIO_STREAM_VOICE_CALL_TAG, AudioStreamType::VOICE_CALL}, + {AUDIO_STREAM_SYSTEM_TAG, AudioStreamType::SYSTEM}, + {AUDIO_STREAM_RING_TAG, AudioStreamType::RING}, + {AUDIO_STREAM_MUSIC_TAG, AudioStreamType::MUSIC}, + {AUDIO_STREAM_ALARM_TAG, AudioStreamType::ALARM}, + {AUDIO_STREAM_NOTIFICATION_TAG, AudioStreamType::NOTIFICATION}, + {AUDIO_STREAM_BLUETOOTH_SCO_TAG, AudioStreamType::BLUETOOTH_SCO}, + {AUDIO_STREAM_ENFORCED_AUDIBLE_TAG, AudioStreamType::ENFORCED_AUDIBLE}, + {AUDIO_STREAM_DTMF_TAG, AudioStreamType::DTMF}, + {AUDIO_STREAM_TTS_TAG, AudioStreamType::TTS}, + {AUDIO_STREAM_ASSISTANT_TAG, AudioStreamType::ASSISTANT}}; + + // see list of audio sources in audio_source_t: + // system/media/audio/include/system/audio_effects/audio_effects_conf.h + static const std::map sAudioSourceTable = { + {MIC_SRC_TAG, AudioSource::MIC}, + {VOICE_UL_SRC_TAG, AudioSource::VOICE_UPLINK}, + {VOICE_DL_SRC_TAG, AudioSource::VOICE_DOWNLINK}, + {VOICE_CALL_SRC_TAG, AudioSource::VOICE_CALL}, + {CAMCORDER_SRC_TAG, AudioSource::CAMCORDER}, + {VOICE_REC_SRC_TAG, AudioSource::VOICE_RECOGNITION}, + {VOICE_COMM_SRC_TAG, AudioSource::VOICE_COMMUNICATION}, + {REMOTE_SUBMIX_SRC_TAG, AudioSource::REMOTE_SUBMIX}, + {UNPROCESSED_SRC_TAG, AudioSource::UNPROCESSED}, + {VOICE_PERFORMANCE_SRC_TAG, AudioSource::VOICE_PERFORMANCE}}; + + if (typeTag == Processing::Type::streamType) { + auto typeIter = sAudioStreamTypeTable.find(type); + if (typeIter != sAudioStreamTypeTable.end()) { + return typeIter->second; + } + } else if (typeTag == Processing::Type::source) { + auto typeIter = sAudioSourceTable.find(type); + if (typeIter != sAudioSourceTable.end()) { + return typeIter->second; + } + } else if (typeTag == Processing::Type::device) { + audio_devices_t deviceType; + if (!audio_device_from_string(type.c_str(), &deviceType)) { + LOG(ERROR) << __func__ << "DeviceEffect: invalid type " << type; + return std::nullopt; + } + auto ret = 
::aidl::android::legacy2aidl_audio_device_AudioDevice(deviceType, address); + if (!ret.ok()) { + LOG(ERROR) << __func__ << "DeviceEffect: Failed to get AudioDevice from type " + << deviceType << ", address " << address; + return std::nullopt; + } + return ret.value(); + } + + return std::nullopt; +} + +bool EffectConfig::parseProcessing(Processing::Type::Tag typeTag, const tinyxml2::XMLElement& xml) { + LOG(VERBOSE) << __func__ << dump(xml); + const char* typeStr = xml.Attribute("type"); + const char* addressStr = xml.Attribute("address"); + // For device effect, device address is optional, match will be done for the given device type + // with empty address. + auto aidlType = stringToProcessingType(typeTag, typeStr, addressStr ? addressStr : ""); + RETURN_VALUE_IF(!aidlType.has_value(), false, "illegalStreamType"); + RETURN_VALUE_IF(0 != mProcessingMap.count(aidlType.value()), false, "duplicateStreamType"); + + for (auto& apply : getChildren(xml, "apply")) { + const char* name = apply.get().Attribute("effect"); + if (mEffectsMap.find(name) == mEffectsMap.end()) { + LOG(ERROR) << __func__ << " effect " << name << " doesn't exist, skipping"; + continue; + } + RETURN_VALUE_IF(!name, false, "noEffectAttribute"); + mProcessingMap[aidlType.value()].emplace_back(mEffectsMap[name]); + } + return true; +} + +const std::map>& +EffectConfig::getProcessingMap() const { + return mProcessingMap; +} + +bool EffectConfig::findUuid(const std::pair& effectElem, + AudioUuid* uuid) { +// Difference from EFFECT_TYPE_LIST_DEF, there could be multiple name mapping to same Effect Type +#define EFFECT_XML_TYPE_LIST_DEF(V) \ + V("acoustic_echo_canceler", AcousticEchoCanceler) \ + V("automatic_gain_control_v1", AutomaticGainControlV1) \ + V("automatic_gain_control_v2", AutomaticGainControlV2) \ + V("bassboost", BassBoost) \ + V("downmix", Downmix) \ + V("dynamics_processing", DynamicsProcessing) \ + V("equalizer", Equalizer) \ + V("extensioneffect", Extension) \ + V("haptic_generator", 
HapticGenerator) \ + V("loudness_enhancer", LoudnessEnhancer) \ + V("env_reverb", EnvReverb) \ + V("reverb_env_aux", EnvReverb) \ + V("reverb_env_ins", EnvReverb) \ + V("preset_reverb", PresetReverb) \ + V("reverb_pre_aux", PresetReverb) \ + V("reverb_pre_ins", PresetReverb) \ + V("noise_suppression", NoiseSuppression) \ + V("spatializer", Spatializer) \ + V("virtualizer", Virtualizer) \ + V("visualizer", Visualizer) \ + V("volume", Volume) + +#define GENERATE_MAP_ENTRY_V(s, symbol) {s, &getEffectTypeUuid##symbol}, + + const std::string xmlEffectName = effectElem.first; + typedef const AudioUuid& (*UuidGetter)(void); + static const std::map uuidMap{ + // std::make_pair("s", &getEffectTypeUuidExtension)}; + {EFFECT_XML_TYPE_LIST_DEF(GENERATE_MAP_ENTRY_V)}}; + if (auto it = uuidMap.find(xmlEffectName); it != uuidMap.end()) { + *uuid = (*it->second)(); + return true; + } + + const auto& libs = effectElem.second.libraries; + for (const auto& lib : libs) { + if (lib.type.has_value()) { + *uuid = lib.type.value(); + return true; + } + } + return false; +} + +const char* EffectConfig::dump(const tinyxml2::XMLElement& element, + tinyxml2::XMLPrinter&& printer) const { + element.Accept(&printer); + return printer.CStr(); +} + +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/EffectContext.cpp b/audio/EffectContext.cpp new file mode 100644 index 0000000..b354dd1 --- /dev/null +++ b/audio/EffectContext.cpp @@ -0,0 +1,275 @@ +/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#define LOG_TAG "AHAL_EffectContext" +#include "effect-impl/EffectContext.h" +#include "include/effect-impl/EffectTypes.h" + +using aidl::android::hardware::audio::common::getChannelCount; +using aidl::android::hardware::audio::common::getFrameSizeInBytes; +using aidl::android::hardware::audio::effect::IEffect; +using aidl::android::hardware::audio::effect::kReopenSupportedVersion; +using aidl::android::media::audio::common::PcmType; +using ::android::hardware::EventFlag; + +namespace aidl::android::hardware::audio::effect { + +EffectContext::EffectContext(size_t statusDepth, const Parameter::Common& common) { + LOG_ALWAYS_FATAL_IF(RetCode::SUCCESS != setCommon(common), "illegalCommonParameter"); + + // in/outBuffer size in float (FMQ data format defined for DataMQ) + size_t inBufferSizeInFloat = common.input.frameCount * mInputFrameSize / sizeof(float); + size_t outBufferSizeInFloat = common.output.frameCount * mOutputFrameSize / sizeof(float); + + // only status FMQ use the EventFlag + mStatusMQ = std::make_shared(statusDepth, true /*configureEventFlagWord*/); + mInputMQ = std::make_shared(inBufferSizeInFloat); + mOutputMQ = std::make_shared(outBufferSizeInFloat); + + if (!mStatusMQ->isValid() || !mInputMQ->isValid() || !mOutputMQ->isValid()) { + LOG(ERROR) << __func__ << " created invalid FMQ, statusMQ: " << mStatusMQ->isValid() + << " inputMQ: " << mInputMQ->isValid() << " outputMQ: " << mOutputMQ->isValid(); + } + + ::android::status_t status = + EventFlag::createEventFlag(mStatusMQ->getEventFlagWord(), &mEfGroup); + LOG_ALWAYS_FATAL_IF(status != ::android::OK || !mEfGroup, " create EventFlagGroup failed "); + mWorkBuffer.resize(std::max(inBufferSizeInFloat, outBufferSizeInFloat)); +} + +// reset buffer status by abandon input data in FMQ +void EffectContext::resetBuffer() { + auto buffer = static_cast(mWorkBuffer.data()); + if 
(mStatusMQ) { + std::vector status(mStatusMQ->availableToRead()); + } + if (mInputMQ) { + mInputMQ->read(buffer, mInputMQ->availableToRead()); + } +} + +void EffectContext::dupeFmqWithReopen(IEffect::OpenEffectReturn* effectRet) { + const size_t inBufferSizeInFloat = mCommon.input.frameCount * mInputFrameSize / sizeof(float); + const size_t outBufferSizeInFloat = + mCommon.output.frameCount * mOutputFrameSize / sizeof(float); + const size_t bufferSize = std::max(inBufferSizeInFloat, outBufferSizeInFloat); + if (!mInputMQ) { + mInputMQ = std::make_shared(inBufferSizeInFloat); + } + if (!mOutputMQ) { + mOutputMQ = std::make_shared(outBufferSizeInFloat); + } + if (mWorkBuffer.size() != bufferSize) { + mWorkBuffer.resize(bufferSize); + } + dupeFmq(effectRet); +} + +void EffectContext::dupeFmq(IEffect::OpenEffectReturn* effectRet) { + if (effectRet && mStatusMQ && mInputMQ && mOutputMQ) { + effectRet->statusMQ = mStatusMQ->dupeDesc(); + effectRet->inputDataMQ = mInputMQ->dupeDesc(); + effectRet->outputDataMQ = mOutputMQ->dupeDesc(); + } +} + +float* EffectContext::getWorkBuffer() { + return static_cast(mWorkBuffer.data()); +} + +size_t EffectContext::getWorkBufferSize() const { + return mWorkBuffer.size(); +} + +std::shared_ptr EffectContext::getStatusFmq() const { + return mStatusMQ; +} + +std::shared_ptr EffectContext::getInputDataFmq() const { + return mInputMQ; +} + +std::shared_ptr EffectContext::getOutputDataFmq() const { + return mOutputMQ; +} + +size_t EffectContext::getInputFrameSize() const { + return mInputFrameSize; +} + +size_t EffectContext::getOutputFrameSize() const { + return mOutputFrameSize; +} + +int EffectContext::getSessionId() const { + return mCommon.session; +} + +int EffectContext::getIoHandle() const { + return mCommon.ioHandle; +} + +RetCode EffectContext::setOutputDevice( + const std::vector& device) { + mOutputDevice = device; + return RetCode::SUCCESS; +} + +std::vector +EffectContext::getOutputDevice() { + return mOutputDevice; +} + 
+RetCode EffectContext::setAudioMode(const aidl::android::media::audio::common::AudioMode& mode) { + mMode = mode; + return RetCode::SUCCESS; +} +aidl::android::media::audio::common::AudioMode EffectContext::getAudioMode() { + return mMode; +} + +RetCode EffectContext::setAudioSource( + const aidl::android::media::audio::common::AudioSource& source) { + mSource = source; + return RetCode::SUCCESS; +} + +aidl::android::media::audio::common::AudioSource EffectContext::getAudioSource() { + return mSource; +} + +RetCode EffectContext::setVolumeStereo(const Parameter::VolumeStereo& volumeStereo) { + mVolumeStereo = volumeStereo; + return RetCode::SUCCESS; +} + +Parameter::VolumeStereo EffectContext::getVolumeStereo() { + return mVolumeStereo; +} + +RetCode EffectContext::setCommon(const Parameter::Common& common) { + auto& input = common.input; + auto& output = common.output; + + if (input.base.format.pcm != aidl::android::media::audio::common::PcmType::FLOAT_32_BIT || + output.base.format.pcm != aidl::android::media::audio::common::PcmType::FLOAT_32_BIT) { + LOG(ERROR) << __func__ << " illegal IO, input " + << ::android::internal::ToString(input.base.format) << ", output " + << ::android::internal::ToString(output.base.format); + return RetCode::ERROR_ILLEGAL_PARAMETER; + } + + if (auto ret = updateIOFrameSize(common); ret != RetCode::SUCCESS) { + return ret; + } + + mInputChannelCount = getChannelCount(input.base.channelMask); + mOutputChannelCount = getChannelCount(output.base.channelMask); + if (mInputChannelCount == 0 || mOutputChannelCount == 0) { + LOG(ERROR) << __func__ << " illegal channel count input " << mInputChannelCount + << ", output " << mOutputChannelCount; + return RetCode::ERROR_ILLEGAL_PARAMETER; + } + + mCommon = common; + return RetCode::SUCCESS; +} + +Parameter::Common EffectContext::getCommon() { + return mCommon; +} + +EventFlag* EffectContext::getStatusEventFlag() { + return mEfGroup; +} + +RetCode EffectContext::updateIOFrameSize(const 
Parameter::Common& common) { + const auto prevInputFrameSize = mInputFrameSize; + const auto prevOutputFrameSize = mOutputFrameSize; + mInputFrameSize = ::aidl::android::hardware::audio::common::getFrameSizeInBytes( + common.input.base.format, common.input.base.channelMask); + mOutputFrameSize = ::aidl::android::hardware::audio::common::getFrameSizeInBytes( + common.output.base.format, common.output.base.channelMask); + + // workBuffer and data MQ not allocated yet, no need to update + if (mWorkBuffer.size() == 0 || !mInputMQ || !mOutputMQ) { + return RetCode::SUCCESS; + } + // IEffect::reopen introduced in android.hardware.audio.effect-V2 + if (mVersion < kReopenSupportedVersion) { + LOG(WARNING) << __func__ << " skipped for HAL version " << mVersion; + return RetCode::SUCCESS; + } + bool needUpdateMq = false; + if (mInputFrameSize != prevInputFrameSize || + mCommon.input.frameCount != common.input.frameCount) { + mInputMQ.reset(); + needUpdateMq = true; + } + if (mOutputFrameSize != prevOutputFrameSize || + mCommon.output.frameCount != common.output.frameCount) { + mOutputMQ.reset(); + needUpdateMq = true; + } + + if (needUpdateMq) { + return notifyDataMqUpdate(); + } + return RetCode::SUCCESS; +} + +RetCode EffectContext::notifyDataMqUpdate() { + if (!mEfGroup) { + LOG(ERROR) << __func__ << ": invalid EventFlag group"; + return RetCode::ERROR_EVENT_FLAG_ERROR; + } + + if (const auto ret = mEfGroup->wake(kEventFlagDataMqUpdate); ret != ::android::OK) { + LOG(ERROR) << __func__ << ": wake failure with ret " << ret; + return RetCode::ERROR_EVENT_FLAG_ERROR; + } + LOG(VERBOSE) << __func__ << " : signal client for reopen"; + return RetCode::SUCCESS; +} + +RetCode EffectContext::enable() { + return RetCode::SUCCESS; +} + +RetCode EffectContext::disable() { + return RetCode::SUCCESS; +} + +RetCode EffectContext::reset() { + return RetCode::SUCCESS; +} + +RetCode EffectContext::startDraining() { + mIsDraining = true; + return RetCode::SUCCESS; +} + +RetCode 
EffectContext::finishDraining() { + mIsDraining = false; + return RetCode::SUCCESS; +} + +bool EffectContext::isDraining() { + return mIsDraining; +} + +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/EffectFactory.cpp b/audio/EffectFactory.cpp new file mode 100644 index 0000000..e3894f9 --- /dev/null +++ b/audio/EffectFactory.cpp @@ -0,0 +1,300 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#include +#include +#include +#include +#include +#define LOG_TAG "AHAL_EffectFactory" + +#include +#include +#include +#include +#include + +#include "effect-impl/EffectTypes.h" +#include "effectFactory-impl/EffectFactory.h" + +using aidl::android::media::audio::common::AudioUuid; + +namespace aidl::android::hardware::audio::effect { + +Factory::Factory(const std::string& file) : mConfig(EffectConfig(file)) { + LOG(DEBUG) << __func__ << " with config file: " << file; + loadEffectLibs(); +} + +Factory::~Factory() { + if (auto count = mEffectMap.size()) { + LOG(WARNING) << __func__ << " remaining " << count + << " effect instances not destroyed indicating resource leak!"; + for (const auto& it : mEffectMap) { + if (auto spEffect = it.first.lock()) { + LOG(WARNING) << __func__ << " erase remaining instance UUID " + << ::android::audio::utils::toString(it.second.first); + destroyEffectImpl_l(spEffect); + } + } + } +} + +ndk::ScopedAStatus 
Factory::getDescriptorWithUuid_l(const AudioUuid& uuid, Descriptor* desc) { + RETURN_IF(!desc, EX_NULL_POINTER, "nullDescriptor"); + + if (mEffectLibMap.count(uuid)) { + auto& entry = mEffectLibMap[uuid]; + getDlSyms_l(entry); + auto& libInterface = std::get(entry); + RETURN_IF(!libInterface || !libInterface->queryEffectFunc, EX_NULL_POINTER, + "dlNullQueryEffectFunc"); + RETURN_IF_BINDER_EXCEPTION(libInterface->queryEffectFunc(&uuid, desc)); + return ndk::ScopedAStatus::ok(); + } + + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); +} + +ndk::ScopedAStatus Factory::queryEffects(const std::optional& in_type_uuid, + const std::optional& in_impl_uuid, + const std::optional& in_proxy_uuid, + std::vector* _aidl_return) { + std::lock_guard lg(mMutex); + // get the matching list + std::vector idList; + std::copy_if(mIdentitySet.begin(), mIdentitySet.end(), std::back_inserter(idList), + [&](auto& id) { + return (!in_type_uuid.has_value() || in_type_uuid.value() == id.type) && + (!in_impl_uuid.has_value() || in_impl_uuid.value() == id.uuid) && + (!in_proxy_uuid.has_value() || + (id.proxy.has_value() && in_proxy_uuid.value() == id.proxy.value())); + }); + // query through the matching list + for (const auto& id : idList) { + if (mEffectLibMap.count(id.uuid)) { + Descriptor desc; + RETURN_IF_ASTATUS_NOT_OK(getDescriptorWithUuid_l(id.uuid, &desc), + "getDescriptorFailed"); + // update proxy UUID with information from config xml + desc.common.id.proxy = id.proxy; + _aidl_return->emplace_back(std::move(desc)); + } + } + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Factory::queryProcessing(const std::optional& in_type, + std::vector* _aidl_return) { + std::lock_guard lg(mMutex); + const auto& processings = mConfig.getProcessingMap(); + // Processing stream type + for (const auto& procIter : processings) { + if (!in_type.has_value() || in_type.value() == procIter.first) { + Processing process = {.type = procIter.first /* Processing::Type */}; + for 
(const auto& libs : procIter.second /* std::vector */) { + for (const auto& lib : libs.libraries /* std::vector */) { + Descriptor desc; + if (libs.proxyLibrary.has_value()) { + desc.common.id.proxy = libs.proxyLibrary.value().uuid; + } + RETURN_IF_ASTATUS_NOT_OK(getDescriptorWithUuid_l(lib.uuid, &desc), + "getDescriptorFailed"); + process.ids.emplace_back(desc); + } + } + _aidl_return->emplace_back(process); + } + } + + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Factory::createEffect(const AudioUuid& in_impl_uuid, + std::shared_ptr* _aidl_return) { + LOG(DEBUG) << __func__ << ": UUID " << ::android::audio::utils::toString(in_impl_uuid); + std::lock_guard lg(mMutex); + if (mEffectLibMap.count(in_impl_uuid)) { + auto& entry = mEffectLibMap[in_impl_uuid]; + getDlSyms_l(entry); + + auto& libInterface = std::get(entry); + RETURN_IF(!libInterface || !libInterface->createEffectFunc, EX_NULL_POINTER, + "dlNullcreateEffectFunc"); + std::shared_ptr effectSp; + RETURN_IF_BINDER_EXCEPTION(libInterface->createEffectFunc(&in_impl_uuid, &effectSp)); + if (!effectSp) { + LOG(WARNING) << __func__ << ": library created null instance without return error!"; + return ndk::ScopedAStatus::fromExceptionCode(EX_TRANSACTION_FAILED); + } + *_aidl_return = effectSp; + ndk::SpAIBinder effectBinder = effectSp->asBinder(); + AIBinder_setMinSchedulerPolicy(effectBinder.get(), SCHED_NORMAL, ANDROID_PRIORITY_AUDIO); + AIBinder_setInheritRt(effectBinder.get(), true); + mEffectMap[std::weak_ptr(effectSp)] = + std::make_pair(in_impl_uuid, std::move(effectBinder)); + return ndk::ScopedAStatus::ok(); + } else { + LOG(ERROR) << __func__ << ": library doesn't exist"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Factory::destroyEffectImpl_l(const std::shared_ptr& in_handle) { + std::weak_ptr wpHandle(in_handle); + // find the effect entry with key (std::weak_ptr) + if (auto effectIt = 
mEffectMap.find(wpHandle); effectIt != mEffectMap.end()) { + auto& uuid = effectIt->second.first; + // find implementation library with UUID + if (auto libIt = mEffectLibMap.find(uuid); libIt != mEffectLibMap.end()) { + auto& interface = std::get(libIt->second); + RETURN_IF(!interface || !interface->destroyEffectFunc, EX_NULL_POINTER, + "dlNulldestroyEffectFunc"); + RETURN_IF_BINDER_EXCEPTION(interface->destroyEffectFunc(in_handle)); + } else { + LOG(ERROR) << __func__ << ": UUID " << ::android::audio::utils::toString(uuid) + << " does not exist in libMap!"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + mEffectMap.erase(effectIt); + return ndk::ScopedAStatus::ok(); + } else { + LOG(ERROR) << __func__ << ": instance " << in_handle << " does not exist!"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } +} + +// go over the map and cleanup all expired weak_ptrs. +void Factory::cleanupEffectMap_l() { + for (auto it = mEffectMap.begin(); it != mEffectMap.end();) { + if (nullptr == it->first.lock()) { + it = mEffectMap.erase(it); + } else { + ++it; + } + } +} + +ndk::ScopedAStatus Factory::destroyEffect(const std::shared_ptr& in_handle) { + std::lock_guard lg(mMutex); + ndk::ScopedAStatus status = destroyEffectImpl_l(in_handle); + // always do the cleanup + cleanupEffectMap_l(); + return status; +} + +bool Factory::openEffectLibrary(const AudioUuid& impl, + const std::string& path) NO_THREAD_SAFETY_ANALYSIS { + std::function dlClose = [](void* handle) -> void { + if (handle && dlclose(handle)) { + LOG(ERROR) << "dlclose failed " << dlerror(); + } + }; + + auto libHandle = + std::unique_ptr{dlopen(path.c_str(), RTLD_LAZY), dlClose}; + if (!libHandle) { + LOG(ERROR) << __func__ << ": dlopen failed, err: " << dlerror(); + return false; + } + + LOG(DEBUG) << __func__ << " dlopen lib: " << path + << "\nimpl:" << ::android::audio::utils::toString(impl) << "\nhandle:" << libHandle; + auto interface = new 
effect_dl_interface_s{nullptr, nullptr, nullptr}; + mEffectLibMap.insert( + {impl, + std::make_tuple(std::move(libHandle), + std::unique_ptr(interface), path)}); + return true; +} + +void Factory::createIdentityWithConfig( + const EffectConfig::Library& configLib, const AudioUuid& typeUuid, + const std::optional proxyUuid) NO_THREAD_SAFETY_ANALYSIS { + static const auto& libMap = mConfig.getLibraryMap(); + const std::string& libName = configLib.name; + if (auto path = libMap.find(libName); path != libMap.end()) { + Descriptor::Identity id; + id.type = typeUuid; + id.uuid = configLib.uuid; + id.proxy = proxyUuid; + LOG(WARNING) << __func__ << " loading lib " << path->second << ": typeUuid " + << ::android::audio::utils::toString(id.type) << "\nimplUuid " + << ::android::audio::utils::toString(id.uuid) << " proxyUuid " + << (proxyUuid.has_value() + ? ::android::audio::utils::toString(proxyUuid.value()) + : "null"); + if (openEffectLibrary(id.uuid, path->second)) { + mIdentitySet.insert(std::move(id)); + } + } else { + LOG(ERROR) << __func__ << ": library " << libName << " not exist!"; + return; + } +} + +void Factory::loadEffectLibs() { + const auto& configEffectsMap = mConfig.getEffectsMap(); + for (const auto& configEffects : configEffectsMap) { + if (AudioUuid type; EffectConfig::findUuid(configEffects /* xml effect */, &type)) { + const auto& configLibs = configEffects.second; + std::optional proxyUuid; + if (configLibs.proxyLibrary.has_value()) { + const auto& proxyLib = configLibs.proxyLibrary.value(); + proxyUuid = proxyLib.uuid; + } + for (const auto& configLib : configLibs.libraries) { + createIdentityWithConfig(configLib, type, proxyUuid); + } + } else { + LOG(WARNING) << __func__ << ": can not find type UUID for effect " + << configEffects.first << " skipping!"; + } + } +} + +void Factory::getDlSyms_l(DlEntry& entry) { + auto& dlHandle = std::get(entry); + RETURN_VALUE_IF(!dlHandle, void(), "dlNullHandle"); + // Get the reference of the DL interfaces in 
library map tuple. + auto& dlInterface = std::get(entry); + // return if interface already exist + if (!dlInterface->createEffectFunc) { + dlInterface->createEffectFunc = (EffectCreateFunctor)dlsym(dlHandle.get(), "createEffect"); + } + if (!dlInterface->queryEffectFunc) { + dlInterface->queryEffectFunc = (EffectQueryFunctor)dlsym(dlHandle.get(), "queryEffect"); + } + if (!dlInterface->destroyEffectFunc) { + dlInterface->destroyEffectFunc = + (EffectDestroyFunctor)dlsym(dlHandle.get(), "destroyEffect"); + } + + if (!dlInterface->createEffectFunc || !dlInterface->destroyEffectFunc || + !dlInterface->queryEffectFunc) { + LOG(ERROR) << __func__ << ": create (" << dlInterface->createEffectFunc << "), query (" + << dlInterface->queryEffectFunc << "), or destroy (" + << dlInterface->destroyEffectFunc + << ") not exist in library: " << std::get(entry) + << " handle: " << dlHandle << " with dlerror: " << dlerror(); + return; + } +} + +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/EffectImpl.cpp b/audio/EffectImpl.cpp new file mode 100644 index 0000000..97f7286 --- /dev/null +++ b/audio/EffectImpl.cpp @@ -0,0 +1,418 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#define ATRACE_TAG ATRACE_TAG_AUDIO +#define LOG_TAG "AHAL_EffectImpl" +#include +#include "effect-impl/EffectImpl.h" +#include "effect-impl/EffectTypes.h" +#include "include/effect-impl/EffectTypes.h" + +using aidl::android::hardware::audio::effect::CommandId; +using aidl::android::hardware::audio::effect::Descriptor; +using aidl::android::hardware::audio::effect::IEffect; +using aidl::android::hardware::audio::effect::kDestroyAnyStateSupportedVersion; +using aidl::android::hardware::audio::effect::kEventFlagDataMqNotEmpty; +using aidl::android::hardware::audio::effect::kEventFlagNotEmpty; +using aidl::android::hardware::audio::effect::kReopenSupportedVersion; +using aidl::android::hardware::audio::effect::State; +using aidl::android::media::audio::common::PcmType; +using ::android::hardware::EventFlag; + +extern "C" binder_exception_t destroyEffect(const std::shared_ptr& instanceSp) { + if (!instanceSp) { + LOG(ERROR) << __func__ << " nullptr"; + return EX_ILLEGAL_ARGUMENT; + } + + Descriptor desc; + ndk::ScopedAStatus status = instanceSp->getDescriptor(&desc); + if (!status.isOk()) { + LOG(ERROR) << __func__ << " instance " << instanceSp.get() + << " failed to get descriptor, status: " << status.getDescription(); + return EX_ILLEGAL_STATE; + } + + State state; + status = instanceSp->getState(&state); + if (!status.isOk()) { + LOG(ERROR) << __func__ << " " << desc.common.name << " instance " << instanceSp.get() + << " in state: " << toString(state) << ", status: " << status.getDescription(); + return EX_ILLEGAL_STATE; + } + + int effectVersion = 0; + if (!instanceSp->getInterfaceVersion(&effectVersion).isOk()) { + LOG(WARNING) << __func__ << " " << desc.common.name << " failed to get interface version"; + } + + if (effectVersion < kDestroyAnyStateSupportedVersion) { + if (State::INIT != state) { + LOG(ERROR) << __func__ << " " << desc.common.name << " can not destroy instance " + << instanceSp.get() << " in state: " << toString(state); + return 
EX_ILLEGAL_STATE; + } + } else { + instanceSp->command(CommandId::RESET); + instanceSp->close(); + } + + LOG(DEBUG) << __func__ << " " << desc.common.name << " instance " << instanceSp.get() + << " destroyed"; + return EX_NONE; +} + +namespace aidl::android::hardware::audio::effect { + +ndk::ScopedAStatus EffectImpl::open(const Parameter::Common& common, + const std::optional& specific, + OpenEffectReturn* ret) { + // effect only support 32bits float + RETURN_IF(common.input.base.format.pcm != common.output.base.format.pcm || + common.input.base.format.pcm != PcmType::FLOAT_32_BIT, + EX_ILLEGAL_ARGUMENT, "dataMustBe32BitsFloat"); + + std::lock_guard lg(mImplMutex); + RETURN_OK_IF(mState != State::INIT); + mImplContext = createContext(common); + RETURN_IF(!mImplContext, EX_NULL_POINTER, "nullContext"); + + RETURN_IF(!getInterfaceVersion(&mVersion).isOk(), EX_UNSUPPORTED_OPERATION, + "FailedToGetInterfaceVersion"); + mImplContext->setVersion(mVersion); + mEventFlag = mImplContext->getStatusEventFlag(); + mDataMqNotEmptyEf = + mVersion >= kReopenSupportedVersion ? 
kEventFlagDataMqNotEmpty : kEventFlagNotEmpty; + + if (specific.has_value()) { + RETURN_IF_ASTATUS_NOT_OK(setParameterSpecific(specific.value()), "setSpecParamErr"); + } + + mState = State::IDLE; + mImplContext->dupeFmq(ret); + RETURN_IF(createThread(getEffectNameWithVersion()) != RetCode::SUCCESS, + EX_UNSUPPORTED_OPERATION, "FailedToCreateWorker"); + LOG(INFO) << getEffectNameWithVersion() << __func__; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus EffectImpl::reopen(OpenEffectReturn* ret) { + std::lock_guard lg(mImplMutex); + RETURN_IF(mState == State::INIT, EX_ILLEGAL_STATE, "alreadyClosed"); + + RETURN_IF(!mImplContext, EX_NULL_POINTER, "nullContext"); + mImplContext->dupeFmqWithReopen(ret); + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus EffectImpl::close() { + { + std::lock_guard lg(mImplMutex); + RETURN_OK_IF(mState == State::INIT); + RETURN_IF(mState == State::PROCESSING, EX_ILLEGAL_STATE, "closeAtProcessing"); + mState = State::INIT; + } + + RETURN_IF(notifyEventFlag(mDataMqNotEmptyEf) != RetCode::SUCCESS, EX_ILLEGAL_STATE, + "notifyEventFlagNotEmptyFailed"); + // stop the worker thread, ignore the return code + RETURN_IF(destroyThread() != RetCode::SUCCESS, EX_UNSUPPORTED_OPERATION, + "FailedToDestroyWorker"); + + { + std::lock_guard lg(mImplMutex); + releaseContext(); + mImplContext.reset(); + } + + LOG(INFO) << getEffectNameWithVersion() << __func__; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus EffectImpl::setParameter(const Parameter& param) { + std::lock_guard lg(mImplMutex); + LOG(VERBOSE) << getEffectNameWithVersion() << __func__ << " with: " << param.toString(); + + const auto& tag = param.getTag(); + switch (tag) { + case Parameter::common: + case Parameter::deviceDescription: + case Parameter::mode: + case Parameter::source: + FALLTHROUGH_INTENDED; + case Parameter::volumeStereo: + return setParameterCommon(param); + case Parameter::specific: { + return setParameterSpecific(param.get()); + } + default: { + 
LOG(ERROR) << getEffectNameWithVersion() << __func__ << " unsupportedParameterTag " + << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT, + "ParameterNotSupported"); + } + } +} + +ndk::ScopedAStatus EffectImpl::getParameter(const Parameter::Id& id, Parameter* param) { + std::lock_guard lg(mImplMutex); + switch (id.getTag()) { + case Parameter::Id::commonTag: { + RETURN_IF_ASTATUS_NOT_OK(getParameterCommon(id.get(), param), + "CommonParamNotSupported"); + break; + } + case Parameter::Id::vendorEffectTag: + FALLTHROUGH_INTENDED; + default: { + Parameter::Specific specific; + RETURN_IF_ASTATUS_NOT_OK(getParameterSpecific(id, &specific), "SpecParamNotSupported"); + param->set(specific); + break; + } + } + LOG(VERBOSE) << getEffectNameWithVersion() << __func__ << id.toString() << param->toString(); + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus EffectImpl::setParameterCommon(const Parameter& param) { + RETURN_IF(!mImplContext, EX_NULL_POINTER, "nullContext"); + + const auto& tag = param.getTag(); + switch (tag) { + case Parameter::common: + RETURN_IF(mImplContext->setCommon(param.get()) != RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "setCommFailed"); + break; + case Parameter::deviceDescription: + RETURN_IF(mImplContext->setOutputDevice(param.get()) != + RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "setDeviceFailed"); + break; + case Parameter::mode: + RETURN_IF(mImplContext->setAudioMode(param.get()) != RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "setModeFailed"); + break; + case Parameter::source: + RETURN_IF(mImplContext->setAudioSource(param.get()) != + RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "setSourceFailed"); + break; + case Parameter::volumeStereo: + RETURN_IF(mImplContext->setVolumeStereo(param.get()) != + RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "setVolumeStereoFailed"); + break; + default: { + LOG(ERROR) << getEffectNameWithVersion() << __func__ << " unsupportedParameterTag " + << toString(tag); + return 
ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT, + "commonParamNotSupported"); + } + } + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus EffectImpl::getParameterCommon(const Parameter::Tag& tag, Parameter* param) { + RETURN_IF(!mImplContext, EX_NULL_POINTER, "nullContext"); + + switch (tag) { + case Parameter::common: { + param->set(mImplContext->getCommon()); + break; + } + case Parameter::deviceDescription: { + param->set(mImplContext->getOutputDevice()); + break; + } + case Parameter::mode: { + param->set(mImplContext->getAudioMode()); + break; + } + case Parameter::source: { + param->set(mImplContext->getAudioSource()); + break; + } + case Parameter::volumeStereo: { + param->set(mImplContext->getVolumeStereo()); + break; + } + default: { + LOG(DEBUG) << getEffectNameWithVersion() << __func__ << " unsupported tag " + << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT, + "tagNotSupported"); + } + } + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus EffectImpl::getState(State* state) NO_THREAD_SAFETY_ANALYSIS { + *state = mState; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus EffectImpl::command(CommandId command) { + std::lock_guard lg(mImplMutex); + RETURN_IF(mState == State::INIT, EX_ILLEGAL_STATE, "instanceNotOpen"); + + switch (command) { + case CommandId::START: + RETURN_OK_IF(mState == State::PROCESSING); + RETURN_IF_ASTATUS_NOT_OK(commandImpl(command), "commandImplFailed"); + mState = State::PROCESSING; + RETURN_IF(notifyEventFlag(mDataMqNotEmptyEf) != RetCode::SUCCESS, EX_ILLEGAL_STATE, + "notifyEventFlagNotEmptyFailed"); + startThread(); + break; + case CommandId::STOP: + RETURN_OK_IF(mState == State::IDLE); + mState = State::IDLE; + RETURN_IF(notifyEventFlag(mDataMqNotEmptyEf) != RetCode::SUCCESS, EX_ILLEGAL_STATE, + "notifyEventFlagNotEmptyFailed"); + stopThread(); + RETURN_IF_ASTATUS_NOT_OK(commandImpl(command), "commandImplFailed"); + break; + case 
CommandId::RESET: + mState = State::IDLE; + RETURN_IF(notifyEventFlag(mDataMqNotEmptyEf) != RetCode::SUCCESS, EX_ILLEGAL_STATE, + "notifyEventFlagNotEmptyFailed"); + stopThread(); + RETURN_IF_ASTATUS_NOT_OK(commandImpl(command), "commandImplFailed"); + break; + default: + LOG(ERROR) << getEffectNameWithVersion() << __func__ << " instance still processing"; + return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT, + "CommandIdNotSupported"); + } + LOG(VERBOSE) << getEffectNameWithVersion() << __func__ + << " transfer to state: " << toString(mState); + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus EffectImpl::commandImpl(CommandId command) { + RETURN_IF(!mImplContext, EX_NULL_POINTER, "nullContext"); + switch (command) { + case CommandId::START: + mImplContext->enable(); + break; + case CommandId::STOP: + mImplContext->disable(); + break; + case CommandId::RESET: + mImplContext->disable(); + mImplContext->reset(); + mImplContext->resetBuffer(); + break; + default: + LOG(ERROR) << __func__ << " commandId " << toString(command) << " not supported"; + return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT, + "commandIdNotSupported"); + } + return ndk::ScopedAStatus::ok(); +} + +std::shared_ptr EffectImpl::createContext(const Parameter::Common& common) { + return std::make_shared(1 /* statusMqDepth */, common); +} + +RetCode EffectImpl::releaseContext() { + if (mImplContext) { + mImplContext.reset(); + } + return RetCode::SUCCESS; +} + +void EffectImpl::cleanUp() { + command(CommandId::STOP); + close(); +} + +RetCode EffectImpl::notifyEventFlag(uint32_t flag) { + if (!mEventFlag) { + LOG(ERROR) << getEffectNameWithVersion() << __func__ << ": StatusEventFlag invalid"; + return RetCode::ERROR_EVENT_FLAG_ERROR; + } + if (const auto ret = mEventFlag->wake(flag); ret != ::android::OK) { + LOG(ERROR) << getEffectNameWithVersion() << __func__ << ": wake failure with ret " << ret; + return RetCode::ERROR_EVENT_FLAG_ERROR; + } 
+ LOG(VERBOSE) << getEffectNameWithVersion() << __func__ << ": " << std::hex << mEventFlag; + return RetCode::SUCCESS; +} + +IEffect::Status EffectImpl::status(binder_status_t status, size_t consumed, size_t produced) { + IEffect::Status ret; + ret.status = status; + ret.fmqConsumed = consumed; + ret.fmqProduced = produced; + return ret; +} + +void EffectImpl::process() { + ATRACE_NAME(getEffectNameWithVersion().c_str()); + /** + * wait for the EventFlag without lock, it's ok because the mEfGroup pointer will not change + * in the life cycle of workerThread (threadLoop). + */ + uint32_t efState = 0; + if (!mEventFlag || + ::android::OK != mEventFlag->wait(mDataMqNotEmptyEf, &efState, 0 /* no timeout */, + true /* retry */) || + !(efState & mDataMqNotEmptyEf)) { + LOG(ERROR) << getEffectNameWithVersion() << __func__ << ": StatusEventFlag - " << mEventFlag + << " efState - " << std::hex << efState; + return; + } + + { + std::lock_guard lg(mImplMutex); + if (mState != State::PROCESSING && mState != State::DRAINING) { + LOG(DEBUG) << getEffectNameWithVersion() + << " skip process in state: " << toString(mState); + return; + } + RETURN_VALUE_IF(!mImplContext, void(), "nullContext"); + auto statusMQ = mImplContext->getStatusFmq(); + auto inputMQ = mImplContext->getInputDataFmq(); + auto outputMQ = mImplContext->getOutputDataFmq(); + auto buffer = mImplContext->getWorkBuffer(); + if (!inputMQ || !outputMQ) { + return; + } + + assert(mImplContext->getWorkBufferSize() >= + std::max(inputMQ->availableToRead(), outputMQ->availableToWrite())); + auto processSamples = std::min(inputMQ->availableToRead(), outputMQ->availableToWrite()); + if (processSamples) { + inputMQ->read(buffer, processSamples); + IEffect::Status status = effectProcessImpl(buffer, buffer, processSamples); + outputMQ->write(buffer, status.fmqProduced); + statusMQ->writeBlocking(&status, 1); + } + } +} + +// A placeholder processing implementation to copy samples from input to output +IEffect::Status 
EffectImpl::effectProcessImpl(float* in, float* out, int samples) { + for (int i = 0; i < samples; i++) { + *out++ = *in++; + } + return {STATUS_OK, samples, samples}; +} + +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/EffectMain.cpp b/audio/EffectMain.cpp new file mode 100644 index 0000000..7a6141a --- /dev/null +++ b/audio/EffectMain.cpp @@ -0,0 +1,73 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "effectFactory-impl/EffectFactory.h" + +#include +#include +#include +#include + +#ifdef __ANDROID_APEX__ +#include +#endif + +/** Default name of effect configuration file. 
*/ +static const char* kDefaultConfigName = "audio_effects_config.xml"; + +static inline std::string config_file_path() { + if constexpr (__ANDROID_VENDOR_API__ >= 202404) { + AApexInfo *apexInfo; + if (AApexInfo_create(&apexInfo) == AAPEXINFO_OK) { + std::string apexName(AApexInfo_getName(apexInfo)); + AApexInfo_destroy(apexInfo); + std::string candidatePath("/apex/"); + candidatePath.append(apexName).append("/etc/").append(kDefaultConfigName); + LOG(DEBUG) << __func__ << " effect lib path " << candidatePath; + if (access(candidatePath.c_str(), R_OK) == 0) { + return candidatePath; + } + } + } else { + LOG(DEBUG) << __func__ << " libapexsupport is not supported"; + } + LOG(DEBUG) << __func__ << ": Unable to resolve config file path in APEX"; + return android::audio_find_readable_configuration_file(kDefaultConfigName); +} + +int main() { + // This is a debug implementation, always enable debug logging. + android::base::SetMinimumLogSeverity(::android::base::DEBUG); + ABinderProcess_setThreadPoolMaxThreadCount(0); + + auto configFile = config_file_path(); + if (configFile == "") { + LOG(ERROR) << __func__ << ": config file " << kDefaultConfigName << " not found!"; + return EXIT_FAILURE; + } + LOG(DEBUG) << __func__ << ": start factory with configFile:" << configFile; + auto effectFactory = + ndk::SharedRefBase::make(configFile); + + std::string serviceName = std::string() + effectFactory->descriptor + "/default"; + binder_status_t status = + AServiceManager_addService(effectFactory->asBinder().get(), serviceName.c_str()); + CHECK_EQ(STATUS_OK, status); + + LOG(DEBUG) << __func__ << ": effectFactory: " << serviceName << " start"; + ABinderProcess_joinThreadPool(); + return EXIT_FAILURE; // should not reach +} diff --git a/audio/EffectThread.cpp b/audio/EffectThread.cpp new file mode 100644 index 0000000..1a52c13 --- /dev/null +++ b/audio/EffectThread.cpp @@ -0,0 +1,130 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache 
License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include + +#define LOG_TAG "AHAL_EffectThread" +#include +#include +#include + +#include "effect-impl/EffectThread.h" +#include "effect-impl/EffectTypes.h" + +namespace aidl::android::hardware::audio::effect { + +EffectThread::~EffectThread() { + destroyThread(); +} + +RetCode EffectThread::createThread(const std::string& name, int priority) { + if (mThread.joinable()) { + LOG(WARNING) << mName << __func__ << " thread already created, no-op"; + return RetCode::SUCCESS; + } + + mName = name; + mPriority = priority; + { + std::lock_guard lg(mThreadMutex); + mStop = true; + mExit = false; + } + + mThread = std::thread(&EffectThread::threadLoop, this); + LOG(VERBOSE) << mName << __func__ << " priority " << mPriority << " done"; + return RetCode::SUCCESS; +} + +RetCode EffectThread::destroyThread() { + { + std::lock_guard lg(mThreadMutex); + mStop = mExit = true; + } + + mCv.notify_one(); + if (mThread.joinable()) { + mThread.join(); + } + + LOG(VERBOSE) << mName << __func__; + return RetCode::SUCCESS; +} + +RetCode EffectThread::startThread() { + { + std::lock_guard lg(mThreadMutex); + if (mDraining) { + mDraining = false; + } else { + mStop = false; + } + mCv.notify_one(); + } + + LOG(VERBOSE) << mName << __func__; + return RetCode::SUCCESS; +} + +RetCode EffectThread::stopThread() { + { + std::lock_guard lg(mThreadMutex); + mStop = true; + mCv.notify_one(); + } + + LOG(VERBOSE) << mName << __func__; + return RetCode::SUCCESS; 
+} + +RetCode EffectThread::startDraining() { + std::lock_guard lg(mThreadMutex); + mDraining = true; + mCv.notify_one(); + + LOG(VERBOSE) << mName << __func__; + return RetCode::SUCCESS; +} + +RetCode EffectThread::finishDraining() { + std::lock_guard lg(mThreadMutex); + mDraining = false; + mStop = true; + mCv.notify_one(); + + LOG(VERBOSE) << mName << __func__; + return RetCode::SUCCESS; +} + +void EffectThread::threadLoop() { + pthread_setname_np(pthread_self(), mName.substr(0, kMaxTaskNameLen - 1).c_str()); + setpriority(PRIO_PROCESS, 0, mPriority); + while (true) { + { + std::unique_lock l(mThreadMutex); + ::android::base::ScopedLockAssertion lock_assertion(mThreadMutex); + mCv.wait(l, [&]() REQUIRES(mThreadMutex) { return mExit || !mStop; }); + if (mExit) { + LOG(VERBOSE) << mName << " threadLoop EXIT!"; + return; + } + } + process(); + } +} + +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/EngineConfigXmlConverter.cpp b/audio/EngineConfigXmlConverter.cpp new file mode 100644 index 0000000..78deb64 --- /dev/null +++ b/audio/EngineConfigXmlConverter.cpp @@ -0,0 +1,268 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include +#include +#include + +#define LOG_TAG "AHAL_Config" +#include +#include +#include +#include + +#include "core-impl/CapEngineConfigXmlConverter.h" +#include "core-impl/EngineConfigXmlConverter.h" +#include "core-impl/XsdcConversion.h" + +using aidl::android::hardware::audio::core::internal::CapEngineConfigXmlConverter; +using aidl::android::hardware::audio::core::internal::convertAudioUsageToAidl; +using aidl::android::media::audio::common::AudioAttributes; +using aidl::android::media::audio::common::AudioContentType; +using aidl::android::media::audio::common::AudioFlag; +using aidl::android::media::audio::common::AudioHalAttributesGroup; +using aidl::android::media::audio::common::AudioHalCapCriterion; +using aidl::android::media::audio::common::AudioHalCapCriterionType; +using aidl::android::media::audio::common::AudioHalCapCriterionV2; +using aidl::android::media::audio::common::AudioHalEngineConfig; +using aidl::android::media::audio::common::AudioHalProductStrategy; +using aidl::android::media::audio::common::AudioHalVolumeCurve; +using aidl::android::media::audio::common::AudioHalVolumeGroup; +using aidl::android::media::audio::common::AudioProductStrategyType; +using aidl::android::media::audio::common::AudioSource; +using aidl::android::media::audio::common::AudioStreamType; +using aidl::android::media::audio::common::AudioUsage; + +using ::android::BAD_VALUE; +using ::android::base::unexpected; + +namespace eng_xsd = android::audio::policy::engine::configuration; + +namespace aidl::android::hardware::audio::core::internal { + +/** Default path of audio policy cap engine configuration file. 
*/ +static constexpr char kCapEngineConfigFileName[] = + "/parameter-framework/Settings/Policy/PolicyConfigurableDomains.xml"; + +ConversionResult EngineConfigXmlConverter::convertProductStrategyNameToAidl( + const std::string& xsdcProductStrategyName) { + const auto [it, success] = mProductStrategyMap.insert( + std::make_pair(xsdcProductStrategyName, mNextVendorStrategy)); + if (success) { + mNextVendorStrategy++; + } + return it->second; +} + +ConversionResult EngineConfigXmlConverter::convertProductStrategyIdToAidl(int xsdcId) { + if (xsdcId < AudioHalProductStrategy::VENDOR_STRATEGY_ID_START) { + return unexpected(BAD_VALUE); + } + return xsdcId; +} + +bool isDefaultAudioAttributes(const AudioAttributes& attributes) { + return ((attributes.contentType == AudioContentType::UNKNOWN) && + (attributes.usage == AudioUsage::UNKNOWN) && + (attributes.source == AudioSource::DEFAULT) && (attributes.flags == 0) && + (attributes.tags.empty())); +} + +ConversionResult EngineConfigXmlConverter::convertAudioAttributesToAidl( + const eng_xsd::AttributesType& xsdcAudioAttributes) { + if (xsdcAudioAttributes.hasAttributesRef()) { + if (mAttributesReferenceMap.empty()) { + mAttributesReferenceMap = + generateReferenceMap( + getXsdcConfig()->getAttributesRef()); + } + return convertAudioAttributesToAidl( + *(mAttributesReferenceMap.at(xsdcAudioAttributes.getAttributesRef()) + .getFirstAttributes())); + } + AudioAttributes aidlAudioAttributes; + if (xsdcAudioAttributes.hasContentType()) { + aidlAudioAttributes.contentType = VALUE_OR_FATAL(convertAudioContentTypeToAidl( + xsdcAudioAttributes.getFirstContentType()->getValue())); + } + if (xsdcAudioAttributes.hasUsage()) { + aidlAudioAttributes.usage = VALUE_OR_FATAL( + convertAudioUsageToAidl(xsdcAudioAttributes.getFirstUsage()->getValue())); + } + if (xsdcAudioAttributes.hasSource()) { + aidlAudioAttributes.source = VALUE_OR_FATAL( + convertAudioSourceToAidl(xsdcAudioAttributes.getFirstSource()->getValue())); + } + if 
(xsdcAudioAttributes.hasFlags()) { + std::vector xsdcFlagTypeVec = + xsdcAudioAttributes.getFirstFlags()->getValue(); + aidlAudioAttributes.flags = VALUE_OR_FATAL(convertAudioFlagsToAidl(xsdcFlagTypeVec)); + } + if (xsdcAudioAttributes.hasBundle()) { + const eng_xsd::BundleType* xsdcBundle = xsdcAudioAttributes.getFirstBundle(); + aidlAudioAttributes.tags.reserve(1); + aidlAudioAttributes.tags.push_back(xsdcBundle->getKey() + "_" + xsdcBundle->getValue()); + } + if (isDefaultAudioAttributes(aidlAudioAttributes)) { + mDefaultProductStrategyId = std::optional{-1}; + } + return aidlAudioAttributes; +} + +ConversionResult EngineConfigXmlConverter::convertAttributesGroupToAidl( + const eng_xsd::AttributesGroup& xsdcAttributesGroup) { + AudioHalAttributesGroup aidlAttributesGroup; + static const int kStreamTypeEnumOffset = + static_cast(eng_xsd::Stream::AUDIO_STREAM_VOICE_CALL) - + static_cast(AudioStreamType::VOICE_CALL); + aidlAttributesGroup.streamType = xsdcAttributesGroup.hasStreamType() + ? 
VALUE_OR_FATAL(convertAudioStreamTypeToAidl( + xsdcAttributesGroup.getStreamType())) + : AudioStreamType::INVALID; + aidlAttributesGroup.volumeGroupName = xsdcAttributesGroup.getVolumeGroup(); + if (xsdcAttributesGroup.hasAttributes_optional()) { + aidlAttributesGroup.attributes = + VALUE_OR_FATAL((convertCollectionToAidl( + xsdcAttributesGroup.getAttributes_optional(), + std::bind(&EngineConfigXmlConverter::convertAudioAttributesToAidl, this, + std::placeholders::_1)))); + } else if (xsdcAttributesGroup.hasContentType_optional() || + xsdcAttributesGroup.hasUsage_optional() || + xsdcAttributesGroup.hasSource_optional() || + xsdcAttributesGroup.hasFlags_optional() || + xsdcAttributesGroup.hasBundle_optional()) { + aidlAttributesGroup.attributes.push_back(VALUE_OR_FATAL(convertAudioAttributesToAidl( + eng_xsd::AttributesType(xsdcAttributesGroup.getContentType_optional(), + xsdcAttributesGroup.getUsage_optional(), + xsdcAttributesGroup.getSource_optional(), + xsdcAttributesGroup.getFlags_optional(), + xsdcAttributesGroup.getBundle_optional(), std::nullopt)))); + + } else { + LOG(ERROR) << __func__ << " Review Audio Policy config: no audio attributes provided for " + << aidlAttributesGroup.toString(); + return unexpected(BAD_VALUE); + } + return aidlAttributesGroup; +} + +ConversionResult EngineConfigXmlConverter::convertProductStrategyToAidl( + const eng_xsd::ProductStrategies::ProductStrategy& xsdcProductStrategy) { + AudioHalProductStrategy aidlProductStrategy; + + if (xsdcProductStrategy.hasId()) { + aidlProductStrategy.id = + VALUE_OR_FATAL(convertProductStrategyIdToAidl(xsdcProductStrategy.getId())); + } else { + aidlProductStrategy.id = + VALUE_OR_FATAL(convertProductStrategyNameToAidl(xsdcProductStrategy.getName())); + } + aidlProductStrategy.name = xsdcProductStrategy.getName(); + + if (xsdcProductStrategy.hasAttributesGroup()) { + aidlProductStrategy.attributesGroups = VALUE_OR_FATAL( + (convertCollectionToAidl( + xsdcProductStrategy.getAttributesGroup(), + 
std::bind(&EngineConfigXmlConverter::convertAttributesGroupToAidl, this, + std::placeholders::_1)))); + } + if ((mDefaultProductStrategyId != std::nullopt) && (mDefaultProductStrategyId.value() == -1)) { + mDefaultProductStrategyId = aidlProductStrategy.id; + } + return aidlProductStrategy; +} + +ConversionResult EngineConfigXmlConverter::convertVolumeCurveToAidl( + const eng_xsd::Volume& xsdcVolumeCurve) { + AudioHalVolumeCurve aidlVolumeCurve; + aidlVolumeCurve.deviceCategory = + static_cast(xsdcVolumeCurve.getDeviceCategory()); + if (xsdcVolumeCurve.hasRef()) { + if (mVolumesReferenceMap.empty()) { + mVolumesReferenceMap = generateReferenceMap( + getXsdcConfig()->getVolumes()); + } + aidlVolumeCurve.curvePoints = VALUE_OR_FATAL( + (convertCollectionToAidl( + mVolumesReferenceMap.at(xsdcVolumeCurve.getRef()).getPoint(), + &convertCurvePointToAidl))); + } else { + aidlVolumeCurve.curvePoints = VALUE_OR_FATAL( + (convertCollectionToAidl( + xsdcVolumeCurve.getPoint(), &convertCurvePointToAidl))); + } + return aidlVolumeCurve; +} + +ConversionResult EngineConfigXmlConverter::convertVolumeGroupToAidl( + const eng_xsd::VolumeGroupsType::VolumeGroup& xsdcVolumeGroup) { + AudioHalVolumeGroup aidlVolumeGroup; + aidlVolumeGroup.name = xsdcVolumeGroup.getName(); + aidlVolumeGroup.minIndex = xsdcVolumeGroup.getIndexMin(); + aidlVolumeGroup.maxIndex = xsdcVolumeGroup.getIndexMax(); + aidlVolumeGroup.volumeCurves = + VALUE_OR_FATAL((convertCollectionToAidl( + xsdcVolumeGroup.getVolume(), + std::bind(&EngineConfigXmlConverter::convertVolumeCurveToAidl, this, + std::placeholders::_1)))); + return aidlVolumeGroup; +} + +AudioHalEngineConfig& EngineConfigXmlConverter::getAidlEngineConfig() { + return mAidlEngineConfig; +} + +void EngineConfigXmlConverter::init() { + mProductStrategyMap = getLegacyProductStrategyMap(); + if (getXsdcConfig()->hasProductStrategies()) { + mAidlEngineConfig.productStrategies = VALUE_OR_FATAL( + (convertWrappedCollectionToAidl( + 
getXsdcConfig()->getProductStrategies(), + &eng_xsd::ProductStrategies::getProductStrategy, + std::bind(&EngineConfigXmlConverter::convertProductStrategyToAidl, this, + std::placeholders::_1)))); + if (mDefaultProductStrategyId) { + mAidlEngineConfig.defaultProductStrategyId = mDefaultProductStrategyId.value(); + } + } + if (getXsdcConfig()->hasVolumeGroups()) { + mAidlEngineConfig.volumeGroups = VALUE_OR_FATAL( + (convertWrappedCollectionToAidl( + getXsdcConfig()->getVolumeGroups(), + &eng_xsd::VolumeGroupsType::getVolumeGroup, + std::bind(&EngineConfigXmlConverter::convertVolumeGroupToAidl, this, + std::placeholders::_1)))); + } + if (getXsdcConfig()->hasCriteria() && getXsdcConfig()->hasCriterion_types()) { + AudioHalEngineConfig::CapSpecificConfig capSpecificConfig; + capSpecificConfig.criteriaV2 = + std::make_optional<>(VALUE_OR_FATAL((convertCapCriteriaCollectionToAidl( + getXsdcConfig()->getCriteria(), getXsdcConfig()->getCriterion_types())))); + internal::CapEngineConfigXmlConverter capEngConfigConverter{ + ::android::audio_find_readable_configuration_file(kCapEngineConfigFileName)}; + if (capEngConfigConverter.getStatus() == ::android::OK) { + capSpecificConfig.domains = std::move(capEngConfigConverter.getAidlCapEngineConfig()); + } + mAidlEngineConfig.capSpecificConfig = capSpecificConfig; + } +} +} // namespace aidl::android::hardware::audio::core::internal diff --git a/audio/Module.cpp b/audio/Module.cpp new file mode 100644 index 0000000..aa624ff --- /dev/null +++ b/audio/Module.cpp @@ -0,0 +1,1841 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include + +#define LOG_TAG "AHAL_Module" +#include +#include +#include +#include +#include + +#include "core-impl/Configuration.h" +#include "core-impl/Module.h" +#include "core-impl/ModuleBluetooth.h" +#include "core-impl/ModulePrimary.h" +#include "core-impl/ModuleRemoteSubmix.h" +#include "core-impl/ModuleStub.h" +#include "core-impl/ModuleUsb.h" +#include "core-impl/SoundDose.h" +#include "core-impl/utils.h" + +using aidl::android::hardware::audio::common::frameCountFromDurationMs; +using aidl::android::hardware::audio::common::getFrameSizeInBytes; +using aidl::android::hardware::audio::common::hasMmapFlag; +using aidl::android::hardware::audio::common::isBitPositionFlagSet; +using aidl::android::hardware::audio::common::isValidAudioMode; +using aidl::android::hardware::audio::common::SinkMetadata; +using aidl::android::hardware::audio::common::SourceMetadata; +using aidl::android::hardware::audio::core::sounddose::ISoundDose; +using aidl::android::media::audio::common::AudioChannelLayout; +using aidl::android::media::audio::common::AudioDevice; +using aidl::android::media::audio::common::AudioDeviceType; +using aidl::android::media::audio::common::AudioFormatDescription; +using aidl::android::media::audio::common::AudioFormatType; +using aidl::android::media::audio::common::AudioGainConfig; +using aidl::android::media::audio::common::AudioInputFlags; +using aidl::android::media::audio::common::AudioIoFlags; +using aidl::android::media::audio::common::AudioMMapPolicy; +using 
aidl::android::media::audio::common::AudioMMapPolicyInfo; +using aidl::android::media::audio::common::AudioMMapPolicyType; +using aidl::android::media::audio::common::AudioMode; +using aidl::android::media::audio::common::AudioOffloadInfo; +using aidl::android::media::audio::common::AudioOutputFlags; +using aidl::android::media::audio::common::AudioPort; +using aidl::android::media::audio::common::AudioPortConfig; +using aidl::android::media::audio::common::AudioPortExt; +using aidl::android::media::audio::common::AudioProfile; +using aidl::android::media::audio::common::Boolean; +using aidl::android::media::audio::common::Int; +using aidl::android::media::audio::common::MicrophoneInfo; +using aidl::android::media::audio::common::PcmType; + +namespace aidl::android::hardware::audio::core { + +namespace { + +inline bool hasDynamicChannelMasks(const std::vector& channelMasks) { + return channelMasks.empty() || + std::all_of(channelMasks.begin(), channelMasks.end(), + [](const auto& channelMask) { return channelMask == AudioChannelLayout{}; }); +} + +inline bool hasDynamicFormat(const AudioFormatDescription& format) { + return format == AudioFormatDescription{}; +} + +inline bool hasDynamicSampleRates(const std::vector& sampleRates) { + return sampleRates.empty() || + std::all_of(sampleRates.begin(), sampleRates.end(), + [](const auto& sampleRate) { return sampleRate == 0; }); +} + +inline bool isDynamicProfile(const AudioProfile& profile) { + return hasDynamicFormat(profile.format) || hasDynamicChannelMasks(profile.channelMasks) || + hasDynamicSampleRates(profile.sampleRates); +} + +bool hasDynamicProfilesOnly(const std::vector& profiles) { + if (profiles.empty()) return true; + return std::all_of(profiles.begin(), profiles.end(), isDynamicProfile); +} + +bool findAudioProfile(const AudioPort& port, const AudioFormatDescription& format, + AudioProfile* profile) { + if (auto profilesIt = + find_if(port.profiles.begin(), port.profiles.end(), + [&format](const auto& 
profile) { return profile.format == format; }); + profilesIt != port.profiles.end()) { + *profile = *profilesIt; + return true; + } + return false; +} + +} // namespace + +// static +std::shared_ptr Module::createInstance(Type type, std::unique_ptr&& config) { + switch (type) { + case Type::DEFAULT: + return ndk::SharedRefBase::make(std::move(config)); + case Type::R_SUBMIX: + return ndk::SharedRefBase::make(std::move(config)); + case Type::STUB: + return ndk::SharedRefBase::make(std::move(config)); + case Type::USB: + return ndk::SharedRefBase::make(std::move(config)); + case Type::BLUETOOTH: + return ndk::SharedRefBase::make(std::move(config)); + } +} + +// static +std::optional Module::typeFromString(const std::string& type) { + if (type == "default") + return Module::Type::DEFAULT; + else if (type == "r_submix") + return Module::Type::R_SUBMIX; + else if (type == "stub") + return Module::Type::STUB; + else if (type == "usb") + return Module::Type::USB; + else if (type == "bluetooth") + return Module::Type::BLUETOOTH; + return {}; +} + +std::ostream& operator<<(std::ostream& os, Module::Type t) { + switch (t) { + case Module::Type::DEFAULT: + os << "default"; + break; + case Module::Type::R_SUBMIX: + os << "r_submix"; + break; + case Module::Type::STUB: + os << "stub"; + break; + case Module::Type::USB: + os << "usb"; + break; + case Module::Type::BLUETOOTH: + os << "bluetooth"; + break; + } + return os; +} + +Module::Module(Type type, std::unique_ptr&& config) + : mType(type), mConfig(std::move(config)) { + populateConnectedProfiles(); +} + +void Module::cleanUpPatch(int32_t patchId) { + erase_all_values(mPatches, std::set{patchId}); +} + +ndk::ScopedAStatus Module::createStreamContext( + int32_t in_portConfigId, int64_t in_bufferSizeFrames, + std::shared_ptr asyncCallback, + std::shared_ptr outEventCallback, StreamContext* out_context) { + if (in_bufferSizeFrames <= 0) { + LOG(ERROR) << __func__ << ": " << mType << ": non-positive buffer size " + << 
in_bufferSizeFrames; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + auto& configs = getConfig().portConfigs; + auto portConfigIt = findById(configs, in_portConfigId); + const int32_t nominalLatencyMs = getNominalLatencyMs(*portConfigIt); + // Since this is a private method, it is assumed that + // validity of the portConfigId has already been checked. + int32_t minimumStreamBufferSizeFrames = 0; + if (!calculateBufferSizeFrames( + portConfigIt->format.value(), nominalLatencyMs, + portConfigIt->sampleRate.value().value, &minimumStreamBufferSizeFrames).isOk()) { + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + if (in_bufferSizeFrames < minimumStreamBufferSizeFrames) { + LOG(ERROR) << __func__ << ": " << mType << ": insufficient buffer size " + << in_bufferSizeFrames << ", must be at least " << minimumStreamBufferSizeFrames; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + const size_t frameSize = + getFrameSizeInBytes(portConfigIt->format.value(), portConfigIt->channelMask.value()); + if (frameSize == 0) { + LOG(ERROR) << __func__ << ": " << mType + << ": could not calculate frame size for port config " + << portConfigIt->toString(); + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + LOG(DEBUG) << __func__ << ": " << mType << ": frame size " << frameSize << " bytes"; + if (frameSize > static_cast(kMaximumStreamBufferSizeBytes / in_bufferSizeFrames)) { + LOG(ERROR) << __func__ << ": " << mType << ": buffer size " << in_bufferSizeFrames + << " frames is too large, maximum size is " + << kMaximumStreamBufferSizeBytes / frameSize; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + const auto& flags = portConfigIt->flags.value(); + StreamContext::DebugParameters params{mDebug.streamTransientStateDelayMs, + mVendorDebug.forceTransientBurst, + mVendorDebug.forceSynchronousDrain}; + std::shared_ptr soundDose; + if (!getSoundDose(&soundDose).isOk()) { + 
LOG(ERROR) << __func__ << ": could not create sound dose instance"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + StreamContext temp; + if (hasMmapFlag(flags)) { + MmapBufferDescriptor mmapDesc; + RETURN_STATUS_IF_ERROR( + createMmapBuffer(*portConfigIt, in_bufferSizeFrames, frameSize, &mmapDesc)); + temp = StreamContext( + std::make_unique(1, true /*configureEventFlagWord*/), + std::make_unique(1, true /*configureEventFlagWord*/), + portConfigIt->format.value(), portConfigIt->channelMask.value(), + portConfigIt->sampleRate.value().value, flags, nominalLatencyMs, + portConfigIt->ext.get().handle, std::move(mmapDesc), + outEventCallback, mSoundDose.getInstance(), params); + } else { + temp = StreamContext( + std::make_unique(1, true /*configureEventFlagWord*/), + std::make_unique(1, true /*configureEventFlagWord*/), + portConfigIt->format.value(), portConfigIt->channelMask.value(), + portConfigIt->sampleRate.value().value, flags, nominalLatencyMs, + portConfigIt->ext.get().handle, + std::make_unique(frameSize * in_bufferSizeFrames), + asyncCallback, outEventCallback, mSoundDose.getInstance(), params); + } + if (temp.isValid()) { + *out_context = std::move(temp); + } else { + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + return ndk::ScopedAStatus::ok(); +} + +std::vector Module::getDevicesFromDevicePortConfigIds( + const std::set& devicePortConfigIds) { + std::vector result; + auto& configs = getConfig().portConfigs; + for (const auto& id : devicePortConfigIds) { + auto it = findById(configs, id); + if (it != configs.end() && it->ext.getTag() == AudioPortExt::Tag::device) { + result.push_back(it->ext.template get().device); + } else { + LOG(FATAL) << __func__ << ": " << mType + << ": failed to find device for id" << id; + } + } + return result; +} + +std::vector Module::findConnectedDevices(int32_t portConfigId) { + return getDevicesFromDevicePortConfigIds(findConnectedPortConfigIds(portConfigId)); +} + +std::set 
Module::findConnectedPortConfigIds(int32_t portConfigId) { + std::set result; + auto patchIdsRange = mPatches.equal_range(portConfigId); + auto& patches = getConfig().patches; + for (auto it = patchIdsRange.first; it != patchIdsRange.second; ++it) { + auto patchIt = findById(patches, it->second); + if (patchIt == patches.end()) { + LOG(FATAL) << __func__ << ": " << mType << ": patch with id " << it->second + << " taken from mPatches " + << "not found in the configuration"; + } + if (std::find(patchIt->sourcePortConfigIds.begin(), patchIt->sourcePortConfigIds.end(), + portConfigId) != patchIt->sourcePortConfigIds.end()) { + result.insert(patchIt->sinkPortConfigIds.begin(), patchIt->sinkPortConfigIds.end()); + } else { + result.insert(patchIt->sourcePortConfigIds.begin(), patchIt->sourcePortConfigIds.end()); + } + } + return result; +} + +ndk::ScopedAStatus Module::findPortIdForNewStream(int32_t in_portConfigId, AudioPort** port) { + auto& configs = getConfig().portConfigs; + auto portConfigIt = findById(configs, in_portConfigId); + if (portConfigIt == configs.end()) { + LOG(ERROR) << __func__ << ": " << mType << ": existing port config id " << in_portConfigId + << " not found"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + const int32_t portId = portConfigIt->portId; + // In our implementation, configs of mix ports always have unique IDs. 
+ CHECK(portId != in_portConfigId); + auto& ports = getConfig().ports; + auto portIt = findById(ports, portId); + if (portIt == ports.end()) { + LOG(ERROR) << __func__ << ": " << mType << ": port id " << portId + << " used by port config id " << in_portConfigId << " not found"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + if (mStreams.count(in_portConfigId) != 0) { + LOG(ERROR) << __func__ << ": " << mType << ": port config id " << in_portConfigId + << " already has a stream opened on it"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + if (portIt->ext.getTag() != AudioPortExt::Tag::mix) { + LOG(ERROR) << __func__ << ": " << mType << ": port config id " << in_portConfigId + << " does not correspond to a mix port"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + const size_t maxOpenStreamCount = portIt->ext.get().maxOpenStreamCount; + if (maxOpenStreamCount != 0 && mStreams.count(portId) >= maxOpenStreamCount) { + LOG(ERROR) << __func__ << ": " << mType << ": port id " << portId + << " has already reached maximum allowed opened stream count: " + << maxOpenStreamCount; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + *port = &(*portIt); + return ndk::ScopedAStatus::ok(); +} + +bool Module::generateDefaultPortConfig(const AudioPort& port, AudioPortConfig* config) { + const bool allowDynamicConfig = port.ext.getTag() == AudioPortExt::device; + for (const auto& profile : port.profiles) { + if (isDynamicProfile(profile)) continue; + config->format = profile.format; + config->channelMask = *profile.channelMasks.begin(); + config->sampleRate = Int{.value = *profile.sampleRates.begin()}; + config->flags = port.flags; + config->ext = port.ext; + return true; + } + if (allowDynamicConfig) { + config->format = AudioFormatDescription{}; + config->channelMask = AudioChannelLayout{}; + config->sampleRate = Int{.value = 0}; + config->flags = port.flags; + config->ext = port.ext; 
+ return true; + } + LOG(ERROR) << __func__ << ": " << mType << ": port " << port.id << " only has dynamic profiles"; + return false; +} + +void Module::populateConnectedProfiles() { + Configuration& config = getConfig(); + for (const AudioPort& port : config.ports) { + if (port.ext.getTag() == AudioPortExt::device) { + if (auto devicePort = port.ext.get(); + !devicePort.device.type.connection.empty() && port.profiles.empty()) { + if (auto connIt = config.connectedProfiles.find(port.id); + connIt == config.connectedProfiles.end()) { + config.connectedProfiles.emplace( + port.id, internal::getStandard16And24BitPcmAudioProfiles()); + } + } + } + } +} + +template +std::set Module::portIdsFromPortConfigIds(C portConfigIds) { + std::set result; + auto& portConfigs = getConfig().portConfigs; + for (auto it = portConfigIds.begin(); it != portConfigIds.end(); ++it) { + auto portConfigIt = findById(portConfigs, *it); + if (portConfigIt != portConfigs.end()) { + result.insert(portConfigIt->portId); + } + } + return result; +} + +std::unique_ptr Module::initializeConfig() { + return internal::getConfiguration(getType()); +} + +int32_t Module::getNominalLatencyMs(const AudioPortConfig&) { + // Arbitrary value. Implementations must override this method to provide their actual latency. 
+ static constexpr int32_t kLatencyMs = 5; + return kLatencyMs; +} + +ndk::ScopedAStatus Module::calculateBufferSizeFrames( + const ::aidl::android::media::audio::common::AudioFormatDescription &format, + int32_t latencyMs, int32_t sampleRateHz, int32_t *bufferSizeFrames) { + if (format.type == AudioFormatType::PCM) { + *bufferSizeFrames = calculateBufferSizeFramesForPcm(latencyMs, sampleRateHz); + return ndk::ScopedAStatus::ok(); + } + LOG(ERROR) << __func__ << ": " << mType << ": format " << format.toString() + << " is not supported"; + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +ndk::ScopedAStatus Module::createMmapBuffer(const AudioPortConfig& portConfig __unused, + int32_t bufferSizeFrames __unused, + int32_t frameSizeBytes __unused, + MmapBufferDescriptor* desc __unused) { + LOG(ERROR) << __func__ << ": " << mType << ": is not implemented"; + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +std::vector Module::getAudioRoutesForAudioPortImpl(int32_t portId) { + std::vector result; + auto& routes = getConfig().routes; + for (auto& r : routes) { + const auto& srcs = r.sourcePortIds; + if (r.sinkPortId == portId || std::find(srcs.begin(), srcs.end(), portId) != srcs.end()) { + result.push_back(&r); + } + } + return result; +} + +Module::Configuration& Module::getConfig() { + if (!mConfig) { + mConfig = initializeConfig(); + } + return *mConfig; +} + +std::set Module::getRoutableAudioPortIds(int32_t portId, + std::vector* routes) { + std::vector routesStorage; + if (routes == nullptr) { + routesStorage = getAudioRoutesForAudioPortImpl(portId); + routes = &routesStorage; + } + std::set result; + for (AudioRoute* r : *routes) { + if (r->sinkPortId == portId) { + result.insert(r->sourcePortIds.begin(), r->sourcePortIds.end()); + } else { + result.insert(r->sinkPortId); + } + } + return result; +} + +void Module::registerPatch(const AudioPatch& patch) { + auto& configs = getConfig().portConfigs; + auto 
do_insert = [&](const std::vector& portConfigIds) { + for (auto portConfigId : portConfigIds) { + auto configIt = findById(configs, portConfigId); + if (configIt != configs.end()) { + mPatches.insert(std::pair{portConfigId, patch.id}); + if (configIt->portId != portConfigId) { + mPatches.insert(std::pair{configIt->portId, patch.id}); + } + } + }; + }; + do_insert(patch.sourcePortConfigIds); + do_insert(patch.sinkPortConfigIds); +} + +ndk::ScopedAStatus Module::updateStreamsConnectedState(const AudioPatch& oldPatch, + const AudioPatch& newPatch) { + // Notify streams about the new set of devices they are connected to. + auto maybeFailure = ndk::ScopedAStatus::ok(); + using Connections = + std::map>; + Connections oldConnections, newConnections; + auto fillConnectionsHelper = [&](Connections& connections, + const std::vector& mixPortCfgIds, + const std::vector& devicePortCfgIds) { + for (int32_t mixPortCfgId : mixPortCfgIds) { + connections[mixPortCfgId].insert(devicePortCfgIds.begin(), devicePortCfgIds.end()); + } + }; + auto fillConnections = [&](Connections& connections, const AudioPatch& patch) { + if (std::find_if(patch.sourcePortConfigIds.begin(), patch.sourcePortConfigIds.end(), + [&](int32_t portConfigId) { return mStreams.count(portConfigId) > 0; }) != + patch.sourcePortConfigIds.end()) { + // Sources are mix ports. + fillConnectionsHelper(connections, patch.sourcePortConfigIds, patch.sinkPortConfigIds); + } else if (std::find_if(patch.sinkPortConfigIds.begin(), patch.sinkPortConfigIds.end(), + [&](int32_t portConfigId) { + return mStreams.count(portConfigId) > 0; + }) != patch.sinkPortConfigIds.end()) { + // Sources are device ports. + fillConnectionsHelper(connections, patch.sinkPortConfigIds, patch.sourcePortConfigIds); + } // Otherwise, there are no streams to notify. 
+ }; + auto restoreOldConnections = [&](const std::set& mixPortIds, + const bool continueWithEmptyDevices) { + for (const auto mixPort : mixPortIds) { + if (auto it = oldConnections.find(mixPort); + continueWithEmptyDevices || it != oldConnections.end()) { + const std::vector d = + it != oldConnections.end() ? getDevicesFromDevicePortConfigIds(it->second) + : std::vector(); + if (auto status = mStreams.setStreamConnectedDevices(mixPort, d); status.isOk()) { + LOG(WARNING) << ":updateStreamsConnectedState: rollback: mix port config:" + << mixPort + << (d.empty() ? "; not connected" + : std::string("; connected to ") + + ::android::internal::ToString(d)); + } else { + // can't do much about rollback failures + LOG(ERROR) + << ":updateStreamsConnectedState: rollback: failed for mix port config:" + << mixPort; + } + } + } + }; + fillConnections(oldConnections, oldPatch); + fillConnections(newConnections, newPatch); + /** + * Illustration of oldConnections and newConnections + * + * oldConnections { + * a : {A,B,C}, + * b : {D}, + * d : {H,I,J}, + * e : {N,O,P}, + * f : {Q,R}, + * g : {T,U,V}, + * } + * + * newConnections { + * a : {A,B,C}, + * c : {E,F,G}, + * d : {K,L,M}, + * e : {N,P}, + * f : {Q,R,S}, + * g : {U,V,W}, + * } + * + * Expected routings: + * 'a': is ignored both in disconnect step and connect step, + * due to same devices both in oldConnections and newConnections. + * 'b': handled only in disconnect step with empty devices because 'b' is only present + * in oldConnections. + * 'c': handled only in connect step with {E,F,G} devices because 'c' is only present + * in newConnections. + * 'd': handled only in connect step with {K,L,M} devices because 'd' is also present + * in newConnections and it is ignored in disconnected step. + * 'e': handled only in connect step with {N,P} devices because 'e' is also present + * in newConnections and it is ignored in disconnect step. please note that there + * is no exclusive disconnection for device {O}. 
+ * 'f': handled only in connect step with {Q,R,S} devices because 'f' is also present + * in newConnections and it is ignored in disconnect step. Even though stream is + * already connected with {Q,R} devices and connection happens with {Q,R,S}. + * 'g': handled only in connect step with {U,V,W} devices because 'g' is also present + * in newConnections and it is ignored in disconnect step. There is no exclusive + * disconnection with devices {T,U,V}. + * + * If, any failure, will lead to restoreOldConnections (rollback). + * The aim of the restoreOldConnections is to make connections back to oldConnections. + * Failures in restoreOldConnections aren't handled. + */ + + std::set idsToConnectBackOnFailure; + // disconnection step + for (const auto& [oldMixPortConfigId, oldDevicePortConfigIds] : oldConnections) { + if (auto it = newConnections.find(oldMixPortConfigId); it == newConnections.end()) { + idsToConnectBackOnFailure.insert(oldMixPortConfigId); + if (auto status = mStreams.setStreamConnectedDevices(oldMixPortConfigId, {}); + status.isOk()) { + LOG(DEBUG) << __func__ << ": The stream on port config id " << oldMixPortConfigId + << " has been disconnected"; + } else { + maybeFailure = std::move(status); + // proceed to rollback even on one failure + break; + } + } + } + + if (!maybeFailure.isOk()) { + restoreOldConnections(idsToConnectBackOnFailure, false /*continueWithEmptyDevices*/); + LOG(WARNING) << __func__ << ": failed to disconnect from old patch. 
attempted rollback"; + return maybeFailure; + } + + std::set idsToRollbackOnFailure; + // connection step + for (const auto& [newMixPortConfigId, newDevicePortConfigIds] : newConnections) { + if (auto it = oldConnections.find(newMixPortConfigId); + it == oldConnections.end() || it->second != newDevicePortConfigIds) { + const auto connectedDevices = getDevicesFromDevicePortConfigIds(newDevicePortConfigIds); + idsToRollbackOnFailure.insert(newMixPortConfigId); + if (connectedDevices.empty()) { + // This is important as workers use the vector size to derive the connection status. + LOG(FATAL) << __func__ << ": No connected devices found for port config id " + << newMixPortConfigId; + } + if (auto status = + mStreams.setStreamConnectedDevices(newMixPortConfigId, connectedDevices); + status.isOk()) { + LOG(DEBUG) << __func__ << ": The stream on port config id " << newMixPortConfigId + << " has been connected to: " + << ::android::internal::ToString(connectedDevices); + } else { + maybeFailure = std::move(status); + // proceed to rollback even on one failure + break; + } + } + } + + if (!maybeFailure.isOk()) { + restoreOldConnections(idsToConnectBackOnFailure, false /*continueWithEmptyDevices*/); + restoreOldConnections(idsToRollbackOnFailure, true /*continueWithEmptyDevices*/); + LOG(WARNING) << __func__ << ": failed to connect for new patch. 
attempted rollback"; + return maybeFailure; + } + + return ndk::ScopedAStatus::ok(); +} + +binder_status_t Module::dump(int fd, const char** args, uint32_t numArgs) { + for (const auto& portConfig : getConfig().portConfigs) { + if (portConfig.ext.getTag() == AudioPortExt::Tag::mix) { + getStreams().dump(portConfig.id, fd, args, numArgs); + } + } + return STATUS_OK; +} + +ndk::ScopedAStatus Module::setModuleDebug( + const ::aidl::android::hardware::audio::core::ModuleDebug& in_debug) { + LOG(DEBUG) << __func__ << ": " << mType << ": old flags:" << mDebug.toString() + << ", new flags: " << in_debug.toString(); + if (mDebug.simulateDeviceConnections != in_debug.simulateDeviceConnections && + !mConnectedDevicePorts.empty()) { + LOG(ERROR) << __func__ << ": " << mType + << ": attempting to change device connections simulation while " + "having external " + << "devices connected"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + if (in_debug.streamTransientStateDelayMs < 0) { + LOG(ERROR) << __func__ << ": " << mType << ": streamTransientStateDelayMs is negative: " + << in_debug.streamTransientStateDelayMs; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + mDebug = in_debug; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Module::getTelephony(std::shared_ptr* _aidl_return) { + *_aidl_return = nullptr; + LOG(DEBUG) << __func__ << ": " << mType << ": returning null"; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Module::getBluetooth(std::shared_ptr* _aidl_return) { + *_aidl_return = nullptr; + LOG(DEBUG) << __func__ << ": " << mType << ": returning null"; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Module::getBluetoothA2dp(std::shared_ptr* _aidl_return) { + *_aidl_return = nullptr; + LOG(DEBUG) << __func__ << ": " << mType << ": returning null"; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Module::getBluetoothLe(std::shared_ptr* _aidl_return) { + *_aidl_return = 
nullptr; + LOG(DEBUG) << __func__ << ": " << mType << ": returning null"; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Module::connectExternalDevice(const AudioPort& in_templateIdAndAdditionalData, + AudioPort* _aidl_return) { + const int32_t templateId = in_templateIdAndAdditionalData.id; + auto& ports = getConfig().ports; + AudioPort connectedPort; + { // Scope the template port so that we don't accidentally modify it. + auto templateIt = findById(ports, templateId); + if (templateIt == ports.end()) { + LOG(ERROR) << __func__ << ": " << mType << ": port id " << templateId << " not found"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + if (templateIt->ext.getTag() != AudioPortExt::Tag::device) { + LOG(ERROR) << __func__ << ": " << mType << ": port id " << templateId + << " is not a device port"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + auto& templateDevicePort = templateIt->ext.get(); + if (templateDevicePort.device.type.connection.empty()) { + LOG(ERROR) << __func__ << ": " << mType << ": port id " << templateId + << " is permanently attached"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + if (mConnectedDevicePorts.find(templateId) != mConnectedDevicePorts.end()) { + LOG(ERROR) << __func__ << ": " << mType << ": port id " << templateId + << " is a connected device port"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + // Postpone id allocation until we ensure that there are no client errors. 
+ connectedPort = *templateIt; + connectedPort.extraAudioDescriptors = in_templateIdAndAdditionalData.extraAudioDescriptors; + const auto& inputDevicePort = + in_templateIdAndAdditionalData.ext.get(); + auto& connectedDevicePort = connectedPort.ext.get(); + connectedDevicePort.device.address = inputDevicePort.device.address; + LOG(DEBUG) << __func__ << ": " << mType << ": device port " << connectedPort.id + << " device set to " << connectedDevicePort.device.toString(); + // Check if there is already a connected port with for the same external device. + + for (auto connectedPortPair : mConnectedDevicePorts) { + auto connectedPortIt = findById(ports, connectedPortPair.first); + if (connectedPortIt->ext.get().device == + connectedDevicePort.device) { + LOG(ERROR) << __func__ << ": " << mType << ": device " + << connectedDevicePort.device.toString() + << " is already connected at the device port id " + << connectedPortPair.first; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + } + } + + // Two main cases are considered with regard to the profiles of the connected device port: + // + // 1. If the template device port has dynamic profiles, and at least one routable mix + // port also has dynamic profiles, it means that after connecting the device, the + // connected device port must have profiles populated with actual capabilities of + // the connected device, and dynamic of routable mix ports will be filled + // according to these capabilities. An example of this case is connection of an + // HDMI or USB device. For USB handled by ADSP, there can be mix ports with static + // profiles, and one dedicated mix port for "hi-fi" playback. The latter is left with + // dynamic profiles so that they can be populated with actual capabilities of + // the connected device. + // + // 2. 
If the template device port has dynamic profiles, while all routable mix ports + // have static profiles, it means that after connecting the device, the connected + // device port can be left with dynamic profiles, and profiles of mix ports are + // left untouched. An example of this case is connection of an analog wired + // headset, it should be treated in the same way as a speaker. + // + // Yet another possible case is when both the template device port and all routable + // mix ports have static profiles. This is allowed and handled correctly, however, it + // is not very practical, since these profiles are likely duplicates of each other. + + std::vector routesToMixPorts = getAudioRoutesForAudioPortImpl(templateId); + std::set routableMixPortIds = getRoutableAudioPortIds(templateId, &routesToMixPorts); + const int32_t nextPortId = getConfig().nextPortId++; + if (!mDebug.simulateDeviceConnections) { + // Even if the device port has static profiles, the HAL module might need to update + // them, or abort the connection process. + RETURN_STATUS_IF_ERROR(populateConnectedDevicePort(&connectedPort, nextPortId)); + } else if (hasDynamicProfilesOnly(connectedPort.profiles)) { + auto& connectedProfiles = getConfig().connectedProfiles; + if (auto connectedProfilesIt = connectedProfiles.find(templateId); + connectedProfilesIt != connectedProfiles.end()) { + connectedPort.profiles = connectedProfilesIt->second; + } + } + if (hasDynamicProfilesOnly(connectedPort.profiles)) { + // Possible case 2. Check if all routable mix ports have static profiles. 
+ if (auto dynamicMixPortIt = std::find_if(ports.begin(), ports.end(), + [&routableMixPortIds](const auto& p) { + return routableMixPortIds.count(p.id) > 0 && + hasDynamicProfilesOnly(p.profiles); + }); + dynamicMixPortIt != ports.end()) { + LOG(ERROR) << __func__ << ": " << mType + << ": connected port only has dynamic profiles after connecting " + << "external device " << connectedPort.toString() << ", and there exist " + << "a routable mix port with dynamic profiles: " + << dynamicMixPortIt->toString(); + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + } + + connectedPort.id = nextPortId; + auto [connectedPortsIt, _] = + mConnectedDevicePorts.insert(std::pair(connectedPort.id, std::set())); + LOG(DEBUG) << __func__ << ": " << mType << ": template port " << templateId + << " external device connected, " + << "connected port ID " << connectedPort.id; + ports.push_back(connectedPort); + onExternalDeviceConnectionChanged(connectedPort, true /*connected*/); + + // For routes where the template port is a source, add the connected port to sources, + // otherwise, create a new route by copying from the route for the template port. + std::vector newRoutes; + for (AudioRoute* r : routesToMixPorts) { + if (r->sinkPortId == templateId) { + newRoutes.push_back(AudioRoute{.sourcePortIds = r->sourcePortIds, + .sinkPortId = connectedPort.id, + .isExclusive = r->isExclusive}); + } else { + r->sourcePortIds.push_back(connectedPort.id); + } + } + auto& routes = getConfig().routes; + routes.insert(routes.end(), newRoutes.begin(), newRoutes.end()); + + if (!hasDynamicProfilesOnly(connectedPort.profiles) && !routableMixPortIds.empty()) { + // Note: this is a simplistic approach assuming that a mix port can only be populated + // from a single device port. Implementing support for stuffing dynamic profiles with + // a superset of all profiles from all routable dynamic device ports would be more involved. 
+ for (auto& port : ports) { + if (routableMixPortIds.count(port.id) == 0) continue; + if (hasDynamicProfilesOnly(port.profiles)) { + port.profiles = connectedPort.profiles; + connectedPortsIt->second.insert(port.id); + } else { + // Check if profiles are not all dynamic because they were populated by + // a previous connection. Otherwise, it means that they are actually static. + for (const auto& cp : mConnectedDevicePorts) { + if (cp.second.count(port.id) > 0) { + connectedPortsIt->second.insert(port.id); + break; + } + } + } + } + } + *_aidl_return = std::move(connectedPort); + + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Module::disconnectExternalDevice(int32_t in_portId) { + auto& ports = getConfig().ports; + auto portIt = findById(ports, in_portId); + if (portIt == ports.end()) { + LOG(ERROR) << __func__ << ": " << mType << ": port id " << in_portId << " not found"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + if (portIt->ext.getTag() != AudioPortExt::Tag::device) { + LOG(ERROR) << __func__ << ": " << mType << ": port id " << in_portId + << " is not a device port"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + auto connectedPortsIt = mConnectedDevicePorts.find(in_portId); + if (connectedPortsIt == mConnectedDevicePorts.end()) { + LOG(ERROR) << __func__ << ": " << mType << ": port id " << in_portId + << " is not a connected device port"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + auto& configs = getConfig().portConfigs; + auto& initials = getConfig().initialConfigs; + auto configIt = std::find_if(configs.begin(), configs.end(), [&](const auto& config) { + if (config.portId == in_portId) { + // Check if the configuration was provided by the client. 
+ const auto& initialIt = findById(initials, config.id); + return initialIt == initials.end() || config != *initialIt; + } + return false; + }); + if (configIt != configs.end()) { + LOG(ERROR) << __func__ << ": " << mType << ": port id " << in_portId + << " has a non-default config with id " << configIt->id; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + onExternalDeviceConnectionChanged(*portIt, false /*connected*/); + ports.erase(portIt); + LOG(DEBUG) << __func__ << ": " << mType << ": connected device port " << in_portId + << " released"; + + auto& routes = getConfig().routes; + for (auto routesIt = routes.begin(); routesIt != routes.end();) { + if (routesIt->sinkPortId == in_portId) { + routesIt = routes.erase(routesIt); + } else { + // Note: the list of sourcePortIds can't become empty because there must + // be the id of the template port in the route. + erase_if(routesIt->sourcePortIds, [in_portId](auto src) { return src == in_portId; }); + ++routesIt; + } + } + + // Clear profiles for mix ports that are not connected to any other ports. 
+ std::set mixPortsToClear = std::move(connectedPortsIt->second); + mConnectedDevicePorts.erase(connectedPortsIt); + for (const auto& connectedPort : mConnectedDevicePorts) { + for (int32_t mixPortId : connectedPort.second) { + mixPortsToClear.erase(mixPortId); + } + } + for (int32_t mixPortId : mixPortsToClear) { + auto mixPortIt = findById(ports, mixPortId); + if (mixPortIt != ports.end()) { + mixPortIt->profiles = {}; + } + } + + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Module::prepareToDisconnectExternalDevice(int32_t in_portId) { + auto& ports = getConfig().ports; + auto portIt = findById(ports, in_portId); + if (portIt == ports.end()) { + LOG(ERROR) << __func__ << ": " << mType << ": port id " << in_portId << " not found"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + if (portIt->ext.getTag() != AudioPortExt::Tag::device) { + LOG(ERROR) << __func__ << ": " << mType << ": port id " << in_portId + << " is not a device port"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + auto connectedPortsIt = mConnectedDevicePorts.find(in_portId); + if (connectedPortsIt == mConnectedDevicePorts.end()) { + LOG(ERROR) << __func__ << ": " << mType << ": port id " << in_portId + << " is not a connected device port"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + + onPrepareToDisconnectExternalDevice(*portIt); + + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Module::getAudioPatches(std::vector* _aidl_return) { + *_aidl_return = getConfig().patches; + LOG(DEBUG) << __func__ << ": " << mType << ": returning " << _aidl_return->size() << " patches"; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Module::getAudioPort(int32_t in_portId, AudioPort* _aidl_return) { + auto& ports = getConfig().ports; + auto portIt = findById(ports, in_portId); + if (portIt != ports.end()) { + *_aidl_return = *portIt; + LOG(DEBUG) << __func__ << ": " << mType << ": returning port 
by id " << in_portId; + return ndk::ScopedAStatus::ok(); + } + LOG(ERROR) << __func__ << ": " << mType << ": port id " << in_portId << " not found"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); +} + +ndk::ScopedAStatus Module::getAudioPortConfigs(std::vector* _aidl_return) { + *_aidl_return = getConfig().portConfigs; + LOG(DEBUG) << __func__ << ": " << mType << ": returning " << _aidl_return->size() + << " port configs"; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Module::getAudioPorts(std::vector* _aidl_return) { + *_aidl_return = getConfig().ports; + LOG(DEBUG) << __func__ << ": " << mType << ": returning " << _aidl_return->size() << " ports"; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Module::getAudioRoutes(std::vector* _aidl_return) { + *_aidl_return = getConfig().routes; + LOG(DEBUG) << __func__ << ": " << mType << ": returning " << _aidl_return->size() << " routes"; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Module::getAudioRoutesForAudioPort(int32_t in_portId, + std::vector* _aidl_return) { + auto& ports = getConfig().ports; + if (auto portIt = findById(ports, in_portId); portIt == ports.end()) { + LOG(ERROR) << __func__ << ": " << mType << ": port id " << in_portId << " not found"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + std::vector routes = getAudioRoutesForAudioPortImpl(in_portId); + std::transform(routes.begin(), routes.end(), std::back_inserter(*_aidl_return), + [](auto rptr) { return *rptr; }); + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Module::openInputStream(const OpenInputStreamArguments& in_args, + OpenInputStreamReturn* _aidl_return) { + LOG(DEBUG) << __func__ << ": " << mType << ": port config id " << in_args.portConfigId + << ", buffer size " << in_args.bufferSizeFrames << " frames"; + AudioPort* port = nullptr; + RETURN_STATUS_IF_ERROR(findPortIdForNewStream(in_args.portConfigId, &port)); + if (port->flags.getTag() != 
AudioIoFlags::Tag::input) { + LOG(ERROR) << __func__ << ": " << mType << ": port config id " << in_args.portConfigId + << " does not correspond to an input mix port"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + StreamContext context; + RETURN_STATUS_IF_ERROR(createStreamContext(in_args.portConfigId, in_args.bufferSizeFrames, + nullptr, nullptr, &context)); + context.fillDescriptor(&_aidl_return->desc); + std::shared_ptr stream; + RETURN_STATUS_IF_ERROR(createInputStream(std::move(context), in_args.sinkMetadata, + getMicrophoneInfos(), &stream)); + StreamWrapper streamWrapper(stream); + if (auto patchIt = mPatches.find(in_args.portConfigId); patchIt != mPatches.end()) { + RETURN_STATUS_IF_ERROR( + streamWrapper.setConnectedDevices(findConnectedDevices(in_args.portConfigId))); + } + auto streamBinder = streamWrapper.getBinder(); + AIBinder_setMinSchedulerPolicy(streamBinder.get(), SCHED_NORMAL, ANDROID_PRIORITY_AUDIO); + AIBinder_setInheritRt(streamBinder.get(), true); + mStreams.insert(port->id, in_args.portConfigId, std::move(streamWrapper)); + _aidl_return->stream = std::move(stream); + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Module::openOutputStream(const OpenOutputStreamArguments& in_args, + OpenOutputStreamReturn* _aidl_return) { + LOG(DEBUG) << __func__ << ": " << mType << ": port config id " << in_args.portConfigId + << ", has offload info? 
" << (in_args.offloadInfo.has_value()) << ", buffer size " + << in_args.bufferSizeFrames << " frames"; + AudioPort* port = nullptr; + RETURN_STATUS_IF_ERROR(findPortIdForNewStream(in_args.portConfigId, &port)); + if (port->flags.getTag() != AudioIoFlags::Tag::output) { + LOG(ERROR) << __func__ << ": " << mType << ": port config id " << in_args.portConfigId + << " does not correspond to an output mix port"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + const bool isOffload = isBitPositionFlagSet(port->flags.get(), + AudioOutputFlags::COMPRESS_OFFLOAD); + if (isOffload && !in_args.offloadInfo.has_value()) { + LOG(ERROR) << __func__ << ": " << mType << ": port id " << port->id + << " has COMPRESS_OFFLOAD flag set, requires offload info"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + const bool isNonBlocking = isBitPositionFlagSet(port->flags.get(), + AudioOutputFlags::NON_BLOCKING); + if (isNonBlocking && in_args.callback == nullptr) { + LOG(ERROR) << __func__ << ": " << mType << ": port id " << port->id + << " has NON_BLOCKING flag set, requires async callback"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + StreamContext context; + RETURN_STATUS_IF_ERROR(createStreamContext(in_args.portConfigId, in_args.bufferSizeFrames, + isNonBlocking ? 
in_args.callback : nullptr, + in_args.eventCallback, &context)); + context.fillDescriptor(&_aidl_return->desc); + std::shared_ptr stream; + RETURN_STATUS_IF_ERROR(createOutputStream(std::move(context), in_args.sourceMetadata, + in_args.offloadInfo, &stream)); + StreamWrapper streamWrapper(stream); + if (auto patchIt = mPatches.find(in_args.portConfigId); patchIt != mPatches.end()) { + RETURN_STATUS_IF_ERROR( + streamWrapper.setConnectedDevices(findConnectedDevices(in_args.portConfigId))); + } + auto streamBinder = streamWrapper.getBinder(); + AIBinder_setMinSchedulerPolicy(streamBinder.get(), SCHED_NORMAL, ANDROID_PRIORITY_AUDIO); + AIBinder_setInheritRt(streamBinder.get(), true); + mStreams.insert(port->id, in_args.portConfigId, std::move(streamWrapper)); + _aidl_return->stream = std::move(stream); + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Module::getSupportedPlaybackRateFactors( + SupportedPlaybackRateFactors* _aidl_return) { + LOG(DEBUG) << __func__ << ": " << mType; + (void)_aidl_return; + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +ndk::ScopedAStatus Module::setAudioPatch(const AudioPatch& in_requested, AudioPatch* _aidl_return) { + LOG(DEBUG) << __func__ << ": " << mType << ": requested patch " << in_requested.toString(); + if (in_requested.sourcePortConfigIds.empty()) { + LOG(ERROR) << __func__ << ": " << mType << ": requested patch has empty sources list"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + if (!all_unique(in_requested.sourcePortConfigIds)) { + LOG(ERROR) << __func__ << ": " << mType + << ": requested patch has duplicate ids in the sources list"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + if (in_requested.sinkPortConfigIds.empty()) { + LOG(ERROR) << __func__ << ": " << mType << ": requested patch has empty sinks list"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + if 
(!all_unique(in_requested.sinkPortConfigIds)) { + LOG(ERROR) << __func__ << ": " << mType + << ": requested patch has duplicate ids in the sinks list"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + + auto& configs = getConfig().portConfigs; + std::vector missingIds; + auto sources = + selectByIds(configs, in_requested.sourcePortConfigIds, &missingIds); + if (!missingIds.empty()) { + LOG(ERROR) << __func__ << ": " << mType << ": following source port config ids not found: " + << ::android::internal::ToString(missingIds); + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + auto sinks = selectByIds(configs, in_requested.sinkPortConfigIds, &missingIds); + if (!missingIds.empty()) { + LOG(ERROR) << __func__ << ": " << mType << ": following sink port config ids not found: " + << ::android::internal::ToString(missingIds); + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + // bool indicates whether a non-exclusive route is available. + // If only an exclusive route is available, that means the patch can not be + // established if there is any other patch which currently uses the sink port. 
+ std::map allowedSinkPorts; + auto& routes = getConfig().routes; + for (auto src : sources) { + for (const auto& r : routes) { + const auto& srcs = r.sourcePortIds; + if (std::find(srcs.begin(), srcs.end(), src->portId) != srcs.end()) { + if (!allowedSinkPorts[r.sinkPortId]) { // prefer non-exclusive + allowedSinkPorts[r.sinkPortId] = !r.isExclusive; + } + } + } + } + for (auto sink : sinks) { + if (allowedSinkPorts.count(sink->portId) == 0) { + LOG(ERROR) << __func__ << ": " << mType << ": there is no route to the sink port id " + << sink->portId; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + } + RETURN_STATUS_IF_ERROR(checkAudioPatchEndpointsMatch(sources, sinks)); + + auto& patches = getConfig().patches; + auto existing = patches.end(); + std::optional patchesBackup; + if (in_requested.id != 0) { + existing = findById(patches, in_requested.id); + if (existing != patches.end()) { + patchesBackup = mPatches; + cleanUpPatch(existing->id); + } else { + LOG(ERROR) << __func__ << ": " << mType << ": not found existing patch id " + << in_requested.id; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + } + // Validate the requested patch. + for (const auto& [sinkPortId, nonExclusive] : allowedSinkPorts) { + if (!nonExclusive && mPatches.count(sinkPortId) != 0) { + LOG(ERROR) << __func__ << ": " << mType << ": sink port id " << sinkPortId + << "is exclusive and is already used by some other patch"; + if (patchesBackup.has_value()) { + mPatches = std::move(*patchesBackup); + } + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + } + // Find the highest sample rate among mix port configs. + std::map sampleRates; + std::vector& mixPortConfigs = + sources[0]->ext.getTag() == AudioPortExt::mix ? 
sources : sinks; + for (auto mix : mixPortConfigs) { + sampleRates.emplace(mix->sampleRate.value().value, mix); + } + *_aidl_return = in_requested; + auto maxSampleRateIt = std::max_element(sampleRates.begin(), sampleRates.end()); + const int32_t latencyMs = getNominalLatencyMs(*(maxSampleRateIt->second)); + if (!calculateBufferSizeFrames( + maxSampleRateIt->second->format.value(), latencyMs, maxSampleRateIt->first, + &_aidl_return->minimumStreamBufferSizeFrames).isOk()) { + if (patchesBackup.has_value()) { + mPatches = std::move(*patchesBackup); + } + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + _aidl_return->latenciesMs.clear(); + _aidl_return->latenciesMs.insert(_aidl_return->latenciesMs.end(), + _aidl_return->sinkPortConfigIds.size(), latencyMs); + AudioPatch oldPatch{}; + if (existing == patches.end()) { + _aidl_return->id = getConfig().nextPatchId++; + patches.push_back(*_aidl_return); + } else { + oldPatch = *existing; + *existing = *_aidl_return; + } + patchesBackup = mPatches; + registerPatch(*_aidl_return); + if (auto status = updateStreamsConnectedState(oldPatch, *_aidl_return); !status.isOk()) { + mPatches = std::move(*patchesBackup); + if (existing == patches.end()) { + patches.pop_back(); + } else { + *existing = oldPatch; + } + return status; + } + + LOG(DEBUG) << __func__ << ": " << mType << ": " << (oldPatch.id == 0 ? 
"created" : "updated") + << " patch " << _aidl_return->toString(); + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Module::setAudioPortConfig(const AudioPortConfig& in_requested, + AudioPortConfig* out_suggested, bool* _aidl_return) { + auto generate = [this](const AudioPort& port, AudioPortConfig* config) { + return generateDefaultPortConfig(port, config); + }; + return setAudioPortConfigImpl(in_requested, generate, out_suggested, _aidl_return); +} + +ndk::ScopedAStatus Module::setAudioPortConfigImpl( + const AudioPortConfig& in_requested, + const std::function& + fillPortConfig, + AudioPortConfig* out_suggested, bool* applied) { + LOG(DEBUG) << __func__ << ": " << mType << ": requested " << in_requested.toString(); + auto& configs = getConfig().portConfigs; + auto existing = configs.end(); + if (in_requested.id != 0) { + if (existing = findById(configs, in_requested.id); + existing == configs.end()) { + LOG(ERROR) << __func__ << ": " << mType << ": existing port config id " + << in_requested.id << " not found"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + } + + const int portId = existing != configs.end() ? 
existing->portId : in_requested.portId; + if (portId == 0) { + LOG(ERROR) << __func__ << ": " << mType + << ": requested port config does not specify portId"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + auto& ports = getConfig().ports; + auto portIt = findById(ports, portId); + if (portIt == ports.end()) { + LOG(ERROR) << __func__ << ": " << mType + << ": requested port config points to non-existent portId " << portId; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + if (existing != configs.end()) { + *out_suggested = *existing; + } else { + AudioPortConfig newConfig; + newConfig.portId = portIt->id; + if (fillPortConfig(*portIt, &newConfig)) { + *out_suggested = newConfig; + } else { + LOG(ERROR) << __func__ << ": " << mType + << ": unable generate a default config for port " << portId; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + } + // From this moment, 'out_suggested' is either an existing port config, + // or a new generated config. Now attempt to update it according to the specified + // fields of 'in_requested'. + + // Device ports with only dynamic profiles are used for devices that are connected via ADSP, + // which takes care of their actual configuration automatically. 
+ const bool allowDynamicConfig = portIt->ext.getTag() == AudioPortExt::device && + hasDynamicProfilesOnly(portIt->profiles); + bool requestedIsValid = true, requestedIsFullySpecified = true; + + AudioIoFlags portFlags = portIt->flags; + if (in_requested.flags.has_value()) { + if (in_requested.flags.value() != portFlags) { + LOG(WARNING) << __func__ << ": " << mType << ": requested flags " + << in_requested.flags.value().toString() << " do not match port's " + << portId << " flags " << portFlags.toString(); + requestedIsValid = false; + } + } else { + requestedIsFullySpecified = false; + } + + AudioProfile portProfile; + if (in_requested.format.has_value()) { + const auto& format = in_requested.format.value(); + if ((format == AudioFormatDescription{} && allowDynamicConfig) || + findAudioProfile(*portIt, format, &portProfile)) { + out_suggested->format = format; + } else { + LOG(WARNING) << __func__ << ": " << mType << ": requested format " << format.toString() + << " is not found in the profiles of port " << portId; + requestedIsValid = false; + } + } else { + requestedIsFullySpecified = false; + } + if (!(out_suggested->format.value() == AudioFormatDescription{} && allowDynamicConfig) && + !findAudioProfile(*portIt, out_suggested->format.value(), &portProfile)) { + LOG(ERROR) << __func__ << ": " << mType << ": port " << portId + << " does not support format " << out_suggested->format.value().toString() + << " anymore"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + + if (in_requested.channelMask.has_value()) { + const auto& channelMask = in_requested.channelMask.value(); + if ((channelMask == AudioChannelLayout{} && allowDynamicConfig) || + find(portProfile.channelMasks.begin(), portProfile.channelMasks.end(), channelMask) != + portProfile.channelMasks.end()) { + out_suggested->channelMask = channelMask; + } else { + LOG(WARNING) << __func__ << ": " << mType << ": requested channel mask " + << channelMask.toString() << " is not 
supported for the format " + << portProfile.format.toString() << " by the port " << portId; + requestedIsValid = false; + } + } else { + requestedIsFullySpecified = false; + } + + if (in_requested.sampleRate.has_value()) { + const auto& sampleRate = in_requested.sampleRate.value(); + if ((sampleRate.value == 0 && allowDynamicConfig) || + find(portProfile.sampleRates.begin(), portProfile.sampleRates.end(), + sampleRate.value) != portProfile.sampleRates.end()) { + out_suggested->sampleRate = sampleRate; + } else { + LOG(WARNING) << __func__ << ": " << mType << ": requested sample rate " + << sampleRate.value << " is not supported for the format " + << portProfile.format.toString() << " by the port " << portId; + requestedIsValid = false; + } + } else { + requestedIsFullySpecified = false; + } + + if (in_requested.gain.has_value()) { + if (!setAudioPortConfigGain(*portIt, in_requested.gain.value())) { + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + out_suggested->gain = in_requested.gain.value(); + } + + if (in_requested.ext.getTag() != AudioPortExt::Tag::unspecified) { + if (in_requested.ext.getTag() == out_suggested->ext.getTag()) { + if (out_suggested->ext.getTag() == AudioPortExt::Tag::mix) { + // 'AudioMixPortExt.handle' and '.usecase' are set by the client, + // copy from in_requested. 
+ const auto& src = in_requested.ext.get(); + auto& dst = out_suggested->ext.get(); + dst.handle = src.handle; + dst.usecase = src.usecase; + } + } else { + LOG(WARNING) << __func__ << ": " << mType << ": requested ext tag " + << toString(in_requested.ext.getTag()) << " do not match port's tag " + << toString(out_suggested->ext.getTag()); + requestedIsValid = false; + } + } + + if (existing == configs.end() && requestedIsValid && requestedIsFullySpecified) { + out_suggested->id = getConfig().nextPortId++; + configs.push_back(*out_suggested); + *applied = true; + LOG(DEBUG) << __func__ << ": " << mType << ": created new port config " + << out_suggested->toString(); + } else if (existing != configs.end() && requestedIsValid) { + *existing = *out_suggested; + *applied = true; + LOG(DEBUG) << __func__ << ": " << mType << ": updated port config " + << out_suggested->toString(); + } else { + LOG(DEBUG) << __func__ << ": " << mType << ": not applied; existing config ? " + << (existing != configs.end()) << "; requested is valid? " << requestedIsValid + << ", fully specified? 
" << requestedIsFullySpecified; + *applied = false; + } + return ndk::ScopedAStatus::ok(); +} + +bool Module::setAudioPortConfigGain(const AudioPort& port, const AudioGainConfig& gainRequested) { + auto& ports = getConfig().ports; + if (gainRequested.index < 0 || gainRequested.index >= (int)port.gains.size()) { + LOG(ERROR) << __func__ << ": gains for port " << port.id << " is undefined"; + return false; + } + int stepValue = port.gains[gainRequested.index].stepValue; + if (stepValue == 0) { + LOG(ERROR) << __func__ << ": port gain step value is 0"; + return false; + } + int minValue = port.gains[gainRequested.index].minValue; + int maxValue = port.gains[gainRequested.index].maxValue; + if (gainRequested.values[0] > maxValue || gainRequested.values[0] < minValue) { + LOG(ERROR) << __func__ << ": gain value " << gainRequested.values[0] + << " out of range of min and max gain config"; + return false; + } + int gainIndex = (gainRequested.values[0] - minValue) / stepValue; + int totalSteps = (maxValue - minValue) / stepValue; + if (totalSteps == 0) { + LOG(ERROR) << __func__ << ": difference between port gain min value " << minValue + << " and max value " << maxValue << " is less than step value " << stepValue; + return false; + } + // Root-power quantities are used in curve: + // 10^((minMb / 100 + (maxMb / 100 - minMb / 100) * gainIndex / totalSteps) / (10 * 2)) + // where 100 is the conversion from mB to dB, 10 comes from the log 10 conversion from power + // ratios, and 2 means are the square of amplitude. 
+ float gain = + pow(10, (minValue + (maxValue - minValue) * (gainIndex / (float)totalSteps)) / 2000); + if (gain < 0) { + LOG(ERROR) << __func__ << ": gain " << gain << " is less than 0"; + return false; + } + for (const auto& route : getConfig().routes) { + if (route.sinkPortId != port.id) { + continue; + } + for (const auto sourcePortId : route.sourcePortIds) { + mStreams.setGain(sourcePortId, gain); + } + } + return true; +} + +ndk::ScopedAStatus Module::resetAudioPatch(int32_t in_patchId) { + auto& patches = getConfig().patches; + auto patchIt = findById(patches, in_patchId); + if (patchIt != patches.end()) { + auto patchesBackup = mPatches; + cleanUpPatch(patchIt->id); + if (auto status = updateStreamsConnectedState(*patchIt, AudioPatch{}); !status.isOk()) { + mPatches = std::move(patchesBackup); + return status; + } + patches.erase(patchIt); + LOG(DEBUG) << __func__ << ": " << mType << ": erased patch " << in_patchId; + return ndk::ScopedAStatus::ok(); + } + LOG(ERROR) << __func__ << ": " << mType << ": patch id " << in_patchId << " not found"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); +} + +ndk::ScopedAStatus Module::resetAudioPortConfig(int32_t in_portConfigId) { + auto& configs = getConfig().portConfigs; + auto configIt = findById(configs, in_portConfigId); + if (configIt != configs.end()) { + if (mStreams.count(in_portConfigId) != 0) { + LOG(ERROR) << __func__ << ": " << mType << ": port config id " << in_portConfigId + << " has a stream opened on it"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + auto patchIt = mPatches.find(in_portConfigId); + if (patchIt != mPatches.end()) { + LOG(ERROR) << __func__ << ": " << mType << ": port config id " << in_portConfigId + << " is used by the patch with id " << patchIt->second; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + auto& initials = getConfig().initialConfigs; + auto initialIt = findById(initials, in_portConfigId); + if (initialIt 
== initials.end()) { + configs.erase(configIt); + LOG(DEBUG) << __func__ << ": " << mType << ": erased port config " << in_portConfigId; + } else if (*configIt != *initialIt) { + *configIt = *initialIt; + LOG(DEBUG) << __func__ << ": " << mType << ": reset port config " << in_portConfigId; + } + return ndk::ScopedAStatus::ok(); + } + LOG(ERROR) << __func__ << ": " << mType << ": port config id " << in_portConfigId + << " not found"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); +} + +ndk::ScopedAStatus Module::getMasterMute(bool* _aidl_return) { + *_aidl_return = mMasterMute; + LOG(DEBUG) << __func__ << ": " << mType << ": returning " << *_aidl_return; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Module::setMasterMute(bool in_mute) { + LOG(DEBUG) << __func__ << ": " << mType << ": " << in_mute; + auto result = mDebug.simulateDeviceConnections ? ndk::ScopedAStatus::ok() + : onMasterMuteChanged(in_mute); + if (result.isOk()) { + mMasterMute = in_mute; + } else { + LOG(ERROR) << __func__ << ": " << mType << ": failed calling onMasterMuteChanged(" + << in_mute << "), error=" << result; + // Reset master mute if it failed. + onMasterMuteChanged(mMasterMute); + } + return result; +} + +ndk::ScopedAStatus Module::getMasterVolume(float* _aidl_return) { + *_aidl_return = mMasterVolume; + LOG(DEBUG) << __func__ << ": " << mType << ": returning " << *_aidl_return; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Module::setMasterVolume(float in_volume) { + LOG(DEBUG) << __func__ << ": " << mType << ": " << in_volume; + if (in_volume >= 0.0f && in_volume <= 1.0f) { + auto result = mDebug.simulateDeviceConnections ? ndk::ScopedAStatus::ok() + : onMasterVolumeChanged(in_volume); + if (result.isOk()) { + mMasterVolume = in_volume; + } else { + // Reset master volume if it failed. 
+ LOG(ERROR) << __func__ << ": " << mType << ": failed calling onMasterVolumeChanged(" + << in_volume << "), error=" << result; + onMasterVolumeChanged(mMasterVolume); + } + return result; + } + LOG(ERROR) << __func__ << ": " << mType << ": invalid master volume value: " << in_volume; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); +} + +ndk::ScopedAStatus Module::getMicMute(bool* _aidl_return) { + *_aidl_return = mMicMute; + LOG(DEBUG) << __func__ << ": " << mType << ": returning " << *_aidl_return; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Module::setMicMute(bool in_mute) { + LOG(DEBUG) << __func__ << ": " << mType << ": " << in_mute; + mMicMute = in_mute; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Module::getMicrophones(std::vector* _aidl_return) { + *_aidl_return = getMicrophoneInfos(); + LOG(DEBUG) << __func__ << ": " << mType << ": returning " + << ::android::internal::ToString(*_aidl_return); + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Module::updateAudioMode(AudioMode in_mode) { + if (!isValidAudioMode(in_mode)) { + LOG(ERROR) << __func__ << ": " << mType << ": invalid mode " << toString(in_mode); + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + // No checks for supported audio modes here, it's an informative notification. 
+ LOG(DEBUG) << __func__ << ": " << mType << ": " << toString(in_mode); + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Module::updateScreenRotation(ScreenRotation in_rotation) { + LOG(DEBUG) << __func__ << ": " << mType << ": " << toString(in_rotation); + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Module::updateScreenState(bool in_isTurnedOn) { + LOG(DEBUG) << __func__ << ": " << mType << ": " << in_isTurnedOn; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Module::getSoundDose(std::shared_ptr* _aidl_return) { + if (!mSoundDose) { + mSoundDose = ndk::SharedRefBase::make(); + } + *_aidl_return = mSoundDose.getInstance(); + LOG(DEBUG) << __func__ << ": " << mType + << ": returning instance of ISoundDose: " << _aidl_return->get(); + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Module::generateHwAvSyncId(int32_t* _aidl_return) { + LOG(DEBUG) << __func__ << ": " << mType; + (void)_aidl_return; + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +const std::string Module::VendorDebug::kForceTransientBurstName = "aosp.forceTransientBurst"; +const std::string Module::VendorDebug::kForceSynchronousDrainName = "aosp.forceSynchronousDrain"; +const std::string Module::kClipTransitionSupportName = "aosp.clipTransitionSupport"; + +ndk::ScopedAStatus Module::getVendorParameters(const std::vector& in_ids, + std::vector* _aidl_return) { + LOG(VERBOSE) << __func__ << ": " << mType << ": id count: " << in_ids.size(); + bool allParametersKnown = true; + for (const auto& id : in_ids) { + if (id == VendorDebug::kForceTransientBurstName) { + VendorParameter forceTransientBurst{.id = id}; + forceTransientBurst.ext.setParcelable(Boolean{mVendorDebug.forceTransientBurst}); + _aidl_return->push_back(std::move(forceTransientBurst)); + } else if (id == VendorDebug::kForceSynchronousDrainName) { + VendorParameter forceSynchronousDrain{.id = id}; + 
forceSynchronousDrain.ext.setParcelable(Boolean{mVendorDebug.forceSynchronousDrain}); + _aidl_return->push_back(std::move(forceSynchronousDrain)); + } else if (id == kClipTransitionSupportName) { + VendorParameter clipTransitionSupport{.id = id}; + clipTransitionSupport.ext.setParcelable(Boolean{true}); + _aidl_return->push_back(std::move(clipTransitionSupport)); + } else { + allParametersKnown = false; + LOG(VERBOSE) << __func__ << ": " << mType << ": unrecognized parameter \"" << id << "\""; + } + } + if (allParametersKnown) return ndk::ScopedAStatus::ok(); + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); +} + +namespace { + +template +bool extractParameter(const VendorParameter& p, decltype(W::value)* v) { + std::optional value; + binder_status_t result = p.ext.getParcelable(&value); + if (result == STATUS_OK && value.has_value()) { + *v = value.value().value; + return true; + } + LOG(ERROR) << __func__ << ": failed to read the value of the parameter \"" << p.id + << "\": " << result; + return false; +} + +} // namespace + +ndk::ScopedAStatus Module::setVendorParameters(const std::vector& in_parameters, + bool in_async) { + LOG(VERBOSE) << __func__ << ": " << mType << ": parameter count " << in_parameters.size() + << ", async: " << in_async; + bool allParametersKnown = true; + for (const auto& p : in_parameters) { + if (p.id == VendorDebug::kForceTransientBurstName) { + if (!extractParameter(p, &mVendorDebug.forceTransientBurst)) { + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + } else if (p.id == VendorDebug::kForceSynchronousDrainName) { + if (!extractParameter(p, &mVendorDebug.forceSynchronousDrain)) { + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + } else { + allParametersKnown = false; + LOG(VERBOSE) << __func__ << ": " << mType << ": unrecognized parameter \"" << p.id + << "\""; + } + } + if (allParametersKnown) return ndk::ScopedAStatus::ok(); + return 
ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); +} + +ndk::ScopedAStatus Module::addDeviceEffect( + int32_t in_portConfigId, + const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>& in_effect) { + if (in_effect == nullptr) { + LOG(DEBUG) << __func__ << ": " << mType << ": port id " << in_portConfigId + << ", null effect"; + } else { + LOG(DEBUG) << __func__ << ": " << mType << ": port id " << in_portConfigId + << ", effect Binder " << in_effect->asBinder().get(); + } + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +ndk::ScopedAStatus Module::removeDeviceEffect( + int32_t in_portConfigId, + const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>& in_effect) { + if (in_effect == nullptr) { + LOG(DEBUG) << __func__ << ": " << mType << ": port id " << in_portConfigId + << ", null effect"; + } else { + LOG(DEBUG) << __func__ << ": " << mType << ": port id " << in_portConfigId + << ", effect Binder " << in_effect->asBinder().get(); + } + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +ndk::ScopedAStatus Module::getMmapPolicyInfos(AudioMMapPolicyType mmapPolicyType, + std::vector* _aidl_return) { + LOG(DEBUG) << __func__ << ": " << mType << ": mmap policy type " << toString(mmapPolicyType); + std::set mmapSinks; + std::set mmapSources; + auto& ports = getConfig().ports; + for (const auto& port : ports) { + if (port.flags.getTag() == AudioIoFlags::Tag::input && + isBitPositionFlagSet(port.flags.get(), + AudioInputFlags::MMAP_NOIRQ)) { + mmapSinks.insert(port.id); + } else if (port.flags.getTag() == AudioIoFlags::Tag::output && + isBitPositionFlagSet(port.flags.get(), + AudioOutputFlags::MMAP_NOIRQ)) { + mmapSources.insert(port.id); + } + } + if (mmapSources.empty() && mmapSinks.empty()) { + AudioMMapPolicyInfo never; + never.mmapPolicy = AudioMMapPolicy::NEVER; + _aidl_return->push_back(never); + return ndk::ScopedAStatus::ok(); + } + for (const auto& route : 
getConfig().routes) { + if (mmapSinks.count(route.sinkPortId) != 0) { + // The sink is a mix port, add the sources if they are device ports. + for (int sourcePortId : route.sourcePortIds) { + auto sourcePortIt = findById(ports, sourcePortId); + if (sourcePortIt == ports.end()) { + // This must not happen + LOG(ERROR) << __func__ << ": " << mType << ": port id " << sourcePortId + << " cannot be found"; + continue; + } + if (sourcePortIt->ext.getTag() != AudioPortExt::Tag::device) { + // The source is not a device port, skip + continue; + } + AudioMMapPolicyInfo policyInfo; + policyInfo.device = sourcePortIt->ext.get().device; + // Always return AudioMMapPolicy.AUTO if the device supports mmap for + // default implementation. + policyInfo.mmapPolicy = AudioMMapPolicy::AUTO; + _aidl_return->push_back(policyInfo); + } + } else { + auto sinkPortIt = findById(ports, route.sinkPortId); + if (sinkPortIt == ports.end()) { + // This must not happen + LOG(ERROR) << __func__ << ": " << mType << ": port id " << route.sinkPortId + << " cannot be found"; + continue; + } + if (sinkPortIt->ext.getTag() != AudioPortExt::Tag::device) { + // The sink is not a device port, skip + continue; + } + if (count_any(mmapSources, route.sourcePortIds)) { + AudioMMapPolicyInfo policyInfo; + policyInfo.device = sinkPortIt->ext.get().device; + // Always return AudioMMapPolicy.AUTO if the device supports mmap for + // default implementation. 
+ policyInfo.mmapPolicy = AudioMMapPolicy::AUTO; + _aidl_return->push_back(policyInfo); + } + } + } + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Module::supportsVariableLatency(bool* _aidl_return) { + LOG(DEBUG) << __func__ << ": " << mType; + *_aidl_return = false; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Module::getAAudioMixerBurstCount(int32_t* _aidl_return) { + if (!isMmapSupported()) { + LOG(DEBUG) << __func__ << ": " << mType << ": mmap is not supported "; + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); + } + *_aidl_return = DEFAULT_AAUDIO_MIXER_BURST_COUNT; + LOG(DEBUG) << __func__ << ": " << mType << ": returning " << *_aidl_return; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Module::getAAudioHardwareBurstMinUsec(int32_t* _aidl_return) { + if (!isMmapSupported()) { + LOG(DEBUG) << __func__ << ": " << mType << ": mmap is not supported "; + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); + } + *_aidl_return = DEFAULT_AAUDIO_HARDWARE_BURST_MIN_DURATION_US; + LOG(DEBUG) << __func__ << ": " << mType << ": returning " << *_aidl_return; + return ndk::ScopedAStatus::ok(); +} + +bool Module::isMmapSupported() { + if (mIsMmapSupported.has_value()) { + return mIsMmapSupported.value(); + } + std::vector mmapPolicyInfos; + if (!getMmapPolicyInfos(AudioMMapPolicyType::DEFAULT, &mmapPolicyInfos).isOk()) { + mIsMmapSupported = false; + } else { + mIsMmapSupported = + std::find_if(mmapPolicyInfos.begin(), mmapPolicyInfos.end(), [](const auto& info) { + return info.mmapPolicy == AudioMMapPolicy::AUTO || + info.mmapPolicy == AudioMMapPolicy::ALWAYS; + }) != mmapPolicyInfos.end(); + } + return mIsMmapSupported.value(); +} + +ndk::ScopedAStatus Module::populateConnectedDevicePort(AudioPort* audioPort, int32_t) { + if (audioPort->ext.getTag() != AudioPortExt::device) { + LOG(ERROR) << __func__ << ": " << mType << ": not a device port: " << audioPort->toString(); + return 
ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + const auto& devicePort = audioPort->ext.get(); + if (!devicePort.device.type.connection.empty()) { + LOG(ERROR) << __func__ << ": " << mType << ": module implementation must override " + "'populateConnectedDevicePort' " + << "to handle connection of external devices."; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + LOG(VERBOSE) << __func__ << ": " << mType << ": do nothing and return ok"; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Module::checkAudioPatchEndpointsMatch( + const std::vector& sources __unused, + const std::vector& sinks __unused) { + LOG(VERBOSE) << __func__ << ": " << mType << ": do nothing and return ok"; + return ndk::ScopedAStatus::ok(); +} + +void Module::onExternalDeviceConnectionChanged( + const ::aidl::android::media::audio::common::AudioPort& audioPort __unused, + bool connected __unused) { + LOG(DEBUG) << __func__ << ": " << mType << ": do nothing and return"; +} + +void Module::onPrepareToDisconnectExternalDevice( + const ::aidl::android::media::audio::common::AudioPort& audioPort __unused) { + LOG(DEBUG) << __func__ << ": " << mType << ": do nothing and return"; +} + +ndk::ScopedAStatus Module::onMasterMuteChanged(bool mute __unused) { + LOG(VERBOSE) << __func__ << ": " << mType << ": do nothing and return ok"; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Module::onMasterVolumeChanged(float volume __unused) { + LOG(VERBOSE) << __func__ << ": " << mType << ": do nothing and return ok"; + return ndk::ScopedAStatus::ok(); +} + +std::vector Module::getMicrophoneInfos() { + std::vector result; + Configuration& config = getConfig(); + for (const AudioPort& port : config.ports) { + if (port.ext.getTag() == AudioPortExt::Tag::device) { + const AudioDeviceType deviceType = + port.ext.get().device.type.type; + if (deviceType == AudioDeviceType::IN_MICROPHONE || + deviceType == AudioDeviceType::IN_MICROPHONE_BACK) { + // 
Placeholder values. Vendor implementations must populate MicrophoneInfo + // accordingly based on their physical microphone parameters. + result.push_back(MicrophoneInfo{ + .id = port.name, + .device = port.ext.get().device, + .group = 0, + .indexInTheGroup = 0, + }); + } + } + } + return result; +} + +ndk::ScopedAStatus Module::bluetoothParametersUpdated() { + return mStreams.bluetoothParametersUpdated(); +} + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/ModulePrimary.cpp b/audio/ModulePrimary.cpp new file mode 100644 index 0000000..6cb9251 --- /dev/null +++ b/audio/ModulePrimary.cpp @@ -0,0 +1,132 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#define LOG_TAG "AHAL_ModulePrimary" +#include +#include + +#include "core-impl/ModulePrimary.h" +#include "core-impl/StreamMmapStub.h" +#include "core-impl/StreamOffloadStub.h" +#include "core-impl/StreamPrimary.h" +#include "core-impl/Telephony.h" + +using aidl::android::hardware::audio::common::areAllBitPositionFlagsSet; +using aidl::android::hardware::audio::common::hasMmapFlag; +using aidl::android::hardware::audio::common::SinkMetadata; +using aidl::android::hardware::audio::common::SourceMetadata; +using aidl::android::hardware::audio::core::StreamDescriptor; +using aidl::android::media::audio::common::AudioInputFlags; +using aidl::android::media::audio::common::AudioIoFlags; +using aidl::android::media::audio::common::AudioOffloadInfo; +using aidl::android::media::audio::common::AudioOutputFlags; +using aidl::android::media::audio::common::AudioPort; +using aidl::android::media::audio::common::AudioPortConfig; +using aidl::android::media::audio::common::AudioPortExt; +using aidl::android::media::audio::common::MicrophoneInfo; + +namespace aidl::android::hardware::audio::core { + +ndk::ScopedAStatus ModulePrimary::getTelephony(std::shared_ptr* _aidl_return) { + if (!mTelephony) { + mTelephony = ndk::SharedRefBase::make(); + } + *_aidl_return = mTelephony.getInstance(); + LOG(DEBUG) << __func__ + << ": returning instance of ITelephony: " << _aidl_return->get()->asBinder().get(); + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus ModulePrimary::calculateBufferSizeFrames( + const ::aidl::android::media::audio::common::AudioFormatDescription& format, + int32_t latencyMs, int32_t sampleRateHz, int32_t* bufferSizeFrames) { + if (format.type != ::aidl::android::media::audio::common::AudioFormatType::PCM && + StreamOffloadStub::getSupportedEncodings().count(format.encoding)) { + *bufferSizeFrames = sampleRateHz / 2; // 1/2 of a second. 
+ return ndk::ScopedAStatus::ok(); + } + return Module::calculateBufferSizeFrames(format, latencyMs, sampleRateHz, bufferSizeFrames); +} + +ndk::ScopedAStatus ModulePrimary::createInputStream(StreamContext&& context, + const SinkMetadata& sinkMetadata, + const std::vector& microphones, + std::shared_ptr* result) { + if (context.isMmap()) { + // "Stub" is used because there is no support for MMAP audio I/O on CVD. + return createStreamInstance(result, std::move(context), sinkMetadata, + microphones); + } + return createStreamInstance(result, std::move(context), sinkMetadata, + microphones); +} + +ndk::ScopedAStatus ModulePrimary::createOutputStream( + StreamContext&& context, const SourceMetadata& sourceMetadata, + const std::optional& offloadInfo, std::shared_ptr* result) { + if (context.isMmap()) { + // "Stub" is used because there is no support for MMAP audio I/O on CVD. + return createStreamInstance(result, std::move(context), sourceMetadata, + offloadInfo); + } else if (areAllBitPositionFlagsSet( + context.getFlags().get(), + {AudioOutputFlags::COMPRESS_OFFLOAD, AudioOutputFlags::NON_BLOCKING})) { + // "Stub" is used because there is no actual decoder. The stream just + // extracts the clip duration from the media file header and simulates + // playback over time. + return createStreamInstance(result, std::move(context), + sourceMetadata, offloadInfo); + } + return createStreamInstance(result, std::move(context), sourceMetadata, + offloadInfo); +} + +ndk::ScopedAStatus ModulePrimary::createMmapBuffer(const AudioPortConfig& portConfig, + int32_t bufferSizeFrames, int32_t frameSizeBytes, + MmapBufferDescriptor* desc) { + const size_t bufferSizeBytes = static_cast(bufferSizeFrames) * frameSizeBytes; + // The actual mmap buffer for I/O is created after the stream exits standby, via + // 'IStreamCommon.createMmapBuffer'. But we must return a valid file descriptor here because + // 'MmapBufferDescriptor' can not contain a "null" fd. 
+ const std::string regionName = + std::string("mmap-sim-o-") + + std::to_string(portConfig.ext.get().handle); + int fd = ashmem_create_region(regionName.c_str(), bufferSizeBytes); + if (fd < 0) { + PLOG(ERROR) << __func__ << ": failed to create shared memory region of " << bufferSizeBytes + << " bytes"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + desc->sharedMemory.fd = ndk::ScopedFileDescriptor(fd); + desc->sharedMemory.size = bufferSizeBytes; + desc->burstSizeFrames = bufferSizeFrames / 2; + desc->flags = 0; + LOG(DEBUG) << __func__ << ": " << desc->toString(); + return ndk::ScopedAStatus::ok(); +} + +int32_t ModulePrimary::getNominalLatencyMs(const AudioPortConfig& portConfig) { + static constexpr int32_t kLowLatencyMs = 5; + // 85 ms is chosen considering 4096 frames @ 48 kHz. This is the value which allows + // the virtual Android device implementation to pass CTS. Hardware implementations + // should have significantly lower latency. + static constexpr int32_t kStandardLatencyMs = 85; + return hasMmapFlag(portConfig.flags.value()) ? kLowLatencyMs : kStandardLatencyMs; +} + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/SoundDose.cpp b/audio/SoundDose.cpp new file mode 100644 index 0000000..6c3a067 --- /dev/null +++ b/audio/SoundDose.cpp @@ -0,0 +1,147 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "AHAL_SoundDose" + +#include "core-impl/SoundDose.h" + +#include +#include +#include +#include + +using aidl::android::hardware::audio::core::sounddose::ISoundDose; +using aidl::android::media::audio::common::AudioDevice; +using aidl::android::media::audio::common::AudioDeviceDescription; +using aidl::android::media::audio::common::AudioFormatDescription; + +namespace aidl::android::hardware::audio::core::sounddose { + +ndk::ScopedAStatus SoundDose::setOutputRs2UpperBound(float in_rs2ValueDbA) { + if (in_rs2ValueDbA < MIN_RS2 || in_rs2ValueDbA > DEFAULT_MAX_RS2) { + LOG(ERROR) << __func__ << ": RS2 value is invalid: " << in_rs2ValueDbA; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + + ::android::audio_utils::lock_guard l(mMutex); + mRs2Value = in_rs2ValueDbA; + if (mMelProcessor != nullptr) { + mMelProcessor->setOutputRs2UpperBound(in_rs2ValueDbA); + } + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus SoundDose::getOutputRs2UpperBound(float* _aidl_return) { + ::android::audio_utils::lock_guard l(mMutex); + *_aidl_return = mRs2Value; + LOG(DEBUG) << __func__ << ": returning " << *_aidl_return; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus SoundDose::registerSoundDoseCallback( + const std::shared_ptr& in_callback) { + if (in_callback.get() == nullptr) { + LOG(ERROR) << __func__ << ": Callback is nullptr"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + + ::android::audio_utils::lock_guard l(mCbMutex); + if (mCallback != nullptr) { + LOG(ERROR) << __func__ << ": Sound dose callback was already registered"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + + mCallback = in_callback; + LOG(DEBUG) << __func__ << ": Registered sound dose callback "; + + return ndk::ScopedAStatus::ok(); +} + +void SoundDose::setAudioDevice(const AudioDevice& audioDevice) { + ::android::audio_utils::lock_guard l(mCbMutex); + mAudioDevice = audioDevice; +} + +void 
SoundDose::startDataProcessor(uint32_t sampleRate, uint32_t channelCount, + const AudioFormatDescription& aidlFormat) { + ::android::audio_utils::lock_guard l(mMutex); + const auto result = aidl2legacy_AudioFormatDescription_audio_format_t(aidlFormat); + const audio_format_t format = result.value_or(AUDIO_FORMAT_INVALID); + + if (mMelProcessor == nullptr) { + // we don't have the deviceId concept on the vendor side so just pass 0 + mMelProcessor = ::android::sp<::android::audio_utils::MelProcessor>::make( + sampleRate, channelCount, format, mMelCallback, /*deviceId=*/0, mRs2Value); + } else { + mMelProcessor->updateAudioFormat(sampleRate, channelCount, format); + } +} + +void SoundDose::process(const void* buffer, size_t bytes) { + ::android::audio_utils::lock_guard l(mMutex); + if (mMelProcessor != nullptr) { + mMelProcessor->process(buffer, bytes); + } +} + +void SoundDose::onNewMelValues(const std::vector& mels, size_t offset, size_t length, + audio_port_handle_t deviceId __attribute__((__unused__))) const { + ::android::audio_utils::lock_guard l(mCbMutex); + if (!mAudioDevice.has_value()) { + LOG(WARNING) << __func__ << ": New mel values without a registered device"; + return; + } + if (mCallback == nullptr) { + LOG(ERROR) << __func__ << ": New mel values without a registered callback"; + return; + } + + ISoundDose::IHalSoundDoseCallback::MelRecord melRecord; + melRecord.timestamp = nanoseconds_to_seconds(systemTime()); + melRecord.melValues = std::vector(mels.begin() + offset, mels.begin() + offset + length); + + mCallback->onNewMelValues(melRecord, mAudioDevice.value()); +} + +void SoundDose::MelCallback::onNewMelValues(const std::vector& mels, size_t offset, + size_t length, + audio_port_handle_t deviceId + __attribute__((__unused__)), + bool attenuated __attribute__((__unused__))) const { + mSoundDose.onNewMelValues(mels, offset, length, deviceId); +} + +void SoundDose::onMomentaryExposure(float currentMel, audio_port_handle_t deviceId + 
__attribute__((__unused__))) const { + ::android::audio_utils::lock_guard l(mCbMutex); + if (!mAudioDevice.has_value()) { + LOG(WARNING) << __func__ << ": Momentary exposure without a registered device"; + return; + } + if (mCallback == nullptr) { + LOG(ERROR) << __func__ << ": Momentary exposure without a registered callback"; + return; + } + + mCallback->onMomentaryExposureWarning(currentMel, mAudioDevice.value()); +} + +void SoundDose::MelCallback::onMomentaryExposure(float currentMel, audio_port_handle_t deviceId + __attribute__((__unused__))) const { + mSoundDose.onMomentaryExposure(currentMel, deviceId); +} + +} // namespace aidl::android::hardware::audio::core::sounddose diff --git a/audio/Stream.cpp b/audio/Stream.cpp new file mode 100644 index 0000000..873fc48 --- /dev/null +++ b/audio/Stream.cpp @@ -0,0 +1,1195 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#define ATRACE_TAG ATRACE_TAG_AUDIO +#define LOG_TAG "AHAL_Stream" +#include +#include +#include +#include +#include +#include + +#include "core-impl/Stream.h" + +using aidl::android::hardware::audio::common::AudioOffloadMetadata; +using aidl::android::hardware::audio::common::getChannelCount; +using aidl::android::hardware::audio::common::getFrameSizeInBytes; +using aidl::android::hardware::audio::common::hasMmapFlag; +using aidl::android::hardware::audio::common::isBitPositionFlagSet; +using aidl::android::hardware::audio::common::SinkMetadata; +using aidl::android::hardware::audio::common::SourceMetadata; +using aidl::android::media::audio::common::AudioDevice; +using aidl::android::media::audio::common::AudioDualMonoMode; +using aidl::android::media::audio::common::AudioInputFlags; +using aidl::android::media::audio::common::AudioIoFlags; +using aidl::android::media::audio::common::AudioLatencyMode; +using aidl::android::media::audio::common::AudioOffloadInfo; +using aidl::android::media::audio::common::AudioOutputFlags; +using aidl::android::media::audio::common::AudioPlaybackRate; +using aidl::android::media::audio::common::MicrophoneDynamicInfo; +using aidl::android::media::audio::common::MicrophoneInfo; + +namespace aidl::android::hardware::audio::core { + +namespace { + +template +auto fmqErrorHandler(const char* mqName) { + return [m = std::string(mqName)](MQTypeError fmqError, std::string&& errorMessage) { + CHECK_EQ(fmqError, MQTypeError::NONE) << m << ": " << errorMessage; + }; +} + +} // namespace + +void StreamContext::fillDescriptor(StreamDescriptor* desc) { + if (mCommandMQ) { + desc->command = mCommandMQ->dupeDesc(); + } + if (mReplyMQ) { + desc->reply = mReplyMQ->dupeDesc(); + } + desc->frameSizeBytes = getFrameSize(); + desc->bufferSizeFrames = getBufferSizeInFrames(); + if (mDataMQ) { + desc->audio.set(mDataMQ->dupeDesc()); + } else { + MmapBufferDescriptor mmapDesc; // Move-only due to `fd`. 
+ mmapDesc.sharedMemory.fd = mMmapBufferDesc.sharedMemory.fd.dup(); + mmapDesc.sharedMemory.size = mMmapBufferDesc.sharedMemory.size; + mmapDesc.burstSizeFrames = mMmapBufferDesc.burstSizeFrames; + mmapDesc.flags = mMmapBufferDesc.flags; + desc->audio.set(std::move(mmapDesc)); + } +} + +size_t StreamContext::getBufferSizeInFrames() const { + if (mDataMQ) { + return mDataMQ->getQuantumCount() * mDataMQ->getQuantumSize() / getFrameSize(); + } else { + return mMmapBufferDesc.sharedMemory.size / getFrameSize(); + } +} + +size_t StreamContext::getFrameSize() const { + return getFrameSizeInBytes(mFormat, mChannelLayout); +} + +bool StreamContext::isValid() const { + if (mCommandMQ && !mCommandMQ->isValid()) { + LOG(ERROR) << "command FMQ is invalid"; + return false; + } + if (mReplyMQ && !mReplyMQ->isValid()) { + LOG(ERROR) << "reply FMQ is invalid"; + return false; + } + if (getFrameSize() == 0) { + LOG(ERROR) << "frame size is invalid"; + return false; + } + if (!isMmap() && mDataMQ && !mDataMQ->isValid()) { + LOG(ERROR) << "data FMQ is invalid"; + return false; + } else if (isMmap() && + (mMmapBufferDesc.sharedMemory.fd.get() == -1 || + mMmapBufferDesc.sharedMemory.size == 0 || mMmapBufferDesc.burstSizeFrames == 0)) { + LOG(ERROR) << "mmap info is invalid" << mMmapBufferDesc.toString(); + } + return true; +} + +void StreamContext::startStreamDataProcessor() { + auto streamDataProcessor = mStreamDataProcessor.lock(); + if (streamDataProcessor != nullptr) { + streamDataProcessor->startDataProcessor(mSampleRate, getChannelCount(mChannelLayout), + mFormat); + } +} + +void StreamContext::reset() { + mCommandMQ.reset(); + mReplyMQ.reset(); + mDataMQ.reset(); + mMmapBufferDesc.sharedMemory.fd.set(-1); +} + +pid_t StreamWorkerCommonLogic::getTid() const { +#if defined(__ANDROID__) + return pthread_gettid_np(pthread_self()); +#else + return 0; +#endif +} + +std::string StreamWorkerCommonLogic::init() { + if (mContext->getCommandMQ() == nullptr) return "Command MQ is null"; + 
if (mContext->getReplyMQ() == nullptr) return "Reply MQ is null"; + if (!mContext->isMmap()) { + StreamContext::DataMQ* const dataMQ = mContext->getDataMQ(); + if (dataMQ == nullptr) return "Data MQ is null"; + if (sizeof(DataBufferElement) != dataMQ->getQuantumSize()) { + return "Unexpected Data MQ quantum size: " + std::to_string(dataMQ->getQuantumSize()); + } + mDataBufferSize = dataMQ->getQuantumCount() * dataMQ->getQuantumSize(); + mDataBuffer.reset(new (std::nothrow) DataBufferElement[mDataBufferSize]); + if (mDataBuffer == nullptr) { + return "Failed to allocate data buffer for element count " + + std::to_string(dataMQ->getQuantumCount()) + + ", size in bytes: " + std::to_string(mDataBufferSize); + } + } + if (::android::status_t status = mDriver->init(this /*DriverCallbackInterface*/); + status != STATUS_OK) { + return "Failed to initialize the driver: " + std::to_string(status); + } + return ""; +} + +void StreamWorkerCommonLogic::onBufferStateChange(size_t /*bufferFramesLeft*/) {} +void StreamWorkerCommonLogic::onClipStateChange(size_t /*clipFramesLeft*/, bool /*hasNextClip*/) {} + +void StreamWorkerCommonLogic::populateReply(StreamDescriptor::Reply* reply, + bool isConnected) const { + static const StreamDescriptor::Position kUnknownPosition = { + .frames = StreamDescriptor::Position::UNKNOWN, + .timeNs = StreamDescriptor::Position::UNKNOWN}; + reply->status = STATUS_OK; + if (isConnected) { + reply->observable.frames = mContext->getFrameCount(); + reply->observable.timeNs = ::android::uptimeNanos(); + if (auto status = mDriver->refinePosition(&reply->observable); status != ::android::OK) { + reply->observable = kUnknownPosition; + } + } else { + reply->observable = reply->hardware = kUnknownPosition; + } + if (mContext->isMmap()) { + if (auto status = mDriver->getMmapPositionAndLatency(&reply->hardware, &reply->latencyMs); + status != ::android::OK) { + reply->hardware = kUnknownPosition; + reply->latencyMs = StreamDescriptor::LATENCY_UNKNOWN; + } + } 
+} + +void StreamWorkerCommonLogic::populateReplyWrongState( + StreamDescriptor::Reply* reply, const StreamDescriptor::Command& command) const { + LOG(WARNING) << "command '" << toString(command.getTag()) + << "' can not be handled in the state " << toString(mState); + reply->status = STATUS_INVALID_OPERATION; +} + +const std::string StreamInWorkerLogic::kThreadName = "reader"; + +StreamInWorkerLogic::Status StreamInWorkerLogic::cycle() { + // Note: for input streams, draining is driven by the client, thus + // "empty buffer" condition can only happen while handling the 'burst' + // command. Thus, unlike for output streams, it does not make sense to + // delay the 'DRAINING' state here by 'mTransientStateDelayMs'. + // TODO: Add a delay for transitions of async operations when/if they added. + + StreamDescriptor::Command command{}; + if (!mContext->getCommandMQ()->readBlocking(&command, 1)) { + LOG(ERROR) << __func__ << ": reading of command from MQ failed"; + mState = StreamDescriptor::State::ERROR; + return Status::ABORT; + } + using Tag = StreamDescriptor::Command::Tag; + using LogSeverity = ::android::base::LogSeverity; + const LogSeverity severity = + command.getTag() == Tag::burst || command.getTag() == Tag::getStatus + ? LogSeverity::VERBOSE + : LogSeverity::DEBUG; + LOG(severity) << __func__ << ": received command " << command.toString() << " in " + << kThreadName; + StreamDescriptor::Reply reply{}; + reply.status = STATUS_BAD_VALUE; + switch (command.getTag()) { + case Tag::halReservedExit: { + const int32_t cookie = command.get(); + StreamInWorkerLogic::Status status = Status::CONTINUE; + if (cookie == (mContext->getInternalCommandCookie() ^ getTid())) { + mDriver->shutdown(); + setClosed(); + status = Status::EXIT; + } else { + LOG(WARNING) << __func__ << ": EXIT command has a bad cookie: " << cookie; + } + if (cookie != 0) { // This is an internal command, no need to reply. 
+ return status; + } + // `cookie == 0` can only occur in the context of a VTS test, need to reply. + break; + } + case Tag::getStatus: + populateReply(&reply, mIsConnected); + break; + case Tag::start: + if (mState == StreamDescriptor::State::STANDBY || + mState == StreamDescriptor::State::DRAINING) { + if (::android::status_t status = mDriver->start(); status == ::android::OK) { + populateReply(&reply, mIsConnected); + mState = mState == StreamDescriptor::State::STANDBY + ? StreamDescriptor::State::IDLE + : StreamDescriptor::State::ACTIVE; + } else { + LOG(ERROR) << __func__ << ": start failed: " << status; + mState = StreamDescriptor::State::ERROR; + } + } else { + populateReplyWrongState(&reply, command); + } + break; + case Tag::burst: + if (const int32_t fmqByteCount = command.get(); fmqByteCount >= 0) { + LOG(VERBOSE) << __func__ << ": '" << toString(command.getTag()) << "' command for " + << fmqByteCount << " bytes"; + if (mState == StreamDescriptor::State::IDLE || + mState == StreamDescriptor::State::ACTIVE || + mState == StreamDescriptor::State::PAUSED || + mState == StreamDescriptor::State::DRAINING) { + if (bool success = + mContext->isMmap() ? readMmap(&reply) : read(fmqByteCount, &reply); + !success) { + mState = StreamDescriptor::State::ERROR; + } + if (mState == StreamDescriptor::State::IDLE || + mState == StreamDescriptor::State::PAUSED) { + mState = StreamDescriptor::State::ACTIVE; + } else if (mState == StreamDescriptor::State::DRAINING) { + // To simplify the reference code, we assume that the read operation + // has consumed all the data remaining in the hardware buffer. + // In a real implementation, here we would either remain in + // the 'DRAINING' state, or transfer to 'STANDBY' depending on the + // buffer state. 
+ mState = StreamDescriptor::State::STANDBY; + } + } else { + populateReplyWrongState(&reply, command); + } + } else { + LOG(WARNING) << __func__ << ": invalid burst byte count: " << fmqByteCount; + } + break; + case Tag::drain: + if (const auto mode = command.get(); + mode == StreamDescriptor::DrainMode::DRAIN_UNSPECIFIED) { + if (mState == StreamDescriptor::State::ACTIVE) { + if (::android::status_t status = mDriver->drain(mode); + status == ::android::OK) { + populateReply(&reply, mIsConnected); + mState = StreamDescriptor::State::DRAINING; + } else { + LOG(ERROR) << __func__ << ": drain failed: " << status; + mState = StreamDescriptor::State::ERROR; + } + } else { + populateReplyWrongState(&reply, command); + } + } else { + LOG(WARNING) << __func__ << ": invalid drain mode: " << toString(mode); + } + break; + case Tag::standby: + if (mState == StreamDescriptor::State::IDLE) { + populateReply(&reply, mIsConnected); + if (::android::status_t status = mDriver->standby(); status == ::android::OK) { + mState = StreamDescriptor::State::STANDBY; + } else { + LOG(ERROR) << __func__ << ": standby failed: " << status; + mState = StreamDescriptor::State::ERROR; + } + } else { + populateReplyWrongState(&reply, command); + } + break; + case Tag::pause: + if (mState == StreamDescriptor::State::ACTIVE) { + if (::android::status_t status = mDriver->pause(); status == ::android::OK) { + populateReply(&reply, mIsConnected); + mState = StreamDescriptor::State::PAUSED; + } else { + LOG(ERROR) << __func__ << ": pause failed: " << status; + mState = StreamDescriptor::State::ERROR; + } + } else { + populateReplyWrongState(&reply, command); + } + break; + case Tag::flush: + if (mState == StreamDescriptor::State::PAUSED) { + if (::android::status_t status = mDriver->flush(); status == ::android::OK) { + populateReply(&reply, mIsConnected); + mState = StreamDescriptor::State::STANDBY; + } else { + LOG(ERROR) << __func__ << ": flush failed: " << status; + mState = 
StreamDescriptor::State::ERROR; + } + } else { + populateReplyWrongState(&reply, command); + } + break; + } + reply.state = mState; + LOG(severity) << __func__ << ": writing reply " << reply.toString(); + if (!mContext->getReplyMQ()->writeBlocking(&reply, 1)) { + LOG(ERROR) << __func__ << ": writing of reply " << reply.toString() << " to MQ failed"; + mState = StreamDescriptor::State::ERROR; + return Status::ABORT; + } + return Status::CONTINUE; +} + +bool StreamInWorkerLogic::read(size_t clientSize, StreamDescriptor::Reply* reply) { + ATRACE_CALL(); + StreamContext::DataMQ* const dataMQ = mContext->getDataMQ(); + const size_t byteCount = std::min({clientSize, dataMQ->availableToWrite(), mDataBufferSize}); + const bool isConnected = mIsConnected; + const size_t frameSize = mContext->getFrameSize(); + size_t actualFrameCount = 0; + bool fatal = false; + int32_t latency = mContext->getNominalLatencyMs(); + if (isConnected) { + if (::android::status_t status = mDriver->transfer(mDataBuffer.get(), byteCount / frameSize, + &actualFrameCount, &latency); + status != ::android::OK) { + fatal = true; + LOG(ERROR) << __func__ << ": read failed: " << status; + } + } else { + usleep(3000); // Simulate blocking transfer delay. + for (size_t i = 0; i < byteCount; ++i) mDataBuffer[i] = 0; + actualFrameCount = byteCount / frameSize; + } + const size_t actualByteCount = actualFrameCount * frameSize; + if (bool success = actualByteCount > 0 ? dataMQ->write(&mDataBuffer[0], actualByteCount) : true; + success) { + LOG(VERBOSE) << __func__ << ": writing of " << actualByteCount << " bytes into data MQ" + << " succeeded; connected? " << isConnected; + // Frames are provided and counted regardless of connection status. 
+ reply->fmqByteCount += actualByteCount; + mContext->advanceFrameCount(actualFrameCount); + populateReply(reply, isConnected); + } else { + LOG(WARNING) << __func__ << ": writing of " << actualByteCount + << " bytes of data to MQ failed"; + reply->status = STATUS_NOT_ENOUGH_DATA; + } + reply->latencyMs = latency; + return !fatal; +} + +bool StreamInWorkerLogic::readMmap(StreamDescriptor::Reply* reply) { + void* buffer = nullptr; + size_t frameCount = 0; + size_t actualFrameCount = 0; + int32_t latency = mContext->getNominalLatencyMs(); + // use default-initialized parameter values for mmap stream. + if (::android::status_t status = + mDriver->transfer(buffer, frameCount, &actualFrameCount, &latency); + status == ::android::OK) { + populateReply(reply, mIsConnected); + reply->latencyMs = latency; + return true; + } else { + LOG(ERROR) << __func__ << ": transfer failed: " << status; + return false; + } +} + +const std::string StreamOutWorkerLogic::kThreadName = "writer"; + +void StreamOutWorkerLogic::onBufferStateChange(size_t bufferFramesLeft) { + const StreamDescriptor::State state = mState; + const DrainState drainState = mDrainState; + LOG(DEBUG) << __func__ << ": state: " << toString(state) << ", drainState: " << drainState + << ", bufferFramesLeft: " << bufferFramesLeft; + if (state == StreamDescriptor::State::TRANSFERRING || drainState == DrainState::EN_SENT) { + if (state == StreamDescriptor::State::TRANSFERRING) { + mState = StreamDescriptor::State::ACTIVE; + } + std::shared_ptr asyncCallback = mContext->getAsyncCallback(); + if (asyncCallback != nullptr) { + LOG(VERBOSE) << __func__ << ": sending onTransferReady"; + ndk::ScopedAStatus status = asyncCallback->onTransferReady(); + if (!status.isOk()) { + LOG(ERROR) << __func__ << ": error from onTransferReady: " << status; + } + } + } +} + +void StreamOutWorkerLogic::onClipStateChange(size_t clipFramesLeft, bool hasNextClip) { + const DrainState drainState = mDrainState; + std::shared_ptr asyncCallback = 
mContext->getAsyncCallback(); + LOG(DEBUG) << __func__ << ": drainState: " << drainState << "; clipFramesLeft " + << clipFramesLeft << "; hasNextClip? " << hasNextClip << "; asyncCallback? " + << (asyncCallback != nullptr); + if (drainState != DrainState::NONE && clipFramesLeft == 0) { + mState = + hasNextClip ? StreamDescriptor::State::TRANSFERRING : StreamDescriptor::State::IDLE; + mDrainState = DrainState::NONE; + if ((drainState == DrainState::ALL || drainState == DrainState::EN_SENT) && + asyncCallback != nullptr) { + LOG(DEBUG) << __func__ << ": sending onDrainReady"; + // For EN_SENT, this is the second onDrainReady which notifies about clip transition. + ndk::ScopedAStatus status = asyncCallback->onDrainReady(); + if (!status.isOk()) { + LOG(ERROR) << __func__ << ": error from onDrainReady: " << status; + } + } + } else if (drainState == DrainState::EN && clipFramesLeft > 0) { + // The stream state does not change, it is still draining. + mDrainState = DrainState::EN_SENT; + if (asyncCallback != nullptr) { + LOG(DEBUG) << __func__ << ": sending onDrainReady"; + ndk::ScopedAStatus status = asyncCallback->onDrainReady(); + if (!status.isOk()) { + LOG(ERROR) << __func__ << ": error from onDrainReady: " << status; + } + } + } +} + +StreamOutWorkerLogic::Status StreamOutWorkerLogic::cycle() { + // Non-blocking mode is handled within 'onClipStateChange' + if (std::shared_ptr asyncCallback = mContext->getAsyncCallback(); + mState == StreamDescriptor::State::DRAINING && asyncCallback == nullptr) { + if (auto stateDurationMs = std::chrono::duration_cast( + std::chrono::steady_clock::now() - mTransientStateStart); + stateDurationMs >= mTransientStateDelayMs) { + mState = StreamDescriptor::State::IDLE; + if (mTransientStateDelayMs.count() != 0) { + LOG(DEBUG) << __func__ << ": switched to state " << toString(mState) + << " after a timeout"; + } + } + } + + StreamDescriptor::Command command{}; + if (!mContext->getCommandMQ()->readBlocking(&command, 1)) { + LOG(ERROR) 
<< __func__ << ": reading of command from MQ failed"; + mState = StreamDescriptor::State::ERROR; + return Status::ABORT; + } + using Tag = StreamDescriptor::Command::Tag; + using LogSeverity = ::android::base::LogSeverity; + const LogSeverity severity = + command.getTag() == Tag::burst || command.getTag() == Tag::getStatus + ? LogSeverity::VERBOSE + : LogSeverity::DEBUG; + LOG(severity) << __func__ << ": received command " << command.toString() << " in " + << kThreadName; + StreamDescriptor::Reply reply{}; + reply.status = STATUS_BAD_VALUE; + using Tag = StreamDescriptor::Command::Tag; + switch (command.getTag()) { + case Tag::halReservedExit: { + const int32_t cookie = command.get(); + StreamOutWorkerLogic::Status status = Status::CONTINUE; + if (cookie == (mContext->getInternalCommandCookie() ^ getTid())) { + mDriver->shutdown(); + setClosed(); + status = Status::EXIT; + } else { + LOG(WARNING) << __func__ << ": EXIT command has a bad cookie: " << cookie; + } + if (cookie != 0) { // This is an internal command, no need to reply. + return status; + } + // `cookie == 0` can only occur in the context of a VTS test, need to reply. 
+ break; + } + case Tag::getStatus: + populateReply(&reply, mIsConnected); + break; + case Tag::start: { + std::optional nextState; + switch (mState) { + case StreamDescriptor::State::STANDBY: + nextState = StreamDescriptor::State::IDLE; + break; + case StreamDescriptor::State::PAUSED: + nextState = StreamDescriptor::State::ACTIVE; + break; + case StreamDescriptor::State::DRAIN_PAUSED: + nextState = StreamDescriptor::State::DRAINING; + break; + case StreamDescriptor::State::TRANSFER_PAUSED: + nextState = StreamDescriptor::State::TRANSFERRING; + break; + default: + populateReplyWrongState(&reply, command); + } + if (nextState.has_value()) { + if (::android::status_t status = mDriver->start(); status == ::android::OK) { + populateReply(&reply, mIsConnected); + if (*nextState == StreamDescriptor::State::IDLE || + *nextState == StreamDescriptor::State::ACTIVE) { + mState = *nextState; + } else { + switchToTransientState(*nextState); + } + } else { + LOG(ERROR) << __func__ << ": start failed: " << status; + mState = StreamDescriptor::State::ERROR; + } + } + } break; + case Tag::burst: + if (const int32_t fmqByteCount = command.get(); fmqByteCount >= 0) { + LOG(VERBOSE) << __func__ << ": '" << toString(command.getTag()) << "' command for " + << fmqByteCount << " bytes"; + if (mState != StreamDescriptor::State::ERROR && + mState != StreamDescriptor::State::TRANSFERRING && + mState != StreamDescriptor::State::TRANSFER_PAUSED) { + if (bool success = mContext->isMmap() ? 
writeMmap(&reply) + : write(fmqByteCount, &reply); + !success) { + mState = StreamDescriptor::State::ERROR; + } + std::shared_ptr asyncCallback = mContext->getAsyncCallback(); + if (mState == StreamDescriptor::State::STANDBY || + mState == StreamDescriptor::State::DRAIN_PAUSED || + mState == StreamDescriptor::State::PAUSED) { + if (asyncCallback == nullptr || + mState != StreamDescriptor::State::DRAIN_PAUSED) { + mState = StreamDescriptor::State::PAUSED; + } else { + mState = StreamDescriptor::State::TRANSFER_PAUSED; + } + } else if (mState == StreamDescriptor::State::IDLE || + mState == StreamDescriptor::State::ACTIVE || + (mState == StreamDescriptor::State::DRAINING && + mDrainState != DrainState::EN_SENT)) { + if (asyncCallback == nullptr || reply.fmqByteCount == fmqByteCount) { + mState = StreamDescriptor::State::ACTIVE; + } else { + switchToTransientState(StreamDescriptor::State::TRANSFERRING); + } + } else if (mState == StreamDescriptor::State::DRAINING && + mDrainState == DrainState::EN_SENT) { + // keep mState + } + } else { + populateReplyWrongState(&reply, command); + } + } else { + LOG(WARNING) << __func__ << ": invalid burst byte count: " << fmqByteCount; + } + break; + case Tag::drain: + if (const auto mode = command.get(); + mode == StreamDescriptor::DrainMode::DRAIN_ALL || + mode == StreamDescriptor::DrainMode::DRAIN_EARLY_NOTIFY) { + if (mState == StreamDescriptor::State::ACTIVE || + mState == StreamDescriptor::State::TRANSFERRING) { + if (::android::status_t status = mDriver->drain(mode); + status == ::android::OK) { + populateReply(&reply, mIsConnected); + if (mState == StreamDescriptor::State::ACTIVE && + mContext->getForceSynchronousDrain()) { + mState = StreamDescriptor::State::IDLE; + } else { + switchToTransientState(StreamDescriptor::State::DRAINING); + mDrainState = mode == StreamDescriptor::DrainMode::DRAIN_EARLY_NOTIFY + ? 
DrainState::EN + : DrainState::ALL; + } + } else { + LOG(ERROR) << __func__ << ": drain failed: " << status; + mState = StreamDescriptor::State::ERROR; + } + } else if (mState == StreamDescriptor::State::TRANSFER_PAUSED) { + mState = StreamDescriptor::State::DRAIN_PAUSED; + populateReply(&reply, mIsConnected); + } else { + populateReplyWrongState(&reply, command); + } + } else { + LOG(WARNING) << __func__ << ": invalid drain mode: " << toString(mode); + } + break; + case Tag::standby: + if (mState == StreamDescriptor::State::IDLE) { + populateReply(&reply, mIsConnected); + if (::android::status_t status = mDriver->standby(); status == ::android::OK) { + mState = StreamDescriptor::State::STANDBY; + } else { + LOG(ERROR) << __func__ << ": standby failed: " << status; + mState = StreamDescriptor::State::ERROR; + } + } else { + populateReplyWrongState(&reply, command); + } + break; + case Tag::pause: { + std::optional nextState; + switch (mState) { + case StreamDescriptor::State::ACTIVE: + nextState = StreamDescriptor::State::PAUSED; + break; + case StreamDescriptor::State::DRAINING: + nextState = StreamDescriptor::State::DRAIN_PAUSED; + break; + case StreamDescriptor::State::TRANSFERRING: + nextState = StreamDescriptor::State::TRANSFER_PAUSED; + break; + default: + populateReplyWrongState(&reply, command); + } + if (nextState.has_value()) { + if (::android::status_t status = mDriver->pause(); status == ::android::OK) { + populateReply(&reply, mIsConnected); + mState = nextState.value(); + } else { + LOG(ERROR) << __func__ << ": pause failed: " << status; + mState = StreamDescriptor::State::ERROR; + } + } + } break; + case Tag::flush: + if (mState == StreamDescriptor::State::PAUSED || + mState == StreamDescriptor::State::DRAIN_PAUSED || + mState == StreamDescriptor::State::TRANSFER_PAUSED) { + if (::android::status_t status = mDriver->flush(); status == ::android::OK) { + populateReply(&reply, mIsConnected); + mState = StreamDescriptor::State::IDLE; + } else { + 
LOG(ERROR) << __func__ << ": flush failed: " << status; + mState = StreamDescriptor::State::ERROR; + } + } else { + populateReplyWrongState(&reply, command); + } + break; + } + reply.state = mState; + LOG(severity) << __func__ << ": writing reply " << reply.toString(); + if (!mContext->getReplyMQ()->writeBlocking(&reply, 1)) { + LOG(ERROR) << __func__ << ": writing of reply " << reply.toString() << " to MQ failed"; + mState = StreamDescriptor::State::ERROR; + return Status::ABORT; + } + return Status::CONTINUE; +} + +bool StreamOutWorkerLogic::write(size_t clientSize, StreamDescriptor::Reply* reply) { + ATRACE_CALL(); + StreamContext::DataMQ* const dataMQ = mContext->getDataMQ(); + const size_t readByteCount = dataMQ->availableToRead(); + const size_t frameSize = mContext->getFrameSize(); + bool fatal = false; + int32_t latency = mContext->getNominalLatencyMs(); + if (readByteCount > 0 ? dataMQ->read(&mDataBuffer[0], readByteCount) : true) { + const bool isConnected = mIsConnected; + LOG(VERBOSE) << __func__ << ": reading of " << readByteCount << " bytes from data MQ" + << " succeeded; connected? " << isConnected; + // Amount of data that the HAL module is going to actually use. + size_t byteCount = std::min({clientSize, readByteCount, mDataBufferSize}); + if (byteCount >= frameSize && mContext->getForceTransientBurst()) { + // In order to prevent the state machine from going to ACTIVE state, + // simulate partial write. 
+ byteCount -= frameSize; + } + size_t actualFrameCount = 0; + if (isConnected) { + if (::android::status_t status = mDriver->transfer( + mDataBuffer.get(), byteCount / frameSize, &actualFrameCount, &latency); + status != ::android::OK) { + fatal = true; + LOG(ERROR) << __func__ << ": write failed: " << status; + } + auto streamDataProcessor = mContext->getStreamDataProcessor().lock(); + if (streamDataProcessor != nullptr) { + streamDataProcessor->process(mDataBuffer.get(), actualFrameCount * frameSize); + } + } else { + if (mContext->getAsyncCallback() == nullptr) { + usleep(3000); // Simulate blocking transfer delay. + } + actualFrameCount = byteCount / frameSize; + } + const size_t actualByteCount = actualFrameCount * frameSize; + // Frames are consumed and counted regardless of the connection status. + reply->fmqByteCount += actualByteCount; + mContext->advanceFrameCount(actualFrameCount); + populateReply(reply, isConnected); + } else { + LOG(WARNING) << __func__ << ": reading of " << readByteCount + << " bytes of data from MQ failed"; + reply->status = STATUS_NOT_ENOUGH_DATA; + } + reply->latencyMs = latency; + return !fatal; +} + +bool StreamOutWorkerLogic::writeMmap(StreamDescriptor::Reply* reply) { + void* buffer = nullptr; + size_t frameCount = 0; + size_t actualFrameCount = 0; + int32_t latency = mContext->getNominalLatencyMs(); + // use default-initialized parameter values for mmap stream. + if (::android::status_t status = + mDriver->transfer(buffer, frameCount, &actualFrameCount, &latency); + status == ::android::OK) { + populateReply(reply, mIsConnected); + reply->latencyMs = latency; + return true; + } else { + LOG(ERROR) << __func__ << ": transfer failed: " << status; + return false; + } +} + +StreamCommonImpl::~StreamCommonImpl() { + // It is responsibility of the class that implements 'DriverInterface' to call 'cleanupWorker' + // in the destructor. 
Note that 'cleanupWorker' can not be properly called from this destructor + // because any subclasses have already been destroyed and thus the 'DriverInterface' + // implementation is not valid. Thus, here it can only be asserted whether the subclass has done + // its job. + if (!mWorkerStopIssued && !isClosed()) { + LOG(FATAL) << __func__ << ": the stream implementation must call 'cleanupWorker' " + << "in order to clean up the worker thread."; + } +} + +ndk::ScopedAStatus StreamCommonImpl::initInstance( + const std::shared_ptr& delegate) { + mCommon = ndk::SharedRefBase::make(delegate); + if (!mWorker->start()) { + LOG(ERROR) << __func__ << ": Worker start error: " << mWorker->getError(); + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + setWorkerThreadPriority(mWorker->getTid()); + getContext().getCommandMQ()->setErrorHandler( + fmqErrorHandler("CommandMQ")); + getContext().getReplyMQ()->setErrorHandler( + fmqErrorHandler("ReplyMQ")); + if (getContext().getDataMQ() != nullptr) { + getContext().getDataMQ()->setErrorHandler( + fmqErrorHandler("DataMQ")); + } + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus StreamCommonImpl::getStreamCommonCommon( + std::shared_ptr* _aidl_return) { + if (!mCommon) { + LOG(FATAL) << __func__ << ": the common interface was not created"; + } + *_aidl_return = mCommon.getInstance(); + LOG(DEBUG) << __func__ << ": returning " << _aidl_return->get()->asBinder().get(); + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus StreamCommonImpl::updateHwAvSyncId(int32_t in_hwAvSyncId) { + LOG(DEBUG) << __func__ << ": id " << in_hwAvSyncId; + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +ndk::ScopedAStatus StreamCommonImpl::getVendorParameters( + const std::vector& in_ids, std::vector* _aidl_return) { + LOG(DEBUG) << __func__ << ": id count: " << in_ids.size(); + (void)_aidl_return; + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + 
+ndk::ScopedAStatus StreamCommonImpl::setVendorParameters( + const std::vector& in_parameters, bool in_async) { + LOG(DEBUG) << __func__ << ": parameters count " << in_parameters.size() + << ", async: " << in_async; + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +ndk::ScopedAStatus StreamCommonImpl::addEffect( + const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>& in_effect) { + if (in_effect == nullptr) { + LOG(DEBUG) << __func__ << ": null effect"; + } else { + LOG(DEBUG) << __func__ << ": effect Binder" << in_effect->asBinder().get(); + } + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +ndk::ScopedAStatus StreamCommonImpl::removeEffect( + const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>& in_effect) { + if (in_effect == nullptr) { + LOG(DEBUG) << __func__ << ": null effect"; + } else { + LOG(DEBUG) << __func__ << ": effect Binder" << in_effect->asBinder().get(); + } + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +ndk::ScopedAStatus StreamCommonImpl::close() { + LOG(DEBUG) << __func__; + if (!isClosed()) { + stopAndJoinWorker(); + onClose(mWorker->setClosed()); + return ndk::ScopedAStatus::ok(); + } else { + LOG(ERROR) << __func__ << ": stream was already closed"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } +} + +ndk::ScopedAStatus StreamCommonImpl::prepareToClose() { + LOG(DEBUG) << __func__; + if (!isClosed()) { + return ndk::ScopedAStatus::ok(); + } + LOG(ERROR) << __func__ << ": stream was closed"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); +} + +void StreamCommonImpl::cleanupWorker() { + if (!isClosed()) { + LOG(ERROR) << __func__ << ": stream was not closed prior to destruction, resource leak"; + stopAndJoinWorker(); + } +} + +void StreamCommonImpl::setWorkerThreadPriority(pid_t workerTid) { + // FAST workers should be run with a SCHED_FIFO scheduler, however the host 
process + // might be lacking the capability to request it, thus a failure to set is not an error. + if (auto flags = getContext().getFlags(); + (flags.getTag() == AudioIoFlags::Tag::input && + isBitPositionFlagSet(flags.template get(), + AudioInputFlags::FAST)) || + (flags.getTag() == AudioIoFlags::Tag::output && + (isBitPositionFlagSet(flags.template get(), + AudioOutputFlags::FAST) || + isBitPositionFlagSet(flags.template get(), + AudioOutputFlags::SPATIALIZER)))) { + constexpr int32_t kRTPriorityMin = 1; // SchedulingPolicyService.PRIORITY_MIN (Java). + constexpr int32_t kRTPriorityMax = 3; // SchedulingPolicyService.PRIORITY_MAX (Java). + int priorityBoost = kRTPriorityMax; + if (flags.getTag() == AudioIoFlags::Tag::output && + isBitPositionFlagSet(flags.template get(), + AudioOutputFlags::SPATIALIZER)) { + const int32_t sptPrio = + property_get_int32("audio.spatializer.priority", kRTPriorityMin); + if (sptPrio >= kRTPriorityMin && sptPrio <= kRTPriorityMax) { + priorityBoost = sptPrio; + } else { + LOG(WARNING) << __func__ << ": invalid spatializer priority: " << sptPrio; + return; + } + } + struct sched_param param = { + .sched_priority = priorityBoost, + }; + if (sched_setscheduler(workerTid, SCHED_FIFO | SCHED_RESET_ON_FORK, ¶m) != 0) { + PLOG(WARNING) << __func__ << ": failed to set FIFO scheduler and priority"; + } + } +} + +void StreamCommonImpl::stopAndJoinWorker() { + stopWorker(); + LOG(DEBUG) << __func__ << ": joining the worker thread..."; + mWorker->join(); + LOG(DEBUG) << __func__ << ": worker thread joined"; +} + +void StreamCommonImpl::stopWorker() { + if (auto commandMQ = mContext.getCommandMQ(); commandMQ != nullptr) { + LOG(DEBUG) << __func__ << ": asking the worker to exit..."; + auto cmd = StreamDescriptor::Command::make( + mContext.getInternalCommandCookie() ^ mWorker->getTid()); + // Note: never call 'pause' and 'resume' methods of StreamWorker + // in the HAL implementation. These methods are to be used by + // the client side only. 
Preventing the worker loop from running + // on the HAL side can cause a deadlock. + if (!commandMQ->writeBlocking(&cmd, 1)) { + LOG(ERROR) << __func__ << ": failed to write exit command to the MQ"; + } + LOG(DEBUG) << __func__ << ": done"; + } + mWorkerStopIssued = true; +} + +ndk::ScopedAStatus StreamCommonImpl::updateMetadataCommon(const Metadata& metadata) { + LOG(DEBUG) << __func__; + if (!isClosed()) { + if (metadata.index() != mMetadata.index()) { + LOG(FATAL) << __func__ << ": changing metadata variant is not allowed"; + } + mMetadata = metadata; + return ndk::ScopedAStatus::ok(); + } + LOG(ERROR) << __func__ << ": stream was closed"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); +} + +ndk::ScopedAStatus StreamCommonImpl::setConnectedDevices( + const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices) { + mWorker->setIsConnected(!devices.empty()); + mConnectedDevices = devices; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus StreamCommonImpl::setGain(float gain) { + LOG(DEBUG) << __func__ << ": gain " << gain; + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +ndk::ScopedAStatus StreamCommonImpl::bluetoothParametersUpdated() { + LOG(DEBUG) << __func__; + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +namespace { +static std::map transformMicrophones( + const std::vector& microphones) { + std::map result; + std::transform(microphones.begin(), microphones.end(), std::inserter(result, result.begin()), + [](const auto& mic) { return std::make_pair(mic.device, mic.id); }); + return result; +} +} // namespace + +StreamIn::StreamIn(StreamContext&& context, const std::vector& microphones) + : mContextInstance(std::move(context)), mMicrophones(transformMicrophones(microphones)) { + LOG(DEBUG) << __func__; +} + +void StreamIn::defaultOnClose() { + mContextInstance.reset(); +} + +ndk::ScopedAStatus StreamIn::getActiveMicrophones( + std::vector* 
_aidl_return) { + std::vector result; + std::vector channelMapping{ + getChannelCount(getContext().getChannelLayout()), + MicrophoneDynamicInfo::ChannelMapping::DIRECT}; + for (auto it = getConnectedDevices().begin(); it != getConnectedDevices().end(); ++it) { + if (auto micIt = mMicrophones.find(*it); micIt != mMicrophones.end()) { + MicrophoneDynamicInfo dynMic; + dynMic.id = micIt->second; + dynMic.channelMapping = channelMapping; + result.push_back(std::move(dynMic)); + } + } + *_aidl_return = std::move(result); + LOG(DEBUG) << __func__ << ": returning " << ::android::internal::ToString(*_aidl_return); + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus StreamIn::getMicrophoneDirection(MicrophoneDirection* _aidl_return) { + LOG(DEBUG) << __func__; + (void)_aidl_return; + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +ndk::ScopedAStatus StreamIn::setMicrophoneDirection(MicrophoneDirection in_direction) { + LOG(DEBUG) << __func__ << ": direction " << toString(in_direction); + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +ndk::ScopedAStatus StreamIn::getMicrophoneFieldDimension(float* _aidl_return) { + LOG(DEBUG) << __func__; + (void)_aidl_return; + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +ndk::ScopedAStatus StreamIn::setMicrophoneFieldDimension(float in_zoom) { + LOG(DEBUG) << __func__ << ": zoom " << in_zoom; + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +ndk::ScopedAStatus StreamIn::getHwGain(std::vector* _aidl_return) { + LOG(DEBUG) << __func__; + (void)_aidl_return; + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +ndk::ScopedAStatus StreamIn::setHwGain(const std::vector& in_channelGains) { + LOG(DEBUG) << __func__ << ": gains " << ::android::internal::ToString(in_channelGains); + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + 
+StreamInHwGainHelper::StreamInHwGainHelper(const StreamContext* context) + : mChannelCount(getChannelCount(context->getChannelLayout())) {} + +ndk::ScopedAStatus StreamInHwGainHelper::getHwGainImpl(std::vector* _aidl_return) { + if (mHwGains.empty()) { + mHwGains.resize(mChannelCount, 0.0f); + } + *_aidl_return = mHwGains; + LOG(DEBUG) << __func__ << ": returning " << ::android::internal::ToString(*_aidl_return); + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus StreamInHwGainHelper::setHwGainImpl(const std::vector& in_channelGains) { + LOG(DEBUG) << __func__ << ": gains " << ::android::internal::ToString(in_channelGains); + if (in_channelGains.size() != mChannelCount) { + LOG(ERROR) << __func__ + << ": channel count does not match stream channel count: " << mChannelCount; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + for (float gain : in_channelGains) { + if (gain < StreamIn::HW_GAIN_MIN || gain > StreamIn::HW_GAIN_MAX) { + LOG(ERROR) << __func__ << ": gain value out of range: " << gain; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + } + mHwGains = in_channelGains; + return ndk::ScopedAStatus::ok(); +} + +StreamOut::StreamOut(StreamContext&& context, const std::optional& offloadInfo) + : mContextInstance(std::move(context)), mOffloadInfo(offloadInfo) { + LOG(DEBUG) << __func__; +} + +void StreamOut::defaultOnClose() { + mContextInstance.reset(); +} + +ndk::ScopedAStatus StreamOut::updateOffloadMetadata( + const AudioOffloadMetadata& in_offloadMetadata) { + LOG(DEBUG) << __func__; + if (isClosed()) { + LOG(ERROR) << __func__ << ": stream was closed"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + if (!mOffloadInfo.has_value()) { + LOG(ERROR) << __func__ << ": not a compressed offload stream"; + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); + } + if (in_offloadMetadata.sampleRate < 0) { + LOG(ERROR) << __func__ << ": invalid sample rate value: " << 
in_offloadMetadata.sampleRate; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + if (in_offloadMetadata.averageBitRatePerSecond < 0) { + LOG(ERROR) << __func__ + << ": invalid average BPS value: " << in_offloadMetadata.averageBitRatePerSecond; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + if (in_offloadMetadata.delayFrames < 0) { + LOG(ERROR) << __func__ + << ": invalid delay frames value: " << in_offloadMetadata.delayFrames; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + if (in_offloadMetadata.paddingFrames < 0) { + LOG(ERROR) << __func__ + << ": invalid padding frames value: " << in_offloadMetadata.paddingFrames; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + mOffloadMetadata = in_offloadMetadata; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus StreamOut::getHwVolume(std::vector* _aidl_return) { + LOG(DEBUG) << __func__; + (void)_aidl_return; + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +ndk::ScopedAStatus StreamOut::setHwVolume(const std::vector& in_channelVolumes) { + LOG(DEBUG) << __func__ << ": gains " << ::android::internal::ToString(in_channelVolumes); + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +ndk::ScopedAStatus StreamOut::getAudioDescriptionMixLevel(float* _aidl_return) { + LOG(DEBUG) << __func__; + (void)_aidl_return; + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +ndk::ScopedAStatus StreamOut::setAudioDescriptionMixLevel(float in_leveldB) { + LOG(DEBUG) << __func__ << ": description mix level " << in_leveldB; + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +ndk::ScopedAStatus StreamOut::getDualMonoMode(AudioDualMonoMode* _aidl_return) { + LOG(DEBUG) << __func__; + (void)_aidl_return; + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +ndk::ScopedAStatus 
StreamOut::setDualMonoMode(AudioDualMonoMode in_mode) { + LOG(DEBUG) << __func__ << ": dual mono mode " << toString(in_mode); + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +ndk::ScopedAStatus StreamOut::getRecommendedLatencyModes( + std::vector* _aidl_return) { + LOG(DEBUG) << __func__; + (void)_aidl_return; + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +ndk::ScopedAStatus StreamOut::setLatencyMode(AudioLatencyMode in_mode) { + LOG(DEBUG) << __func__ << ": latency mode " << toString(in_mode); + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +ndk::ScopedAStatus StreamOut::getPlaybackRateParameters(AudioPlaybackRate* _aidl_return) { + LOG(DEBUG) << __func__; + (void)_aidl_return; + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +ndk::ScopedAStatus StreamOut::setPlaybackRateParameters(const AudioPlaybackRate& in_playbackRate) { + LOG(DEBUG) << __func__ << ": " << in_playbackRate.toString(); + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +ndk::ScopedAStatus StreamOut::selectPresentation(int32_t in_presentationId, int32_t in_programId) { + LOG(DEBUG) << __func__ << ": presentationId " << in_presentationId << ", programId " + << in_programId; + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +StreamOutHwVolumeHelper::StreamOutHwVolumeHelper(const StreamContext* context) + : mChannelCount(getChannelCount(context->getChannelLayout())) {} + +ndk::ScopedAStatus StreamOutHwVolumeHelper::getHwVolumeImpl(std::vector* _aidl_return) { + if (mHwVolumes.empty()) { + mHwVolumes.resize(mChannelCount, 0.0f); + } + *_aidl_return = mHwVolumes; + LOG(DEBUG) << __func__ << ": returning " << ::android::internal::ToString(*_aidl_return); + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus StreamOutHwVolumeHelper::setHwVolumeImpl( + const std::vector& in_channelVolumes) { + LOG(DEBUG) << __func__ << 
": volumes " << ::android::internal::ToString(in_channelVolumes); + if (in_channelVolumes.size() != mChannelCount) { + LOG(ERROR) << __func__ + << ": channel count does not match stream channel count: " << mChannelCount; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + for (float volume : in_channelVolumes) { + if (volume < StreamOut::HW_VOLUME_MIN || volume > StreamOut::HW_VOLUME_MAX) { + LOG(ERROR) << __func__ << ": volume value out of range: " << volume; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + } + mHwVolumes = in_channelVolumes; + return ndk::ScopedAStatus::ok(); +} + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/Telephony.cpp b/audio/Telephony.cpp new file mode 100644 index 0000000..d9da39f --- /dev/null +++ b/audio/Telephony.cpp @@ -0,0 +1,82 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "AHAL_Telephony" +#include +#include + +#include + +#include "core-impl/Telephony.h" + +using aidl::android::hardware::audio::common::isValidAudioMode; +using aidl::android::media::audio::common::AudioMode; +using aidl::android::media::audio::common::Boolean; +using aidl::android::media::audio::common::Float; + +namespace aidl::android::hardware::audio::core { + +Telephony::Telephony() { + mTelecomConfig.voiceVolume = Float{TelecomConfig::VOICE_VOLUME_MAX}; + mTelecomConfig.ttyMode = TelecomConfig::TtyMode::OFF; + mTelecomConfig.isHacEnabled = Boolean{false}; +} + +ndk::ScopedAStatus Telephony::getSupportedAudioModes(std::vector* _aidl_return) { + *_aidl_return = mSupportedAudioModes; + LOG(DEBUG) << __func__ << ": returning " << ::android::internal::ToString(*_aidl_return); + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Telephony::switchAudioMode(AudioMode in_mode) { + if (!isValidAudioMode(in_mode)) { + LOG(ERROR) << __func__ << ": invalid mode " << toString(in_mode); + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + if (std::find(mSupportedAudioModes.begin(), mSupportedAudioModes.end(), in_mode) != + mSupportedAudioModes.end()) { + LOG(DEBUG) << __func__ << ": " << toString(in_mode); + return ndk::ScopedAStatus::ok(); + } + LOG(ERROR) << __func__ << ": unsupported mode " << toString(in_mode); + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +ndk::ScopedAStatus Telephony::setTelecomConfig(const TelecomConfig& in_config, + TelecomConfig* _aidl_return) { + if (in_config.voiceVolume.has_value() && + (in_config.voiceVolume.value().value < TelecomConfig::VOICE_VOLUME_MIN || + in_config.voiceVolume.value().value > TelecomConfig::VOICE_VOLUME_MAX)) { + LOG(ERROR) << __func__ + << ": voice volume value is invalid: " << in_config.voiceVolume.value().value; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + if (in_config.voiceVolume.has_value()) { + 
mTelecomConfig.voiceVolume = in_config.voiceVolume; + } + if (in_config.ttyMode != TelecomConfig::TtyMode::UNSPECIFIED) { + mTelecomConfig.ttyMode = in_config.ttyMode; + } + if (in_config.isHacEnabled.has_value()) { + mTelecomConfig.isHacEnabled = in_config.isHacEnabled; + } + *_aidl_return = mTelecomConfig; + LOG(DEBUG) << __func__ << ": received " << in_config.toString() << ", returning " + << _aidl_return->toString(); + return ndk::ScopedAStatus::ok(); +} + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/XsdcConversion.cpp b/audio/XsdcConversion.cpp new file mode 100644 index 0000000..5845903 --- /dev/null +++ b/audio/XsdcConversion.cpp @@ -0,0 +1,838 @@ +/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#include + +#define LOG_TAG "AHAL_Config" +#include +#include +#include + +#include +#include +#include +#include +#include +#include + +#include + +#include "core-impl/XmlConverter.h" +#include "core-impl/XsdcConversion.h" + +using aidl::android::hardware::audio::common::iequals; +using aidl::android::hardware::audio::common::isValidAudioMode; +using aidl::android::hardware::audio::common::kValidAudioModes; +using aidl::android::media::audio::common::AudioChannelLayout; +using aidl::android::media::audio::common::AudioContentType; +using aidl::android::media::audio::common::AudioDevice; +using aidl::android::media::audio::common::AudioDeviceAddress; +using aidl::android::media::audio::common::AudioDeviceDescription; +using aidl::android::media::audio::common::AudioDeviceType; +using aidl::android::media::audio::common::AudioFormatDescription; +using aidl::android::media::audio::common::AudioFormatType; +using aidl::android::media::audio::common::AudioGain; +using aidl::android::media::audio::common::AudioHalCapCriterion; +using aidl::android::media::audio::common::AudioHalCapCriterionType; +using aidl::android::media::audio::common::AudioHalCapCriterionV2; +using aidl::android::media::audio::common::AudioHalVolumeCurve; +using aidl::android::media::audio::common::AudioIoFlags; +using aidl::android::media::audio::common::AudioMode; +using aidl::android::media::audio::common::AudioPolicyForceUse; +using aidl::android::media::audio::common::AudioPort; +using aidl::android::media::audio::common::AudioPortConfig; +using aidl::android::media::audio::common::AudioPortDeviceExt; +using aidl::android::media::audio::common::AudioPortExt; +using aidl::android::media::audio::common::AudioPortMixExt; +using aidl::android::media::audio::common::AudioProfile; +using aidl::android::media::audio::common::AudioSource; +using aidl::android::media::audio::common::AudioStreamType; +using aidl::android::media::audio::common::AudioUsage; +using android::BAD_VALUE; 
+using android::base::unexpected; +using android::utilities::convertTo; +using ndk::enum_range; + +namespace ap_xsd = android::audio::policy::configuration; +namespace eng_xsd = android::audio::policy::engine::configuration; + +namespace aidl::android::hardware::audio::core::internal { + +static constexpr const char kXsdcForceConfigForCommunication[] = "ForceUseForCommunication"; +static constexpr const char kXsdcForceConfigForMedia[] = "ForceUseForMedia"; +static constexpr const char kXsdcForceConfigForRecord[] = "ForceUseForRecord"; +static constexpr const char kXsdcForceConfigForDock[] = "ForceUseForDock"; +static constexpr const char kXsdcForceConfigForSystem[] = "ForceUseForSystem"; +static constexpr const char kXsdcForceConfigForHdmiSystemAudio[] = "ForceUseForHdmiSystemAudio"; +static constexpr const char kXsdcForceConfigForEncodedSurround[] = "ForceUseForEncodedSurround"; +static constexpr const char kXsdcForceConfigForVibrateRinging[] = "ForceUseForVibrateRinging"; + +inline ConversionResult assertNonEmpty(const std::string& s) { + if (s.empty()) { + LOG(ERROR) << __func__ << " Review Audio Policy config: " + << " empty string is not valid."; + return unexpected(BAD_VALUE); + } + return s; +} + +#define NON_EMPTY_STRING_OR_FATAL(s) VALUE_OR_FATAL(assertNonEmpty(s)) + +ConversionResult convertAudioFlagsToAidl( + const std::vector& xsdcFlagTypeVec) { + int legacyFlagMask = 0; + for (const eng_xsd::FlagType& xsdcFlagType : xsdcFlagTypeVec) { + if (xsdcFlagType != eng_xsd::FlagType::AUDIO_FLAG_NONE) { + audio_flags_mask_t legacyFlag = AUDIO_FLAG_NONE; + if (!::android::AudioFlagConverter::fromString(eng_xsd::toString(xsdcFlagType), + legacyFlag)) { + LOG(ERROR) << __func__ << " Review Audio Policy config, " + << eng_xsd::toString(xsdcFlagType) << " is not a valid flag."; + return unexpected(BAD_VALUE); + } + legacyFlagMask |= static_cast(legacyFlag); + } + } + ConversionResult result = legacy2aidl_audio_flags_mask_t_int32_t_mask( + 
static_cast(legacyFlagMask)); + if (!result.ok()) { + LOG(ERROR) << __func__ << " Review Audio Policy config, " << legacyFlagMask + << " has invalid flag(s)."; + return unexpected(BAD_VALUE); + } + return result; +} + +ConversionResult convertAudioStreamTypeToAidl(const eng_xsd::Stream& xsdcStream) { + audio_stream_type_t legacyStreamType; + if (!::android::StreamTypeConverter::fromString(eng_xsd::toString(xsdcStream), + legacyStreamType)) { + LOG(ERROR) << __func__ << " Review Audio Policy config, " << eng_xsd::toString(xsdcStream) + << " is not a valid audio stream type."; + return unexpected(BAD_VALUE); + } + ConversionResult result = + legacy2aidl_audio_stream_type_t_AudioStreamType(legacyStreamType); + if (!result.ok()) { + LOG(ERROR) << __func__ << " Review Audio Policy config, " << legacyStreamType + << " is not a valid audio stream type."; + return unexpected(BAD_VALUE); + } + return result; +} + +ConversionResult convertAudioSourceToAidl( + const eng_xsd::SourceEnumType& xsdcSourceType) { + audio_source_t legacySourceType; + if (!::android::SourceTypeConverter::fromString(eng_xsd::toString(xsdcSourceType), + legacySourceType)) { + LOG(ERROR) << __func__ << " Review Audio Policy config, " + << eng_xsd::toString(xsdcSourceType) << " is not a valid audio source."; + return unexpected(BAD_VALUE); + } + ConversionResult result = legacy2aidl_audio_source_t_AudioSource(legacySourceType); + if (!result.ok()) { + LOG(ERROR) << __func__ << " Review Audio Policy config, " << legacySourceType + << " is not a valid audio source."; + return unexpected(BAD_VALUE); + } + return result; +} + +ConversionResult convertAudioContentTypeToAidl( + const eng_xsd::ContentType& xsdcContentType) { + audio_content_type_t legacyContentType; + if (!::android::AudioContentTypeConverter::fromString(eng_xsd::toString(xsdcContentType), + legacyContentType)) { + LOG(ERROR) << __func__ << " Review Audio Policy config, " + << eng_xsd::toString(xsdcContentType) << " is not a valid audio 
content type."; + return unexpected(BAD_VALUE); + } + ConversionResult result = + legacy2aidl_audio_content_type_t_AudioContentType(legacyContentType); + if (!result.ok()) { + LOG(ERROR) << __func__ << " Review Audio Policy config, " << legacyContentType + << " is not a valid audio content type."; + return unexpected(BAD_VALUE); + } + return result; +} + +ConversionResult convertAudioUsageToAidl(const eng_xsd::UsageEnumType& xsdcUsage) { + audio_usage_t legacyUsage; + if (!::android::UsageTypeConverter::fromString(eng_xsd::toString(xsdcUsage), legacyUsage)) { + LOG(ERROR) << __func__ << " Review Audio Policy config, not a valid audio usage."; + return unexpected(BAD_VALUE); + } + ConversionResult result = legacy2aidl_audio_usage_t_AudioUsage(legacyUsage); + if (!result.ok()) { + LOG(ERROR) << __func__ << " Review Audio Policy config, not a valid audio usage."; + return unexpected(BAD_VALUE); + } + return result; +} + +ConversionResult convertAudioFormatToAidl(const std::string& xsdcFormat) { + audio_format_t legacyFormat = ::android::formatFromString(xsdcFormat, AUDIO_FORMAT_DEFAULT); + ConversionResult result = + legacy2aidl_audio_format_t_AudioFormatDescription(legacyFormat); + if ((legacyFormat == AUDIO_FORMAT_DEFAULT && xsdcFormat.compare("AUDIO_FORMAT_DEFAULT") != 0) || + !result.ok()) { + LOG(ERROR) << __func__ << " Review Audio Policy config: " << xsdcFormat + << " is not a valid audio format."; + return unexpected(BAD_VALUE); + } + return result; +} + +std::unordered_set getAttachedDevices(const ap_xsd::Modules::Module& moduleConfig) { + std::unordered_set attachedDeviceSet; + if (moduleConfig.hasAttachedDevices()) { + for (const ap_xsd::AttachedDevices& attachedDevices : moduleConfig.getAttachedDevices()) { + if (attachedDevices.hasItem()) { + attachedDeviceSet.insert(attachedDevices.getItem().begin(), + attachedDevices.getItem().end()); + } + } + } + return attachedDeviceSet; +} + +ConversionResult convertDeviceTypeToAidl(const std::string& xType) { + 
audio_devices_t legacyDeviceType = AUDIO_DEVICE_NONE; + ::android::DeviceConverter::fromString(xType, legacyDeviceType); + ConversionResult result = + legacy2aidl_audio_devices_t_AudioDeviceDescription(legacyDeviceType); + if ((legacyDeviceType == AUDIO_DEVICE_NONE) || !result.ok()) { + LOG(ERROR) << __func__ << " Review Audio Policy config: " << xType + << " is not a valid device type."; + return unexpected(BAD_VALUE); + } + return result; +} + +ConversionResult createAudioDevice( + const ap_xsd::DevicePorts::DevicePort& xDevicePort) { + AudioDevice device = { + .type = VALUE_OR_FATAL(convertDeviceTypeToAidl(xDevicePort.getType())), + .address = xDevicePort.hasAddress() + ? AudioDeviceAddress::make( + xDevicePort.getAddress()) + : AudioDeviceAddress{}}; + if (device.type.type == AudioDeviceType::IN_MICROPHONE && device.type.connection.empty()) { + device.address = "bottom"; + } else if (device.type.type == AudioDeviceType::IN_MICROPHONE_BACK && + device.type.connection.empty()) { + device.address = "back"; + } + return device; +} + +ConversionResult createAudioPortExt( + const ap_xsd::DevicePorts::DevicePort& xDevicePort, + const std::string& xDefaultOutputDevice) { + AudioPortDeviceExt deviceExt = { + .device = VALUE_OR_FATAL(createAudioDevice(xDevicePort)), + .flags = (xDevicePort.getTagName() == xDefaultOutputDevice) + ? 1 << AudioPortDeviceExt::FLAG_INDEX_DEFAULT_DEVICE + : 0, + .encodedFormats = + xDevicePort.hasEncodedFormats() + ? VALUE_OR_FATAL( + (convertCollectionToAidl( + xDevicePort.getEncodedFormats(), + &convertAudioFormatToAidl))) + : std::vector{}, + }; + return AudioPortExt::make(deviceExt); +} + +ConversionResult createAudioPortExt(const ap_xsd::MixPorts::MixPort& xMixPort) { + AudioPortMixExt mixExt = { + .maxOpenStreamCount = + xMixPort.hasMaxOpenCount() ? static_cast(xMixPort.getMaxOpenCount()) : 0, + .maxActiveStreamCount = xMixPort.hasMaxActiveCount() + ? 
static_cast(xMixPort.getMaxActiveCount()) + : 1, + .recommendedMuteDurationMs = + xMixPort.hasRecommendedMuteDurationMs() + ? static_cast(xMixPort.getRecommendedMuteDurationMs()) + : 0}; + return AudioPortExt::make(mixExt); +} + +ConversionResult convertGainModeToAidl(const std::vector& gainModeVec) { + int gainModeMask = 0; + for (const ap_xsd::AudioGainMode& gainMode : gainModeVec) { + audio_gain_mode_t legacyGainMode; + if (::android::GainModeConverter::fromString(ap_xsd::toString(gainMode), legacyGainMode)) { + gainModeMask |= static_cast(legacyGainMode); + } + } + return gainModeMask; +} + +ConversionResult convertChannelMaskToAidl( + const ap_xsd::AudioChannelMask& xChannelMask) { + std::string xChannelMaskLiteral = ap_xsd::toString(xChannelMask); + audio_channel_mask_t legacyChannelMask = ::android::channelMaskFromString(xChannelMaskLiteral); + ConversionResult result = + legacy2aidl_audio_channel_mask_t_AudioChannelLayout( + legacyChannelMask, + /* isInput= */ xChannelMaskLiteral.find("AUDIO_CHANNEL_IN_") == 0); + if ((legacyChannelMask == AUDIO_CHANNEL_INVALID) || !result.ok()) { + LOG(ERROR) << __func__ << " Review Audio Policy config: " << xChannelMaskLiteral + << " is not a valid audio channel mask."; + return unexpected(BAD_VALUE); + } + return result; +} + +ConversionResult convertGainToAidl(const ap_xsd::Gains::Gain& xGain) { + return AudioGain{ + .mode = VALUE_OR_FATAL(convertGainModeToAidl(xGain.getMode())), + .channelMask = + xGain.hasChannel_mask() + ? VALUE_OR_FATAL(convertChannelMaskToAidl(xGain.getChannel_mask())) + : AudioChannelLayout{}, + .minValue = xGain.hasMinValueMB() ? xGain.getMinValueMB() : 0, + .maxValue = xGain.hasMaxValueMB() ? xGain.getMaxValueMB() : 0, + .defaultValue = xGain.hasDefaultValueMB() ? xGain.getDefaultValueMB() : 0, + .stepValue = xGain.hasStepValueMB() ? xGain.getStepValueMB() : 0, + .minRampMs = xGain.hasMinRampMs() ? xGain.getMinRampMs() : 0, + .maxRampMs = xGain.hasMaxRampMs() ? 
xGain.getMaxRampMs() : 0, + .useForVolume = xGain.hasUseForVolume() ? xGain.getUseForVolume() : false, + }; +} + +ConversionResult convertAudioProfileToAidl(const ap_xsd::Profile& xProfile) { + return AudioProfile{ + .format = xProfile.hasFormat() + ? VALUE_OR_FATAL(convertAudioFormatToAidl(xProfile.getFormat())) + : AudioFormatDescription{}, + .channelMasks = + xProfile.hasChannelMasks() + ? VALUE_OR_FATAL((convertCollectionToAidl( + xProfile.getChannelMasks(), &convertChannelMaskToAidl))) + : std::vector{}, + .sampleRates = xProfile.hasSamplingRates() + ? VALUE_OR_FATAL((convertCollectionToAidl( + xProfile.getSamplingRates(), + [](const int64_t x) -> int { return x; }))) + : std::vector{}}; +} + +ConversionResult convertIoFlagsToAidl( + const std::vector& flags, const ap_xsd::Role role, + bool flagsForMixPort) { + int legacyFlagMask = 0; + if ((role == ap_xsd::Role::sink && flagsForMixPort) || + (role == ap_xsd::Role::source && !flagsForMixPort)) { + for (const ap_xsd::AudioInOutFlag& flag : flags) { + audio_input_flags_t legacyFlag; + if (::android::InputFlagConverter::fromString(ap_xsd::toString(flag), legacyFlag)) { + legacyFlagMask |= static_cast(legacyFlag); + } + } + return AudioIoFlags::make( + VALUE_OR_FATAL(legacy2aidl_audio_input_flags_t_int32_t_mask( + static_cast(legacyFlagMask)))); + } else { + for (const ap_xsd::AudioInOutFlag& flag : flags) { + audio_output_flags_t legacyFlag; + if (::android::OutputFlagConverter::fromString(ap_xsd::toString(flag), legacyFlag)) { + legacyFlagMask |= static_cast(legacyFlag); + } + } + return AudioIoFlags::make( + VALUE_OR_FATAL(legacy2aidl_audio_output_flags_t_int32_t_mask( + static_cast(legacyFlagMask)))); + } +} + +ConversionResult convertDevicePortToAidl( + const ap_xsd::DevicePorts::DevicePort& xDevicePort, const std::string& xDefaultOutputDevice, + int32_t& nextPortId) { + return AudioPort{ + .id = nextPortId++, + .name = NON_EMPTY_STRING_OR_FATAL(xDevicePort.getTagName()), + .profiles = 
VALUE_OR_FATAL((convertCollectionToAidl( + xDevicePort.getProfile(), convertAudioProfileToAidl))), + .flags = VALUE_OR_FATAL(convertIoFlagsToAidl({}, xDevicePort.getRole(), false)), + .gains = VALUE_OR_FATAL( + (convertWrappedCollectionToAidl( + xDevicePort.getGains(), &ap_xsd::Gains::getGain, convertGainToAidl))), + + .ext = VALUE_OR_FATAL(createAudioPortExt(xDevicePort, xDefaultOutputDevice))}; +} + +ConversionResult> convertDevicePortsInModuleToAidl( + const ap_xsd::Modules::Module& xModuleConfig, int32_t& nextPortId) { + std::vector audioPortVec; + std::vector xDevicePortsVec = xModuleConfig.getDevicePorts(); + if (xDevicePortsVec.size() > 1) { + LOG(ERROR) << __func__ << "Having multiple '' elements is not allowed, found: " + << xDevicePortsVec.size(); + return unexpected(BAD_VALUE); + } + if (!xDevicePortsVec.empty()) { + const std::string xDefaultOutputDevice = xModuleConfig.hasDefaultOutputDevice() + ? xModuleConfig.getDefaultOutputDevice() + : ""; + audioPortVec.reserve(xDevicePortsVec[0].getDevicePort().size()); + for (const ap_xsd::DevicePorts& xDevicePortsType : xDevicePortsVec) { + for (const ap_xsd::DevicePorts::DevicePort& xDevicePort : + xDevicePortsType.getDevicePort()) { + audioPortVec.push_back(VALUE_OR_FATAL( + convertDevicePortToAidl(xDevicePort, xDefaultOutputDevice, nextPortId))); + } + } + } + const std::unordered_set xAttachedDeviceSet = getAttachedDevices(xModuleConfig); + for (const auto& port : audioPortVec) { + const auto& devicePort = port.ext.get(); + if (xAttachedDeviceSet.count(port.name) != devicePort.device.type.connection.empty()) { + LOG(ERROR) << __func__ << ": Review Audio Policy config: " + << "list is incorrect or devicePort \"" << port.name + << "\" type= " << devicePort.device.type.toString() << " is incorrect."; + return unexpected(BAD_VALUE); + } + } + return audioPortVec; +} + +ConversionResult convertMixPortToAidl(const ap_xsd::MixPorts::MixPort& xMixPort, + int32_t& nextPortId) { + return AudioPort{ + .id = 
nextPortId++, + .name = NON_EMPTY_STRING_OR_FATAL(xMixPort.getName()), + .profiles = VALUE_OR_FATAL((convertCollectionToAidl( + xMixPort.getProfile(), convertAudioProfileToAidl))), + .flags = xMixPort.hasFlags() + ? VALUE_OR_FATAL(convertIoFlagsToAidl(xMixPort.getFlags(), + xMixPort.getRole(), true)) + : VALUE_OR_FATAL(convertIoFlagsToAidl({}, xMixPort.getRole(), true)), + .gains = VALUE_OR_FATAL( + (convertWrappedCollectionToAidl( + xMixPort.getGains(), &ap_xsd::Gains::getGain, &convertGainToAidl))), + .ext = VALUE_OR_FATAL(createAudioPortExt(xMixPort)), + }; +} + +ConversionResult> convertMixPortsInModuleToAidl( + const ap_xsd::Modules::Module& xModuleConfig, int32_t& nextPortId) { + std::vector audioPortVec; + std::vector xMixPortsVec = xModuleConfig.getMixPorts(); + if (xMixPortsVec.size() > 1) { + LOG(ERROR) << __func__ << "Having multiple '' elements is not allowed, found: " + << xMixPortsVec.size(); + return unexpected(BAD_VALUE); + } + if (!xMixPortsVec.empty()) { + audioPortVec.reserve(xMixPortsVec[0].getMixPort().size()); + for (const ap_xsd::MixPorts& xMixPortsType : xMixPortsVec) { + for (const ap_xsd::MixPorts::MixPort& xMixPort : xMixPortsType.getMixPort()) { + audioPortVec.push_back(VALUE_OR_FATAL(convertMixPortToAidl(xMixPort, nextPortId))); + } + } + } + return audioPortVec; +} + +ConversionResult getSinkPortId(const ap_xsd::Routes::Route& xRoute, + const std::unordered_map& portMap) { + auto portMapIter = portMap.find(xRoute.getSink()); + if (portMapIter == portMap.end()) { + LOG(ERROR) << __func__ << " Review Audio Policy config: audio route" + << "has sink: " << xRoute.getSink() + << " which is neither a device port nor mix port."; + return unexpected(BAD_VALUE); + } + return portMapIter->second; +} + +ConversionResult> getSourcePortIds( + const ap_xsd::Routes::Route& xRoute, + const std::unordered_map& portMap) { + std::vector sourcePortIds; + for (const std::string& rawSource : ::android::base::Split(xRoute.getSources(), ",")) { + const 
std::string source = ::android::base::Trim(rawSource); + auto portMapIter = portMap.find(source); + if (portMapIter == portMap.end()) { + LOG(ERROR) << __func__ << " Review Audio Policy config: audio route" + << "has source \"" << source + << "\" which is neither a device port nor mix port."; + return unexpected(BAD_VALUE); + } + sourcePortIds.push_back(portMapIter->second); + } + return sourcePortIds; +} + +ConversionResult convertRouteToAidl(const ap_xsd::Routes::Route& xRoute, + const std::vector& aidlAudioPorts) { + std::unordered_map portMap; + for (const AudioPort& port : aidlAudioPorts) { + portMap.insert({port.name, port.id}); + } + return AudioRoute{.sourcePortIds = VALUE_OR_FATAL(getSourcePortIds(xRoute, portMap)), + .sinkPortId = VALUE_OR_FATAL(getSinkPortId(xRoute, portMap)), + .isExclusive = (xRoute.getType() == ap_xsd::MixType::mux)}; +} + +ConversionResult> convertRoutesInModuleToAidl( + const ap_xsd::Modules::Module& xModuleConfig, + const std::vector& aidlAudioPorts) { + std::vector audioRouteVec; + std::vector xRoutesVec = xModuleConfig.getRoutes(); + if (!xRoutesVec.empty()) { + /* + * xRoutesVec likely only contains one element; that is, it's + * likely that all ap_xsd::Routes::MixPort types that we need to convert + * are inside of xRoutesVec[0]. + */ + audioRouteVec.reserve(xRoutesVec[0].getRoute().size()); + for (const ap_xsd::Routes& xRoutesType : xRoutesVec) { + for (const ap_xsd::Routes::Route& xRoute : xRoutesType.getRoute()) { + audioRouteVec.push_back(VALUE_OR_FATAL(convertRouteToAidl(xRoute, aidlAudioPorts))); + } + } + } + return audioRouteVec; +} + +ConversionResult> convertModuleConfigToAidl( + const ap_xsd::Modules::Module& xModuleConfig) { + auto result = std::make_unique(); + auto& aidlModuleConfig = *result; + std::vector devicePorts = VALUE_OR_FATAL( + convertDevicePortsInModuleToAidl(xModuleConfig, aidlModuleConfig.nextPortId)); + + // The XML config does not specify the default input device. 
+ // Assign the first attached input device as the default. + for (auto& port : devicePorts) { + if (port.flags.getTag() != AudioIoFlags::input) continue; + auto& deviceExt = port.ext.get(); + if (!deviceExt.device.type.connection.empty()) continue; + deviceExt.flags |= 1 << AudioPortDeviceExt::FLAG_INDEX_DEFAULT_DEVICE; + break; + } + + std::vector mixPorts = VALUE_OR_FATAL( + convertMixPortsInModuleToAidl(xModuleConfig, aidlModuleConfig.nextPortId)); + aidlModuleConfig.ports.reserve(devicePorts.size() + mixPorts.size()); + aidlModuleConfig.ports.insert(aidlModuleConfig.ports.end(), devicePorts.begin(), + devicePorts.end()); + aidlModuleConfig.ports.insert(aidlModuleConfig.ports.end(), mixPorts.begin(), mixPorts.end()); + + aidlModuleConfig.routes = + VALUE_OR_FATAL(convertRoutesInModuleToAidl(xModuleConfig, aidlModuleConfig.ports)); + return result; +} + +ConversionResult convertTelephonyModeToAidl(const std::string& xsdcModeCriterionType) { + const auto it = std::find_if(kValidAudioModes.begin(), kValidAudioModes.end(), + [&xsdcModeCriterionType](const auto& mode) { + return toString(mode) == xsdcModeCriterionType; + }); + if (it == kValidAudioModes.end()) { + LOG(ERROR) << __func__ << " invalid mode " << xsdcModeCriterionType; + return unexpected(BAD_VALUE); + } + return *it; +} + +ConversionResult convertDeviceAddressToAidl(const std::string& xsdcAddress) { + return AudioDeviceAddress::make(xsdcAddress); +} + +ConversionResult getCriterionTypeByName( + const std::string& name, + const std::vector& xsdcCriterionTypesVec) { + for (const auto& xsdCriterionTypes : xsdcCriterionTypesVec) { + for (const auto& xsdcCriterionType : xsdCriterionTypes.getCriterion_type()) { + if (xsdcCriterionType.getName() == name) { + return xsdcCriterionType; + } + } + } + LOG(ERROR) << __func__ << " failed to find criterion type " << name; + return unexpected(BAD_VALUE); +} + +ConversionResult>> +convertCapCriteriaCollectionToAidl( + const std::vector& xsdcCriteriaVec, + const 
std::vector& xsdcCriterionTypesVec) { + std::vector> resultAidlCriterionVec; + if (xsdcCriteriaVec.empty() || xsdcCriterionTypesVec.empty()) { + LOG(ERROR) << __func__ << " empty criteria/criterionTypes"; + return unexpected(BAD_VALUE); + } + for (const auto& xsdCriteria : xsdcCriteriaVec) { + for (const auto& xsdcCriterion : xsdCriteria.getCriterion()) { + resultAidlCriterionVec.push_back( + std::optional(VALUE_OR_FATAL( + convertCapCriterionV2ToAidl(xsdcCriterion, xsdcCriterionTypesVec)))); + } + } + return resultAidlCriterionVec; +} + +ConversionResult> convertDevicesToAidl( + const eng_xsd::CriterionTypeType& xsdcDeviceCriterionType) { + if (xsdcDeviceCriterionType.getValues().empty()) { + LOG(ERROR) << __func__ << " no values provided"; + return unexpected(BAD_VALUE); + } + std::vector aidlDevices; + for (eng_xsd::ValuesType xsdcValues : xsdcDeviceCriterionType.getValues()) { + aidlDevices.reserve(xsdcValues.getValue().size()); + for (const eng_xsd::ValueType& xsdcValue : xsdcValues.getValue()) { + if (!xsdcValue.hasAndroid_type()) { + LOG(ERROR) << __func__ << " empty android type"; + return unexpected(BAD_VALUE); + } + uint32_t integerValue; + if (!convertTo(xsdcValue.getAndroid_type(), integerValue)) { + LOG(ERROR) << __func__ << " failed to convert android type " + << xsdcValue.getAndroid_type(); + return unexpected(BAD_VALUE); + } + aidlDevices.push_back( + VALUE_OR_RETURN(legacy2aidl_audio_devices_t_AudioDeviceDescription( + static_cast(integerValue)))); + } + } + return aidlDevices; +} + +ConversionResult> convertDeviceAddressesToAidl( + const eng_xsd::CriterionTypeType& xsdcDeviceAddressesCriterionType) { + if (xsdcDeviceAddressesCriterionType.getValues().empty()) { + LOG(ERROR) << __func__ << " no values provided"; + return unexpected(BAD_VALUE); + } + std::vector aidlDeviceAddresses; + for (eng_xsd::ValuesType xsdcValues : xsdcDeviceAddressesCriterionType.getValues()) { + aidlDeviceAddresses.reserve(xsdcValues.getValue().size()); + for (const 
eng_xsd::ValueType& xsdcValue : xsdcValues.getValue()) { + aidlDeviceAddresses.push_back( + AudioDeviceAddress::make(xsdcValue.getLiteral())); + } + } + return aidlDeviceAddresses; +} + +ConversionResult convertAudioModeToAidl(const std::string& xsdcAudioModeType) { + const auto it = std::find_if(enum_range().begin(), enum_range().end(), + [&](const auto v) { return toString(v) == xsdcAudioModeType; }); + if (it == enum_range().end()) { + LOG(ERROR) << __func__ << " invalid audio mode " << xsdcAudioModeType; + return unexpected(BAD_VALUE); + } + return *it; +} + +ConversionResult> convertTelephonyModesToAidl( + const eng_xsd::CriterionTypeType& xsdcTelephonyModeCriterionType) { + if (xsdcTelephonyModeCriterionType.getValues().empty()) { + LOG(ERROR) << __func__ << " no values provided"; + return unexpected(BAD_VALUE); + } + std::vector aidlAudioModes; + for (eng_xsd::ValuesType xsdcValues : xsdcTelephonyModeCriterionType.getValues()) { + aidlAudioModes.reserve(xsdcValues.getValue().size()); + for (const eng_xsd::ValueType& xsdcValue : xsdcValues.getValue()) { + aidlAudioModes.push_back( + VALUE_OR_RETURN(convertAudioModeToAidl(xsdcValue.getLiteral()))); + } + } + return aidlAudioModes; +} + +ConversionResult> convertForceUseConfigsToAidl( + const std::string& criterionValue, + const eng_xsd::CriterionTypeType& xsdcForcedConfigCriterionType) { + if (xsdcForcedConfigCriterionType.getValues().empty()) { + LOG(ERROR) << __func__ << " no values provided"; + return unexpected(BAD_VALUE); + } + std::vector aidlForcedConfigs; + for (eng_xsd::ValuesType xsdcValues : xsdcForcedConfigCriterionType.getValues()) { + aidlForcedConfigs.reserve(xsdcValues.getValue().size()); + for (const eng_xsd::ValueType& xsdcValue : xsdcValues.getValue()) { + aidlForcedConfigs.push_back( + VALUE_OR_RETURN(convertForceUseToAidl(criterionValue, xsdcValue.getLiteral()))); + } + } + return aidlForcedConfigs; +} + +template +ConversionResult convertForceUseForcedConfigToAidl( + const std::string& 
xsdcForcedConfigCriterionType) { + const auto it = std::find_if(enum_range().begin(), enum_range().end(), [&](const auto v) { + return toString(v) == xsdcForcedConfigCriterionType; + }); + if (it == enum_range().end()) { + LOG(ERROR) << __func__ << " invalid forced config " << xsdcForcedConfigCriterionType; + return unexpected(BAD_VALUE); + } + return *it; +} + +ConversionResult convertForceUseToAidl(const std::string& xsdcCriterionName, + const std::string& xsdcCriterionValue) { + if (!fastcmp(xsdcCriterionName.c_str(), kXsdcForceConfigForCommunication, + strlen(kXsdcForceConfigForCommunication))) { + const auto deviceCategory = VALUE_OR_RETURN( + convertForceUseForcedConfigToAidl( + xsdcCriterionValue)); + return AudioPolicyForceUse::make(deviceCategory); + } + if (!fasticmp(xsdcCriterionName.c_str(), kXsdcForceConfigForMedia, + strlen(kXsdcForceConfigForMedia))) { + const auto deviceCategory = VALUE_OR_RETURN( + convertForceUseForcedConfigToAidl( + xsdcCriterionValue)); + return AudioPolicyForceUse::make(deviceCategory); + } + if (!fasticmp(xsdcCriterionName.c_str(), kXsdcForceConfigForRecord, + strlen(kXsdcForceConfigForRecord))) { + const auto deviceCategory = VALUE_OR_RETURN( + convertForceUseForcedConfigToAidl( + xsdcCriterionValue)); + return AudioPolicyForceUse::make(deviceCategory); + } + if (!fasticmp(xsdcCriterionName.c_str(), kXsdcForceConfigForDock, + strlen(kXsdcForceConfigForDock))) { + const auto dockType = + VALUE_OR_RETURN(convertForceUseForcedConfigToAidl( + xsdcCriterionValue)); + return AudioPolicyForceUse::make(dockType); + } + if (!fasticmp(xsdcCriterionName.c_str(), kXsdcForceConfigForSystem, + strlen(kXsdcForceConfigForSystem))) { + return AudioPolicyForceUse::make(xsdcCriterionValue == + "SYSTEM_ENFORCED"); + } + if (!fasticmp(xsdcCriterionName.c_str(), kXsdcForceConfigForHdmiSystemAudio, + strlen(kXsdcForceConfigForHdmiSystemAudio))) { + return AudioPolicyForceUse::make( + xsdcCriterionValue == "HDMI_SYSTEM_AUDIO_ENFORCED"); + } + if 
(!fasticmp(xsdcCriterionName.c_str(), kXsdcForceConfigForEncodedSurround, + strlen(kXsdcForceConfigForEncodedSurround))) { + const auto encodedSurround = VALUE_OR_RETURN( + convertForceUseForcedConfigToAidl( + xsdcCriterionValue)); + return AudioPolicyForceUse::make(encodedSurround); + } + if (!fasticmp(xsdcCriterionName.c_str(), kXsdcForceConfigForVibrateRinging, + strlen(kXsdcForceConfigForVibrateRinging))) { + const auto deviceCategory = VALUE_OR_RETURN( + convertForceUseForcedConfigToAidl( + xsdcCriterionValue)); + return AudioPolicyForceUse::make(deviceCategory); + } + LOG(ERROR) << __func__ << " unrecognized force use " << xsdcCriterionName; + return unexpected(BAD_VALUE); +} + +ConversionResult convertCapCriterionV2ToAidl( + const eng_xsd::CriterionType& xsdcCriterion, + const std::vector& xsdcCriterionTypesVec) { + eng_xsd::CriterionTypeType xsdcCriterionType = + VALUE_OR_RETURN(getCriterionTypeByName(xsdcCriterion.getType(), xsdcCriterionTypesVec)); + std::string defaultLiteralValue = + xsdcCriterion.has_default() ? 
xsdcCriterion.get_default() : ""; + using Tag = AudioHalCapCriterionV2::Tag; + if (iequals(xsdcCriterion.getName(), toString(Tag::availableInputDevices))) { + return AudioHalCapCriterionV2::make( + VALUE_OR_RETURN(convertDevicesToAidl(xsdcCriterionType))); + } + if (iequals(xsdcCriterion.getName(), toString(Tag::availableOutputDevices))) { + return AudioHalCapCriterionV2::make( + VALUE_OR_RETURN(convertDevicesToAidl(xsdcCriterionType))); + } + if (iequals(xsdcCriterion.getName(), toString(Tag::availableInputDevicesAddresses))) { + return AudioHalCapCriterionV2::make( + VALUE_OR_RETURN(convertDeviceAddressesToAidl(xsdcCriterionType))); + } + if (iequals(xsdcCriterion.getName(), toString(Tag::availableOutputDevicesAddresses))) { + return AudioHalCapCriterionV2::make( + VALUE_OR_RETURN(convertDeviceAddressesToAidl(xsdcCriterionType))); + } + if (iequals(xsdcCriterion.getName(), toString(Tag::telephonyMode))) { + return AudioHalCapCriterionV2::make( + VALUE_OR_RETURN(convertTelephonyModesToAidl(xsdcCriterionType))); + } + if (!fastcmp(xsdcCriterion.getName().c_str(), kXsdcForceConfigForUse, + strlen(kXsdcForceConfigForUse))) { + return AudioHalCapCriterionV2::make(VALUE_OR_RETURN( + convertForceUseConfigsToAidl(xsdcCriterion.getName(), xsdcCriterionType))); + } + LOG(ERROR) << __func__ << " unrecognized criterion " << xsdcCriterion.getName(); + return unexpected(BAD_VALUE); +} + +ConversionResult convertCapCriterionToAidl( + const eng_xsd::CriterionType& xsdcCriterion) { + AudioHalCapCriterion aidlCapCriterion; + aidlCapCriterion.name = xsdcCriterion.getName(); + aidlCapCriterion.criterionTypeName = xsdcCriterion.getType(); + aidlCapCriterion.defaultLiteralValue = + xsdcCriterion.has_default() ? 
xsdcCriterion.get_default() : ""; + return aidlCapCriterion; +} + +ConversionResult convertCurvePointToAidl( + const std::string& xsdcCurvePoint) { + AudioHalVolumeCurve::CurvePoint aidlCurvePoint{}; + if ((sscanf(xsdcCurvePoint.c_str(), "%" SCNd8 ",%d", &aidlCurvePoint.index, + &aidlCurvePoint.attenuationMb) != 2) || + (aidlCurvePoint.index < AudioHalVolumeCurve::CurvePoint::MIN_INDEX) || + (aidlCurvePoint.index > AudioHalVolumeCurve::CurvePoint::MAX_INDEX)) { + LOG(ERROR) << __func__ << " Review Audio Policy config: volume curve point:" + << "\"" << xsdcCurvePoint << "\" is invalid"; + return unexpected(BAD_VALUE); + } + return aidlCurvePoint; +} + +/** + * The hard coded id must be in sync with policy.h definition of legacy strategy ids. + */ +std::unordered_map getLegacyProductStrategyMap() { +#define STRATEGY_ENTRY(name, id) {"STRATEGY_" #name, static_cast(id)} + + return {STRATEGY_ENTRY(MEDIA, 5), + STRATEGY_ENTRY(PHONE, 0), + STRATEGY_ENTRY(SONIFICATION, 1), + STRATEGY_ENTRY(SONIFICATION_RESPECTFUL, 4), + STRATEGY_ENTRY(DTMF, 6), + STRATEGY_ENTRY(ENFORCED_AUDIBLE, 2), + STRATEGY_ENTRY(CALL_ASSISTANT, 7), + STRATEGY_ENTRY(TRANSMITTED_THROUGH_SPEAKER,8), + STRATEGY_ENTRY(ACCESSIBILITY, 3)}; +#undef STRATEGY_ENTRY +} + +} // namespace aidl::android::hardware::audio::core::internal diff --git a/audio/acousticEchoCanceler/AcousticEchoCancelerSw.cpp b/audio/acousticEchoCanceler/AcousticEchoCancelerSw.cpp new file mode 100644 index 0000000..be0927c --- /dev/null +++ b/audio/acousticEchoCanceler/AcousticEchoCancelerSw.cpp @@ -0,0 +1,193 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#include +#include + +#define LOG_TAG "AHAL_AcousticEchoCancelerSw" +#include +#include +#include + +#include "AcousticEchoCancelerSw.h" + +using aidl::android::hardware::audio::effect::AcousticEchoCancelerSw; +using aidl::android::hardware::audio::effect::Descriptor; +using aidl::android::hardware::audio::effect::getEffectImplUuidAcousticEchoCancelerSw; +using aidl::android::hardware::audio::effect::getEffectTypeUuidAcousticEchoCanceler; +using aidl::android::hardware::audio::effect::IEffect; +using aidl::android::hardware::audio::effect::Range; +using aidl::android::media::audio::common::AudioUuid; + +extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid, + std::shared_ptr* instanceSpp) { + if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidAcousticEchoCancelerSw()) { + LOG(ERROR) << __func__ << "uuid not supported"; + return EX_ILLEGAL_ARGUMENT; + } + if (instanceSpp) { + *instanceSpp = ndk::SharedRefBase::make(); + LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created"; + return EX_NONE; + } else { + LOG(ERROR) << __func__ << " invalid input parameter!"; + return EX_ILLEGAL_ARGUMENT; + } +} + +extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) { + if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidAcousticEchoCancelerSw()) { + LOG(ERROR) << __func__ << "uuid not supported"; + return EX_ILLEGAL_ARGUMENT; + } + *_aidl_return = AcousticEchoCancelerSw::kDescriptor; + return EX_NONE; +} + +namespace 
aidl::android::hardware::audio::effect { + +const std::string AcousticEchoCancelerSw::kEffectName = "AcousticEchoCancelerSw"; + +const std::vector AcousticEchoCancelerSw::kRanges = { + MAKE_RANGE(AcousticEchoCanceler, echoDelayUs, 0, 500), + /* mobile mode not supported, and not settable */ + MAKE_RANGE(AcousticEchoCanceler, mobileMode, false, false)}; + +const Capability AcousticEchoCancelerSw::kCapability = {.range = AcousticEchoCancelerSw::kRanges}; + +const Descriptor AcousticEchoCancelerSw::kDescriptor = { + .common = {.id = {.type = getEffectTypeUuidAcousticEchoCanceler(), + .uuid = getEffectImplUuidAcousticEchoCancelerSw(), + .proxy = std::nullopt}, + .flags = {.type = Flags::Type::PRE_PROC, + .insert = Flags::Insert::FIRST, + .volume = Flags::Volume::NONE}, + .name = AcousticEchoCancelerSw::kEffectName, + .implementor = "The Android Open Source Project"}, + .capability = AcousticEchoCancelerSw::kCapability}; + +ndk::ScopedAStatus AcousticEchoCancelerSw::getDescriptor(Descriptor* _aidl_return) { + LOG(DEBUG) << __func__ << kDescriptor.toString(); + *_aidl_return = kDescriptor; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus AcousticEchoCancelerSw::setParameterSpecific( + const Parameter::Specific& specific) { + RETURN_IF(Parameter::Specific::acousticEchoCanceler != specific.getTag(), EX_ILLEGAL_ARGUMENT, + "EffectNotSupported"); + RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext"); + + auto& param = specific.get(); + RETURN_IF(!inRange(param, kRanges), EX_ILLEGAL_ARGUMENT, "outOfRange"); + + auto tag = param.getTag(); + switch (tag) { + case AcousticEchoCanceler::echoDelayUs: { + RETURN_IF(mContext->setEchoDelay(param.get()) != + RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "echoDelayNotSupported"); + return ndk::ScopedAStatus::ok(); + } + case AcousticEchoCanceler::mobileMode: { + RETURN_IF(true == param.get(), EX_ILLEGAL_ARGUMENT, + "SettingmobileModeSupported"); + return ndk::ScopedAStatus::ok(); + } + default: { + LOG(ERROR) << __func__ << " 
unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage( + EX_ILLEGAL_ARGUMENT, "AcousticEchoCancelerTagNotSupported"); + } + } +} + +ndk::ScopedAStatus AcousticEchoCancelerSw::getParameterSpecific(const Parameter::Id& id, + Parameter::Specific* specific) { + auto tag = id.getTag(); + RETURN_IF(Parameter::Id::acousticEchoCancelerTag != tag, EX_ILLEGAL_ARGUMENT, "wrongIdTag"); + auto specificId = id.get(); + auto specificIdTag = specificId.getTag(); + switch (specificIdTag) { + case AcousticEchoCanceler::Id::commonTag: + return getParameterAcousticEchoCanceler( + specificId.get(), specific); + default: + LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage( + EX_ILLEGAL_ARGUMENT, "AcousticEchoCancelerTagNotSupported"); + } +} + +ndk::ScopedAStatus AcousticEchoCancelerSw::getParameterAcousticEchoCanceler( + const AcousticEchoCanceler::Tag& tag, Parameter::Specific* specific) { + RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext"); + AcousticEchoCanceler param; + switch (tag) { + case AcousticEchoCanceler::echoDelayUs: { + param.set(mContext->getEchoDelay()); + break; + } + case AcousticEchoCanceler::mobileMode: { + param.set(false); + break; + } + default: { + LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage( + EX_ILLEGAL_ARGUMENT, "AcousticEchoCancelerTagNotSupported"); + } + } + + specific->set(param); + return ndk::ScopedAStatus::ok(); +} + +std::shared_ptr AcousticEchoCancelerSw::createContext( + const Parameter::Common& common) { + if (mContext) { + LOG(DEBUG) << __func__ << " context already exist"; + } else { + mContext = std::make_shared(1 /* statusFmqDepth */, common); + } + return mContext; +} + +RetCode AcousticEchoCancelerSw::releaseContext() { + if (mContext) { + mContext.reset(); + } + return RetCode::SUCCESS; +} + +// Processing method running in EffectWorker thread. 
+IEffect::Status AcousticEchoCancelerSw::effectProcessImpl(float* in, float* out, int samples) { + // TODO: get data buffer and process. + LOG(DEBUG) << __func__ << " in " << in << " out " << out << " samples " << samples; + for (int i = 0; i < samples; i++) { + *out++ = *in++; + } + return {STATUS_OK, samples, samples}; +} + +RetCode AcousticEchoCancelerSwContext::setEchoDelay(int echoDelayUs) { + mEchoDelayUs = echoDelayUs; + return RetCode::SUCCESS; +} + +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/acousticEchoCanceler/AcousticEchoCancelerSw.h b/audio/acousticEchoCanceler/AcousticEchoCancelerSw.h new file mode 100644 index 0000000..95738f8 --- /dev/null +++ b/audio/acousticEchoCanceler/AcousticEchoCancelerSw.h @@ -0,0 +1,74 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +#include +#include +#include +#include +#include + +#include "effect-impl/EffectImpl.h" + +namespace aidl::android::hardware::audio::effect { + +class AcousticEchoCancelerSwContext final : public EffectContext { + public: + AcousticEchoCancelerSwContext(int statusDepth, const Parameter::Common& common) + : EffectContext(statusDepth, common) { + LOG(DEBUG) << __func__; + } + + RetCode setEchoDelay(int echoDelayUs); + int getEchoDelay() const { return mEchoDelayUs; } + + private: + int mEchoDelayUs = 0; +}; + +class AcousticEchoCancelerSw final : public EffectImpl { + public: + static const std::string kEffectName; + static const Capability kCapability; + static const Descriptor kDescriptor; + AcousticEchoCancelerSw() { LOG(DEBUG) << __func__; } + ~AcousticEchoCancelerSw() { + cleanUp(); + LOG(DEBUG) << __func__; + } + + ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override; + ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) + REQUIRES(mImplMutex) override; + ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific) + REQUIRES(mImplMutex) override; + + std::shared_ptr createContext(const Parameter::Common& common) + REQUIRES(mImplMutex) override; + RetCode releaseContext() REQUIRES(mImplMutex) override; + + std::string getEffectName() override { return kEffectName; }; + IEffect::Status effectProcessImpl(float* in, float* out, int samples) override; + + private: + static const std::vector kRanges; + std::shared_ptr mContext GUARDED_BY(mImplMutex); + ndk::ScopedAStatus getParameterAcousticEchoCanceler(const AcousticEchoCanceler::Tag& tag, + Parameter::Specific* specific) + REQUIRES(mImplMutex); +}; +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/acousticEchoCanceler/Android.bp b/audio/acousticEchoCanceler/Android.bp new file mode 100644 index 0000000..46930e0 --- /dev/null +++ b/audio/acousticEchoCanceler/Android.bp @@ -0,0 +1,40 @@ +/* + * 
Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package { + default_team: "trendy_team_android_media_audio_framework", + // See: http://go/android-license-faq + // A large-scale-change added 'default_applicable_licenses' to import + // all of the 'license_kinds' from "hardware_interfaces_license" + // to get the below license kinds: + // SPDX-license-identifier-Apache-2.0 + default_applicable_licenses: ["hardware_interfaces_license"], +} + +cc_library_shared { + name: "libaecsw", + defaults: [ + "aidlaudioeffectservice_defaults", + ], + srcs: [ + "AcousticEchoCancelerSw.cpp", + ":effectCommonFile", + ], + relative_install_path: "soundfx", + visibility: [ + "//hardware/interfaces/audio/aidl/default:__subpackages__", + ], +} diff --git a/audio/alsa/Mixer.cpp b/audio/alsa/Mixer.cpp new file mode 100644 index 0000000..e72502b --- /dev/null +++ b/audio/alsa/Mixer.cpp @@ -0,0 +1,297 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include + +#define LOG_TAG "AHAL_AlsaMixer" +#include +#include +#include + +#include "Mixer.h" + +namespace ndk { + +// This enables use of 'error/expected_utils' for ScopedAStatus. + +inline bool errorIsOk(const ScopedAStatus& s) { + return s.isOk(); +} + +inline std::string errorToString(const ScopedAStatus& s) { + return s.getDescription(); +} + +} // namespace ndk + +namespace aidl::android::hardware::audio::core::alsa { + +// static +const std::map> + Mixer::kPossibleControls = { + {Mixer::MASTER_SWITCH, {{"Master Playback Switch", MIXER_CTL_TYPE_BOOL}}}, + {Mixer::MASTER_VOLUME, {{"Master Playback Volume", MIXER_CTL_TYPE_INT}}}, + {Mixer::HW_VOLUME, + {{"Headphone Playback Volume", MIXER_CTL_TYPE_INT}, + {"Headset Playback Volume", MIXER_CTL_TYPE_INT}, + {"PCM Playback Volume", MIXER_CTL_TYPE_INT}}}, + {Mixer::MIC_SWITCH, {{"Capture Switch", MIXER_CTL_TYPE_BOOL}}}, + {Mixer::MIC_GAIN, {{"Capture Volume", MIXER_CTL_TYPE_INT}}}}; + +// static +Mixer::Controls Mixer::initializeMixerControls(struct mixer* mixer) { + if (mixer == nullptr) return {}; + Controls mixerControls; + std::string mixerCtlNames; + for (const auto& [control, possibleCtls] : kPossibleControls) { + for (const auto& [ctlName, expectedCtlType] : possibleCtls) { + struct mixer_ctl* ctl = mixer_get_ctl_by_name(mixer, ctlName.c_str()); + if (ctl != nullptr && mixer_ctl_get_type(ctl) == expectedCtlType) { + mixerControls.emplace(control, ctl); + if (!mixerCtlNames.empty()) { + mixerCtlNames += ","; + } + mixerCtlNames += ctlName; + break; + } + } + } + LOG(DEBUG) << __func__ << ": available mixer control names=[" << mixerCtlNames << "]"; + return mixerControls; +} + +std::ostream& operator<<(std::ostream& s, Mixer::Control c) { + switch (c) { + case Mixer::Control::MASTER_SWITCH: + s << "master mute"; + break; + case Mixer::Control::MASTER_VOLUME: + s << "master volume"; + 
break; + case Mixer::Control::HW_VOLUME: + s << "volume"; + break; + case Mixer::Control::MIC_SWITCH: + s << "mic mute"; + break; + case Mixer::Control::MIC_GAIN: + s << "mic gain"; + break; + } + return s; +} + +Mixer::Mixer(int card) : mMixer(mixer_open(card)), mMixerControls(initializeMixerControls(mMixer)) { + if (!isValid()) { + PLOG(ERROR) << __func__ << ": failed to open mixer for card=" << card; + } +} + +Mixer::~Mixer() { + if (isValid()) { + std::lock_guard l(mMixerAccess); + mixer_close(mMixer); + } +} + +ndk::ScopedAStatus Mixer::getMasterMute(bool* muted) { + return getMixerControlMute(MASTER_SWITCH, muted); +} + +ndk::ScopedAStatus Mixer::getMasterVolume(float* volume) { + return getMixerControlVolume(MASTER_VOLUME, volume); +} + +ndk::ScopedAStatus Mixer::getMicGain(float* gain) { + return getMixerControlVolume(MIC_GAIN, gain); +} + +ndk::ScopedAStatus Mixer::getMicMute(bool* muted) { + return getMixerControlMute(MIC_SWITCH, muted); +} + +ndk::ScopedAStatus Mixer::getVolumes(std::vector* volumes) { + struct mixer_ctl* mctl; + RETURN_STATUS_IF_ERROR(findControl(Mixer::HW_VOLUME, &mctl)); + std::vector percents; + std::lock_guard l(mMixerAccess); + if (int err = getMixerControlPercent(mctl, &percents); err != 0) { + LOG(ERROR) << __func__ << ": failed to get volume, err=" << err; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + std::transform(percents.begin(), percents.end(), std::back_inserter(*volumes), + [](int percent) -> float { return std::clamp(percent / 100.0f, 0.0f, 1.0f); }); + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Mixer::setMasterMute(bool muted) { + return setMixerControlMute(MASTER_SWITCH, muted); +} + +ndk::ScopedAStatus Mixer::setMasterVolume(float volume) { + return setMixerControlVolume(MASTER_VOLUME, volume); +} + +ndk::ScopedAStatus Mixer::setMicGain(float gain) { + return setMixerControlVolume(MIC_GAIN, gain); +} + +ndk::ScopedAStatus Mixer::setMicMute(bool muted) { + return 
setMixerControlMute(MIC_SWITCH, muted); +} + +ndk::ScopedAStatus Mixer::setVolumes(const std::vector& volumes) { + struct mixer_ctl* mctl; + RETURN_STATUS_IF_ERROR(findControl(Mixer::HW_VOLUME, &mctl)); + std::vector percents; + std::transform( + volumes.begin(), volumes.end(), std::back_inserter(percents), + [](float volume) -> int { return std::floor(std::clamp(volume, 0.0f, 1.0f) * 100); }); + std::lock_guard l(mMixerAccess); + if (int err = setMixerControlPercent(mctl, percents); err != 0) { + LOG(ERROR) << __func__ << ": failed to set volume, err=" << err; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Mixer::findControl(Control ctl, struct mixer_ctl** result) { + if (!isValid()) { + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + if (auto it = mMixerControls.find(ctl); it != mMixerControls.end()) { + *result = it->second; + return ndk::ScopedAStatus::ok(); + } + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +ndk::ScopedAStatus Mixer::getMixerControlMute(Control ctl, bool* muted) { + struct mixer_ctl* mctl; + RETURN_STATUS_IF_ERROR(findControl(ctl, &mctl)); + std::lock_guard l(mMixerAccess); + std::vector mutedValues; + if (int err = getMixerControlValues(mctl, &mutedValues); err != 0) { + LOG(ERROR) << __func__ << ": failed to get " << ctl << ", err=" << err; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + if (mutedValues.empty()) { + LOG(ERROR) << __func__ << ": got no values for " << ctl; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + *muted = mutedValues[0] != 0; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Mixer::getMixerControlVolume(Control ctl, float* volume) { + struct mixer_ctl* mctl; + RETURN_STATUS_IF_ERROR(findControl(ctl, &mctl)); + std::lock_guard l(mMixerAccess); + std::vector percents; + if (int err = getMixerControlPercent(mctl, &percents); err 
!= 0) { + LOG(ERROR) << __func__ << ": failed to get " << ctl << ", err=" << err; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + if (percents.empty()) { + LOG(ERROR) << __func__ << ": got no values for " << ctl; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + *volume = std::clamp(percents[0] / 100.0f, 0.0f, 1.0f); + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Mixer::setMixerControlMute(Control ctl, bool muted) { + struct mixer_ctl* mctl; + RETURN_STATUS_IF_ERROR(findControl(ctl, &mctl)); + std::lock_guard l(mMixerAccess); + if (int err = setMixerControlValue(mctl, muted ? 0 : 1); err != 0) { + LOG(ERROR) << __func__ << ": failed to set " << ctl << " to " << muted << ", err=" << err; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus Mixer::setMixerControlVolume(Control ctl, float volume) { + struct mixer_ctl* mctl; + RETURN_STATUS_IF_ERROR(findControl(ctl, &mctl)); + volume = std::clamp(volume, 0.0f, 1.0f); + std::lock_guard l(mMixerAccess); + if (int err = setMixerControlPercent(mctl, std::floor(volume * 100)); err != 0) { + LOG(ERROR) << __func__ << ": failed to set " << ctl << " to " << volume << ", err=" << err; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + return ndk::ScopedAStatus::ok(); +} + +int Mixer::getMixerControlPercent(struct mixer_ctl* ctl, std::vector* percents) { + const unsigned int n = mixer_ctl_get_num_values(ctl); + percents->resize(n); + for (unsigned int id = 0; id < n; id++) { + if (int valueOrError = mixer_ctl_get_percent(ctl, id); valueOrError >= 0) { + (*percents)[id] = valueOrError; + } else { + return valueOrError; + } + } + return 0; +} + +int Mixer::getMixerControlValues(struct mixer_ctl* ctl, std::vector* values) { + const unsigned int n = mixer_ctl_get_num_values(ctl); + values->resize(n); + for (unsigned int id = 0; id < n; id++) { + if (int valueOrError = 
mixer_ctl_get_value(ctl, id); valueOrError >= 0) { + (*values)[id] = valueOrError; + } else { + return valueOrError; + } + } + return 0; +} + +int Mixer::setMixerControlPercent(struct mixer_ctl* ctl, int percent) { + const unsigned int n = mixer_ctl_get_num_values(ctl); + for (unsigned int id = 0; id < n; id++) { + if (int error = mixer_ctl_set_percent(ctl, id, percent); error != 0) { + return error; + } + } + return 0; +} + +int Mixer::setMixerControlPercent(struct mixer_ctl* ctl, const std::vector& percents) { + const unsigned int n = mixer_ctl_get_num_values(ctl); + for (unsigned int id = 0; id < n; id++) { + if (int error = mixer_ctl_set_percent(ctl, id, id < percents.size() ? percents[id] : 0); + error != 0) { + return error; + } + } + return 0; +} + +int Mixer::setMixerControlValue(struct mixer_ctl* ctl, int value) { + const unsigned int n = mixer_ctl_get_num_values(ctl); + for (unsigned int id = 0; id < n; id++) { + if (int error = mixer_ctl_set_value(ctl, id, value); error != 0) { + return error; + } + } + return 0; +} + +} // namespace aidl::android::hardware::audio::core::alsa diff --git a/audio/alsa/Mixer.h b/audio/alsa/Mixer.h new file mode 100644 index 0000000..41f19a8 --- /dev/null +++ b/audio/alsa/Mixer.h @@ -0,0 +1,93 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +#include +#include +#include +#include +#include +#include + +#include +#include + +extern "C" { +#include +} + +namespace aidl::android::hardware::audio::core::alsa { + +class Mixer { + public: + explicit Mixer(int card); + ~Mixer(); + + bool isValid() const { return mMixer != nullptr; } + + ndk::ScopedAStatus getMasterMute(bool* muted); + ndk::ScopedAStatus getMasterVolume(float* volume); + ndk::ScopedAStatus getMicGain(float* gain); + ndk::ScopedAStatus getMicMute(bool* muted); + ndk::ScopedAStatus getVolumes(std::vector* volumes); + ndk::ScopedAStatus setMasterMute(bool muted); + ndk::ScopedAStatus setMasterVolume(float volume); + ndk::ScopedAStatus setMicGain(float gain); + ndk::ScopedAStatus setMicMute(bool muted); + ndk::ScopedAStatus setVolumes(const std::vector& volumes); + + private: + enum Control { + MASTER_SWITCH, + MASTER_VOLUME, + HW_VOLUME, + MIC_SWITCH, + MIC_GAIN, + }; + using ControlNamesAndExpectedCtlType = std::pair; + using Controls = std::map; + + friend std::ostream& operator<<(std::ostream&, Control); + static const std::map> kPossibleControls; + static Controls initializeMixerControls(struct mixer* mixer); + + ndk::ScopedAStatus findControl(Control ctl, struct mixer_ctl** result); + ndk::ScopedAStatus getMixerControlMute(Control ctl, bool* muted); + ndk::ScopedAStatus getMixerControlVolume(Control ctl, float* volume); + ndk::ScopedAStatus setMixerControlMute(Control ctl, bool muted); + ndk::ScopedAStatus setMixerControlVolume(Control ctl, float volume); + + int getMixerControlPercent(struct mixer_ctl* ctl, std::vector* percents) + REQUIRES(mMixerAccess); + int getMixerControlValues(struct mixer_ctl* ctl, std::vector* values) + REQUIRES(mMixerAccess); + int setMixerControlPercent(struct mixer_ctl* ctl, int percent) REQUIRES(mMixerAccess); + int setMixerControlPercent(struct mixer_ctl* ctl, const std::vector& percents) + REQUIRES(mMixerAccess); + int setMixerControlValue(struct mixer_ctl* ctl, int value) 
REQUIRES(mMixerAccess); + + // Since ALSA functions do not use internal locking, enforce thread safety at our level. + std::mutex mMixerAccess; + // The mixer object is owned by ALSA and will be released when the mixer is closed. + struct mixer* const mMixer; + // `mMixerControls` will only be initialized in constructor. After that, it will only be + // read but not be modified. Each mixer_ctl object is owned by ALSA, it's life span is + // the same as of the mixer itself. + const Controls mMixerControls; +}; + +} // namespace aidl::android::hardware::audio::core::alsa diff --git a/audio/alsa/ModuleAlsa.cpp b/audio/alsa/ModuleAlsa.cpp new file mode 100644 index 0000000..9a2cce7 --- /dev/null +++ b/audio/alsa/ModuleAlsa.cpp @@ -0,0 +1,68 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "AHAL_ModuleAlsa" + +#include + +#include + +#include "Utils.h" +#include "core-impl/ModuleAlsa.h" + +extern "C" { +#include "alsa_device_profile.h" +} + +using aidl::android::media::audio::common::AudioChannelLayout; +using aidl::android::media::audio::common::AudioFormatType; +using aidl::android::media::audio::common::AudioPort; +using aidl::android::media::audio::common::AudioProfile; + +namespace aidl::android::hardware::audio::core { + +ndk::ScopedAStatus ModuleAlsa::populateConnectedDevicePort(AudioPort* audioPort, int32_t) { + auto deviceProfile = alsa::getDeviceProfile(*audioPort); + if (!deviceProfile.has_value()) { + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + auto proxy = alsa::readAlsaDeviceInfo(*deviceProfile); + if (proxy.get() == nullptr) { + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + + alsa_device_profile* profile = proxy.getProfile(); + std::vector channels = alsa::getChannelMasksFromProfile(profile); + std::vector sampleRates = alsa::getSampleRatesFromProfile(profile); + + for (size_t i = 0; i < std::min(MAX_PROFILE_FORMATS, AUDIO_PORT_MAX_AUDIO_PROFILES) && + profile->formats[i] != PCM_FORMAT_INVALID; + ++i) { + auto audioFormatDescription = + alsa::c2aidl_pcm_format_AudioFormatDescription(profile->formats[i]); + if (audioFormatDescription.type == AudioFormatType::DEFAULT) { + LOG(WARNING) << __func__ << ": unknown pcm type=" << profile->formats[i]; + continue; + } + AudioProfile audioProfile = {.format = audioFormatDescription, + .channelMasks = channels, + .sampleRates = sampleRates}; + audioPort->profiles.push_back(std::move(audioProfile)); + } + return ndk::ScopedAStatus::ok(); +} + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/alsa/StreamAlsa.cpp b/audio/alsa/StreamAlsa.cpp new file mode 100644 index 0000000..7a44cc7 --- /dev/null +++ b/audio/alsa/StreamAlsa.cpp @@ -0,0 +1,326 @@ +/* + * Copyright (C) 2023 The Android Open Source 
Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include + +#define LOG_TAG "AHAL_StreamAlsa" +#include + +#include +#include +#include +#include + +#include "core-impl/StreamAlsa.h" + +using aidl::android::hardware::audio::common::getChannelCount; + +namespace aidl::android::hardware::audio::core { + +StreamAlsa::StreamAlsa(StreamContext* context, const Metadata& metadata, int readWriteRetries) + : StreamCommonImpl(context, metadata), + mBufferSizeFrames(getContext().getBufferSizeInFrames()), + mFrameSizeBytes(getContext().getFrameSize()), + mSampleRate(getContext().getSampleRate()), + mIsInput(isInput(metadata)), + mConfig(alsa::getPcmConfig(getContext(), mIsInput)), + mReadWriteRetries(readWriteRetries) {} + +StreamAlsa::~StreamAlsa() { + cleanupWorker(); +} + +::android::NBAIO_Format StreamAlsa::getPipeFormat() const { + const audio_format_t audioFormat = VALUE_OR_FATAL( + aidl2legacy_AudioFormatDescription_audio_format_t(getContext().getFormat())); + const int channelCount = getChannelCount(getContext().getChannelLayout()); + return ::android::Format_from_SR_C(getContext().getSampleRate(), channelCount, audioFormat); +} + +::android::sp<::android::MonoPipe> StreamAlsa::makeSink(bool writeCanBlock) { + const ::android::NBAIO_Format format = getPipeFormat(); + auto sink = ::android::sp<::android::MonoPipe>::make(mBufferSizeFrames, format, writeCanBlock); + const ::android::NBAIO_Format offers[1] = {format}; + size_t 
numCounterOffers = 0; + ssize_t index = sink->negotiate(offers, 1, nullptr, numCounterOffers); + LOG_IF(FATAL, index != 0) << __func__ << ": Negotiation for the sink failed, index = " << index; + return sink; +} + +::android::sp<::android::MonoPipeReader> StreamAlsa::makeSource(::android::MonoPipe* pipe) { + const ::android::NBAIO_Format format = getPipeFormat(); + const ::android::NBAIO_Format offers[1] = {format}; + auto source = ::android::sp<::android::MonoPipeReader>::make(pipe); + size_t numCounterOffers = 0; + ssize_t index = source->negotiate(offers, 1, nullptr, numCounterOffers); + LOG_IF(FATAL, index != 0) << __func__ + << ": Negotiation for the source failed, index = " << index; + return source; +} + +::android::status_t StreamAlsa::init(DriverCallbackInterface* /*callback*/) { + return mConfig.has_value() ? ::android::OK : ::android::NO_INIT; +} + +::android::status_t StreamAlsa::drain(StreamDescriptor::DrainMode) { + if (!mIsInput) { + static constexpr float kMicrosPerSecond = MICROS_PER_SECOND; + const size_t delayUs = static_cast( + std::roundf(mBufferSizeFrames * kMicrosPerSecond / mSampleRate)); + usleep(delayUs); + } + return ::android::OK; +} + +::android::status_t StreamAlsa::flush() { + return ::android::OK; +} + +::android::status_t StreamAlsa::pause() { + return ::android::OK; +} + +::android::status_t StreamAlsa::standby() { + teardownIo(); + return ::android::OK; +} + +::android::status_t StreamAlsa::start() { + if (!mAlsaDeviceProxies.empty()) { + // This is a resume after a pause. 
+ return ::android::OK; + } + decltype(mAlsaDeviceProxies) alsaDeviceProxies; + decltype(mSources) sources; + decltype(mSinks) sinks; + for (const auto& device : getDeviceProfiles()) { + if ((device.direction == PCM_OUT && mIsInput) || + (device.direction == PCM_IN && !mIsInput)) { + continue; + } + alsa::DeviceProxy proxy; + if (device.isExternal) { + // Always ask alsa configure as required since the configuration should be supported + // by the connected device. That is guaranteed by `setAudioPortConfig` and + // `setAudioPatch`. + proxy = alsa::openProxyForExternalDevice( + device, const_cast(&mConfig.value()), + true /*require_exact_match*/); + } else { + proxy = alsa::openProxyForAttachedDevice( + device, const_cast(&mConfig.value()), mBufferSizeFrames); + } + if (proxy.get() == nullptr) { + return ::android::NO_INIT; + } + alsaDeviceProxies.push_back(std::move(proxy)); + auto sink = makeSink(mIsInput); // Do not block the writer when it is on our thread. + if (sink != nullptr) { + sinks.push_back(sink); + } else { + return ::android::NO_INIT; + } + if (auto source = makeSource(sink.get()); source != nullptr) { + sources.push_back(source); + } else { + return ::android::NO_INIT; + } + } + if (alsaDeviceProxies.empty()) { + return ::android::NO_INIT; + } + mAlsaDeviceProxies = std::move(alsaDeviceProxies); + mSources = std::move(sources); + mSinks = std::move(sinks); + mIoThreadIsRunning = true; + for (size_t i = 0; i < mAlsaDeviceProxies.size(); ++i) { + mIoThreads.emplace_back(mIsInput ? 
&StreamAlsa::inputIoThread : &StreamAlsa::outputIoThread, + this, i); + } + return ::android::OK; +} + +::android::status_t StreamAlsa::transfer(void* buffer, size_t frameCount, size_t* actualFrameCount, + int32_t* latencyMs) { + if (mAlsaDeviceProxies.empty()) { + LOG(FATAL) << __func__ << ": no opened devices"; + return ::android::NO_INIT; + } + const size_t bytesToTransfer = frameCount * mFrameSizeBytes; + unsigned maxLatency = 0; + if (mIsInput) { + const size_t i = 0; // For the input case, only support a single device. + LOG(VERBOSE) << __func__ << ": reading from sink " << i; + ssize_t framesRead = mSources[i]->read(buffer, frameCount); + LOG_IF(FATAL, framesRead < 0) << "Error reading from the pipe: " << framesRead; + if (ssize_t framesMissing = static_cast(frameCount) - framesRead; + framesMissing > 0) { + LOG(WARNING) << __func__ << ": incomplete data received, inserting " << framesMissing + << " frames of silence"; + memset(static_cast(buffer) + framesRead * mFrameSizeBytes, 0, + framesMissing * mFrameSizeBytes); + } + maxLatency = proxy_get_latency(mAlsaDeviceProxies[i].get()); + } else { + alsa::applyGain(buffer, mGain, bytesToTransfer, mConfig.value().format, mConfig->channels); + for (size_t i = 0; i < mAlsaDeviceProxies.size(); ++i) { + LOG(VERBOSE) << __func__ << ": writing into sink " << i; + ssize_t framesWritten = mSinks[i]->write(buffer, frameCount); + LOG_IF(FATAL, framesWritten < 0) << "Error writing into the pipe: " << framesWritten; + if (ssize_t framesLost = static_cast(frameCount) - framesWritten; + framesLost > 0) { + LOG(WARNING) << __func__ << ": sink " << i << " incomplete data sent, dropping " + << framesLost << " frames"; + } + maxLatency = std::max(maxLatency, proxy_get_latency(mAlsaDeviceProxies[i].get())); + } + } + *actualFrameCount = frameCount; + maxLatency = std::min(maxLatency, static_cast(std::numeric_limits::max())); + *latencyMs = maxLatency; + return ::android::OK; +} + +::android::status_t 
StreamAlsa::refinePosition(StreamDescriptor::Position* position) { + if (mAlsaDeviceProxies.empty()) { + LOG(WARNING) << __func__ << ": no opened devices"; + return ::android::NO_INIT; + } + // Since the proxy can only count transferred frames since its creation, + // we override its counter value with ours and let it to correct for buffered frames. + alsa::resetTransferredFrames(mAlsaDeviceProxies[0], position->frames); + if (mIsInput) { + if (int ret = proxy_get_capture_position(mAlsaDeviceProxies[0].get(), &position->frames, + &position->timeNs); + ret != 0) { + LOG(WARNING) << __func__ << ": failed to retrieve capture position: " << ret; + return ::android::INVALID_OPERATION; + } + } else { + uint64_t hwFrames; + struct timespec timestamp; + if (int ret = proxy_get_presentation_position(mAlsaDeviceProxies[0].get(), &hwFrames, + ×tamp); + ret == 0) { + if (hwFrames > std::numeric_limits::max()) { + hwFrames -= std::numeric_limits::max(); + } + position->frames = static_cast(hwFrames); + position->timeNs = audio_utils_ns_from_timespec(×tamp); + } else { + LOG(WARNING) << __func__ << ": failed to retrieve presentation position: " << ret; + return ::android::INVALID_OPERATION; + } + } + return ::android::OK; +} + +void StreamAlsa::shutdown() { + teardownIo(); +} + +ndk::ScopedAStatus StreamAlsa::setGain(float gain) { + mGain = gain; + return ndk::ScopedAStatus::ok(); +} + +void StreamAlsa::inputIoThread(size_t idx) { +#if defined(__ANDROID__) + setWorkerThreadPriority(pthread_gettid_np(pthread_self())); + const std::string threadName = (std::string("in_") + std::to_string(idx)).substr(0, 15); + pthread_setname_np(pthread_self(), threadName.c_str()); +#endif + const size_t bufferSize = mBufferSizeFrames * mFrameSizeBytes; + std::vector buffer(bufferSize); + while (mIoThreadIsRunning) { + if (int ret = proxy_read_with_retries(mAlsaDeviceProxies[idx].get(), &buffer[0], bufferSize, + mReadWriteRetries); + ret == 0) { + size_t bufferFramesWritten = 0; + while 
(bufferFramesWritten < mBufferSizeFrames) { + if (!mIoThreadIsRunning) return; + ssize_t framesWrittenOrError = + mSinks[idx]->write(&buffer[0], mBufferSizeFrames - bufferFramesWritten); + if (framesWrittenOrError >= 0) { + bufferFramesWritten += framesWrittenOrError; + } else { + LOG(WARNING) << __func__ << "[" << idx + << "]: Error while writing into the pipe: " + << framesWrittenOrError; + } + } + } else { + // Errors when the stream is being stopped are expected. + LOG_IF(WARNING, mIoThreadIsRunning) + << __func__ << "[" << idx << "]: Error reading from ALSA: " << ret; + } + } +} + +void StreamAlsa::outputIoThread(size_t idx) { +#if defined(__ANDROID__) + setWorkerThreadPriority(pthread_gettid_np(pthread_self())); + const std::string threadName = (std::string("out_") + std::to_string(idx)).substr(0, 15); + pthread_setname_np(pthread_self(), threadName.c_str()); +#endif + const size_t bufferSize = mBufferSizeFrames * mFrameSizeBytes; + std::vector buffer(bufferSize); + while (mIoThreadIsRunning) { + ssize_t framesReadOrError = mSources[idx]->read(&buffer[0], mBufferSizeFrames); + if (framesReadOrError > 0) { + int ret = proxy_write_with_retries(mAlsaDeviceProxies[idx].get(), &buffer[0], + framesReadOrError * mFrameSizeBytes, + mReadWriteRetries); + // Errors when the stream is being stopped are expected. + LOG_IF(WARNING, ret != 0 && mIoThreadIsRunning) + << __func__ << "[" << idx << "]: Error writing into ALSA: " << ret; + } else if (framesReadOrError == 0) { + // MonoPipeReader does not have a blocking read, while use of std::condition_variable + // requires use of a mutex. For now, just do a 1ms sleep. Consider using a different + // pipe / ring buffer mechanism. 
+ if (mIoThreadIsRunning) usleep(1000); + } else { + LOG(WARNING) << __func__ << "[" << idx + << "]: Error while reading from the pipe: " << framesReadOrError; + } + } +} + +void StreamAlsa::teardownIo() { + mIoThreadIsRunning = false; + if (mIsInput) { + LOG(DEBUG) << __func__ << ": shutting down pipes"; + for (auto& sink : mSinks) { + sink->shutdown(true); + } + } + LOG(DEBUG) << __func__ << ": stopping PCM streams"; + for (const auto& proxy : mAlsaDeviceProxies) { + proxy_stop(proxy.get()); + } + LOG(DEBUG) << __func__ << ": joining threads"; + for (auto& thread : mIoThreads) { + if (thread.joinable()) thread.join(); + } + mIoThreads.clear(); + LOG(DEBUG) << __func__ << ": closing PCM devices"; + mAlsaDeviceProxies.clear(); + mSources.clear(); + mSinks.clear(); +} + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/alsa/Utils.cpp b/audio/alsa/Utils.cpp new file mode 100644 index 0000000..77e4f65 --- /dev/null +++ b/audio/alsa/Utils.cpp @@ -0,0 +1,480 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include + +#define LOG_TAG "AHAL_AlsaUtils" +#include +#include +#include +#include +#include +#include + +#include "Utils.h" +#include "core-impl/utils.h" + +using aidl::android::hardware::audio::common::getChannelCount; +using aidl::android::media::audio::common::AudioChannelLayout; +using aidl::android::media::audio::common::AudioDeviceAddress; +using aidl::android::media::audio::common::AudioFormatDescription; +using aidl::android::media::audio::common::AudioFormatType; +using aidl::android::media::audio::common::AudioIoFlags; +using aidl::android::media::audio::common::AudioPortExt; +using aidl::android::media::audio::common::PcmType; + +namespace aidl::android::hardware::audio::core::alsa { + +const float kUnityGainFloat = 1.0f; + +DeviceProxy::DeviceProxy() : mProfile(nullptr), mProxy(nullptr, alsaProxyDeleter) {} + +DeviceProxy::DeviceProxy(const DeviceProfile& deviceProfile) + : mProfile(new alsa_device_profile), mProxy(new alsa_device_proxy, alsaProxyDeleter) { + profile_init(mProfile.get(), deviceProfile.direction); + mProfile->card = deviceProfile.card; + mProfile->device = deviceProfile.device; + memset(mProxy.get(), 0, sizeof(alsa_device_proxy)); +} + +void DeviceProxy::alsaProxyDeleter(alsa_device_proxy* proxy) { + if (proxy != nullptr) { + proxy_close(proxy); + delete proxy; + } +} + +namespace { + +using AudioChannelCountToMaskMap = std::map; +using AudioFormatDescToPcmFormatMap = std::map; +using PcmFormatToAudioFormatDescMap = std::map; + +AudioChannelLayout getInvalidChannelLayout() { + static const AudioChannelLayout invalidChannelLayout = + AudioChannelLayout::make(0); + return invalidChannelLayout; +} + +static AudioChannelCountToMaskMap make_ChannelCountToMaskMap( + const std::set& channelMasks) { + AudioChannelCountToMaskMap channelMaskToCountMap; + for (const auto& channelMask : channelMasks) { + channelMaskToCountMap.emplace(getChannelCount(channelMask), channelMask); + } + return channelMaskToCountMap; +} + +#define 
DEFINE_CHANNEL_LAYOUT_MASK(n) \ + AudioChannelLayout::make(AudioChannelLayout::LAYOUT_##n) + +const AudioChannelCountToMaskMap& getSupportedChannelOutLayoutMap() { + static const std::set supportedOutChannelLayouts = { + DEFINE_CHANNEL_LAYOUT_MASK(MONO), + DEFINE_CHANNEL_LAYOUT_MASK(STEREO), + }; + static const AudioChannelCountToMaskMap outLayouts = + make_ChannelCountToMaskMap(supportedOutChannelLayouts); + return outLayouts; +} + +const AudioChannelCountToMaskMap& getSupportedChannelInLayoutMap() { + static const std::set supportedInChannelLayouts = { + DEFINE_CHANNEL_LAYOUT_MASK(MONO), + DEFINE_CHANNEL_LAYOUT_MASK(STEREO), + }; + static const AudioChannelCountToMaskMap inLayouts = + make_ChannelCountToMaskMap(supportedInChannelLayouts); + return inLayouts; +} + +#undef DEFINE_CHANNEL_LAYOUT_MASK +#define DEFINE_CHANNEL_INDEX_MASK(n) \ + AudioChannelLayout::make(AudioChannelLayout::INDEX_MASK_##n) + +const AudioChannelCountToMaskMap& getSupportedChannelIndexLayoutMap() { + static const std::set supportedIndexChannelLayouts = { + DEFINE_CHANNEL_INDEX_MASK(1), DEFINE_CHANNEL_INDEX_MASK(2), + DEFINE_CHANNEL_INDEX_MASK(3), DEFINE_CHANNEL_INDEX_MASK(4), + DEFINE_CHANNEL_INDEX_MASK(5), DEFINE_CHANNEL_INDEX_MASK(6), + DEFINE_CHANNEL_INDEX_MASK(7), DEFINE_CHANNEL_INDEX_MASK(8), + DEFINE_CHANNEL_INDEX_MASK(9), DEFINE_CHANNEL_INDEX_MASK(10), + DEFINE_CHANNEL_INDEX_MASK(11), DEFINE_CHANNEL_INDEX_MASK(12), + DEFINE_CHANNEL_INDEX_MASK(13), DEFINE_CHANNEL_INDEX_MASK(14), + DEFINE_CHANNEL_INDEX_MASK(15), DEFINE_CHANNEL_INDEX_MASK(16), + DEFINE_CHANNEL_INDEX_MASK(17), DEFINE_CHANNEL_INDEX_MASK(18), + DEFINE_CHANNEL_INDEX_MASK(19), DEFINE_CHANNEL_INDEX_MASK(20), + DEFINE_CHANNEL_INDEX_MASK(21), DEFINE_CHANNEL_INDEX_MASK(22), + DEFINE_CHANNEL_INDEX_MASK(23), DEFINE_CHANNEL_INDEX_MASK(24), + }; + static const AudioChannelCountToMaskMap indexLayouts = + make_ChannelCountToMaskMap(supportedIndexChannelLayouts); + return indexLayouts; +} + +#undef DEFINE_CHANNEL_INDEX_MASK + 
+AudioFormatDescription make_AudioFormatDescription(AudioFormatType type) { + AudioFormatDescription result; + result.type = type; + return result; +} + +AudioFormatDescription make_AudioFormatDescription(PcmType pcm) { + auto result = make_AudioFormatDescription(AudioFormatType::PCM); + result.pcm = pcm; + return result; +} + +const AudioFormatDescToPcmFormatMap& getAudioFormatDescriptorToPcmFormatMap() { + static const AudioFormatDescToPcmFormatMap formatDescToPcmFormatMap = { + {make_AudioFormatDescription(PcmType::INT_16_BIT), PCM_FORMAT_S16_LE}, + {make_AudioFormatDescription(PcmType::FIXED_Q_8_24), PCM_FORMAT_S24_LE}, + {make_AudioFormatDescription(PcmType::INT_24_BIT), PCM_FORMAT_S24_3LE}, + {make_AudioFormatDescription(PcmType::INT_32_BIT), PCM_FORMAT_S32_LE}, + {make_AudioFormatDescription(PcmType::FLOAT_32_BIT), PCM_FORMAT_FLOAT_LE}, + }; + return formatDescToPcmFormatMap; +} + +static PcmFormatToAudioFormatDescMap make_PcmFormatToAudioFormatDescMap( + const AudioFormatDescToPcmFormatMap& formatDescToPcmFormatMap) { + PcmFormatToAudioFormatDescMap result; + for (const auto& formatPair : formatDescToPcmFormatMap) { + result.emplace(formatPair.second, formatPair.first); + } + return result; +} + +const PcmFormatToAudioFormatDescMap& getPcmFormatToAudioFormatDescMap() { + static const PcmFormatToAudioFormatDescMap pcmFormatToFormatDescMap = + make_PcmFormatToAudioFormatDescMap(getAudioFormatDescriptorToPcmFormatMap()); + return pcmFormatToFormatDescMap; +} + +void applyGainToInt16Buffer(void* buffer, const size_t bufferSizeBytes, const float gain, + int channelCount) { + const uint16_t unityGainQ4_12 = u4_12_from_float(kUnityGainFloat); + const uint16_t vl = u4_12_from_float(gain); + const uint32_t vrl = (vl << 16) | vl; + int numFrames = 0; + if (channelCount == 2) { + numFrames = bufferSizeBytes / sizeof(uint32_t); + if (numFrames == 0) { + return; + } + uint32_t* intBuffer = (uint32_t*)buffer; + if (CC_UNLIKELY(vl > unityGainQ4_12)) { + do { + int32_t l = 
mulRL(1, *intBuffer, vrl) >> 12; + int32_t r = mulRL(0, *intBuffer, vrl) >> 12; + l = clamp16(l); + r = clamp16(r); + *intBuffer++ = (r << 16) | (l & 0xFFFF); + } while (--numFrames); + } else { + do { + int32_t l = mulRL(1, *intBuffer, vrl) >> 12; + int32_t r = mulRL(0, *intBuffer, vrl) >> 12; + *intBuffer++ = (r << 16) | (l & 0xFFFF); + } while (--numFrames); + } + } else { + numFrames = bufferSizeBytes / sizeof(uint16_t); + if (numFrames == 0) { + return; + } + int16_t* intBuffer = (int16_t*)buffer; + if (CC_UNLIKELY(vl > unityGainQ4_12)) { + do { + int32_t mono = mul(*intBuffer, static_cast(vl)) >> 12; + *intBuffer++ = clamp16(mono); + } while (--numFrames); + } else { + do { + int32_t mono = mul(*intBuffer, static_cast(vl)) >> 12; + *intBuffer++ = static_cast(mono & 0xFFFF); + } while (--numFrames); + } + } +} + +void applyGainToInt32Buffer(int32_t* typedBuffer, const size_t bufferSizeBytes, const float gain) { + int numSamples = bufferSizeBytes / sizeof(int32_t); + if (numSamples == 0) { + return; + } + if (CC_UNLIKELY(gain > kUnityGainFloat)) { + do { + float multiplied = (*typedBuffer) * gain; + if (multiplied > INT32_MAX) { + *typedBuffer++ = INT32_MAX; + } else if (multiplied < INT32_MIN) { + *typedBuffer++ = INT32_MIN; + } else { + *typedBuffer++ = multiplied; + } + } while (--numSamples); + } else { + do { + *typedBuffer++ = (*typedBuffer) * gain; + } while (--numSamples); + } +} + +void applyGainToFloatBuffer(float* floatBuffer, const size_t bufferSizeBytes, const float gain) { + int numSamples = bufferSizeBytes / sizeof(float); + if (numSamples == 0) { + return; + } + if (CC_UNLIKELY(gain > kUnityGainFloat)) { + do { + *floatBuffer++ = std::clamp((*floatBuffer) * gain, -kUnityGainFloat, kUnityGainFloat); + } while (--numSamples); + } else { + do { + *floatBuffer++ = (*floatBuffer) * gain; + } while (--numSamples); + } +} + +} // namespace + +std::ostream& operator<<(std::ostream& os, const DeviceProfile& device) { + return os << "<" << device.card << 
"," << device.device << ">"; +} + +AudioChannelLayout getChannelLayoutMaskFromChannelCount(unsigned int channelCount, int isInput) { + return findValueOrDefault( + isInput ? getSupportedChannelInLayoutMap() : getSupportedChannelOutLayoutMap(), + channelCount, getInvalidChannelLayout()); +} + +AudioChannelLayout getChannelIndexMaskFromChannelCount(unsigned int channelCount) { + return findValueOrDefault(getSupportedChannelIndexLayoutMap(), channelCount, + getInvalidChannelLayout()); +} + +unsigned int getChannelCountFromChannelMask(const AudioChannelLayout& channelMask, bool isInput) { + switch (channelMask.getTag()) { + case AudioChannelLayout::Tag::layoutMask: { + return findKeyOrDefault( + isInput ? getSupportedChannelInLayoutMap() : getSupportedChannelOutLayoutMap(), + static_cast(getChannelCount(channelMask)), 0u /*defaultValue*/); + } + case AudioChannelLayout::Tag::indexMask: { + return findKeyOrDefault(getSupportedChannelIndexLayoutMap(), + static_cast(getChannelCount(channelMask)), + 0u /*defaultValue*/); + } + case AudioChannelLayout::Tag::none: + case AudioChannelLayout::Tag::invalid: + case AudioChannelLayout::Tag::voiceMask: + default: + return 0; + } +} + +std::vector getChannelMasksFromProfile(const alsa_device_profile* profile) { + const bool isInput = profile->direction == PCM_IN; + std::vector channels; + for (size_t i = 0; i < AUDIO_PORT_MAX_CHANNEL_MASKS && profile->channel_counts[i] != 0; ++i) { + auto layoutMask = + alsa::getChannelLayoutMaskFromChannelCount(profile->channel_counts[i], isInput); + if (layoutMask.getTag() == AudioChannelLayout::Tag::layoutMask) { + channels.push_back(layoutMask); + } + auto indexMask = alsa::getChannelIndexMaskFromChannelCount(profile->channel_counts[i]); + if (indexMask.getTag() == AudioChannelLayout::Tag::indexMask) { + channels.push_back(indexMask); + } + } + return channels; +} + +std::optional getDeviceProfile( + const ::aidl::android::media::audio::common::AudioDevice& audioDevice, bool isInput) { + if 
(audioDevice.address.getTag() != AudioDeviceAddress::Tag::alsa) { + LOG(ERROR) << __func__ << ": not alsa address: " << audioDevice.toString(); + return std::nullopt; + } + auto& alsaAddress = audioDevice.address.get(); + if (alsaAddress.size() != 2 || alsaAddress[0] < 0 || alsaAddress[1] < 0) { + LOG(ERROR) << __func__ + << ": malformed alsa address: " << ::android::internal::ToString(alsaAddress); + return std::nullopt; + } + return DeviceProfile{.card = alsaAddress[0], + .device = alsaAddress[1], + .direction = isInput ? PCM_IN : PCM_OUT, + .isExternal = !audioDevice.type.connection.empty()}; +} + +std::optional getDeviceProfile( + const ::aidl::android::media::audio::common::AudioPort& audioPort) { + if (audioPort.ext.getTag() != AudioPortExt::Tag::device) { + LOG(ERROR) << __func__ << ": port id " << audioPort.id << " is not a device port"; + return std::nullopt; + } + auto& devicePort = audioPort.ext.get(); + return getDeviceProfile(devicePort.device, audioPort.flags.getTag() == AudioIoFlags::input); +} + +std::optional getPcmConfig(const StreamContext& context, bool isInput) { + struct pcm_config config; + config.channels = alsa::getChannelCountFromChannelMask(context.getChannelLayout(), isInput); + if (config.channels == 0) { + LOG(ERROR) << __func__ << ": invalid channel=" << context.getChannelLayout().toString(); + return std::nullopt; + } + config.format = alsa::aidl2c_AudioFormatDescription_pcm_format(context.getFormat()); + if (config.format == PCM_FORMAT_INVALID) { + LOG(ERROR) << __func__ << ": invalid format=" << context.getFormat().toString(); + return std::nullopt; + } + config.rate = context.getSampleRate(); + if (config.rate == 0) { + LOG(ERROR) << __func__ << ": invalid sample rate=" << config.rate; + return std::nullopt; + } + return config; +} + +std::vector getSampleRatesFromProfile(const alsa_device_profile* profile) { + std::vector sampleRates; + for (int i = 0; i < std::min(MAX_PROFILE_SAMPLE_RATES, AUDIO_PORT_MAX_SAMPLING_RATES) && + 
profile->sample_rates[i] != 0; + i++) { + sampleRates.push_back(profile->sample_rates[i]); + } + return sampleRates; +} + +DeviceProxy openProxyForAttachedDevice(const DeviceProfile& deviceProfile, + struct pcm_config* pcmConfig, size_t bufferFrameCount) { + if (deviceProfile.isExternal) { + LOG(FATAL) << __func__ << ": called for an external device, address=" << deviceProfile; + } + DeviceProxy proxy(deviceProfile); + if (!profile_fill_builtin_device_info(proxy.getProfile(), pcmConfig, bufferFrameCount)) { + LOG(FATAL) << __func__ << ": failed to init for built-in device, address=" << deviceProfile; + } + if (int err = proxy_prepare_from_default_config(proxy.get(), proxy.getProfile()); err != 0) { + LOG(FATAL) << __func__ << ": fail to prepare for device address=" << deviceProfile + << " error=" << err; + return DeviceProxy(); + } + if (int err = proxy_open(proxy.get()); err != 0) { + LOG(ERROR) << __func__ << ": failed to open device, address=" << deviceProfile + << " error=" << err; + return DeviceProxy(); + } + return proxy; +} + +DeviceProxy openProxyForExternalDevice(const DeviceProfile& deviceProfile, + struct pcm_config* pcmConfig, bool requireExactMatch) { + if (!deviceProfile.isExternal) { + LOG(FATAL) << __func__ << ": called for an attached device, address=" << deviceProfile; + } + auto proxy = readAlsaDeviceInfo(deviceProfile); + if (proxy.get() == nullptr) { + return proxy; + } + if (int err = proxy_prepare(proxy.get(), proxy.getProfile(), pcmConfig, requireExactMatch); + err != 0) { + LOG(ERROR) << __func__ << ": fail to prepare for device address=" << deviceProfile + << " error=" << err; + return DeviceProxy(); + } + if (int err = proxy_open(proxy.get()); err != 0) { + LOG(ERROR) << __func__ << ": failed to open device, address=" << deviceProfile + << " error=" << err; + return DeviceProxy(); + } + return proxy; +} + +DeviceProxy readAlsaDeviceInfo(const DeviceProfile& deviceProfile) { + DeviceProxy proxy(deviceProfile); + if 
(!profile_read_device_info(proxy.getProfile())) { + LOG(ERROR) << __func__ << ": unable to read device info, device address=" << deviceProfile; + return DeviceProxy(); + } + return proxy; +} + +void resetTransferredFrames(DeviceProxy& proxy, uint64_t frames) { + if (proxy.get() != nullptr) { + proxy.get()->transferred = frames; + } +} + +AudioFormatDescription c2aidl_pcm_format_AudioFormatDescription(enum pcm_format legacy) { + return findValueOrDefault(getPcmFormatToAudioFormatDescMap(), legacy, AudioFormatDescription()); +} + +pcm_format aidl2c_AudioFormatDescription_pcm_format(const AudioFormatDescription& aidl) { + return findValueOrDefault(getAudioFormatDescriptorToPcmFormatMap(), aidl, PCM_FORMAT_INVALID); +} + +void applyGain(void* buffer, float gain, size_t bufferSizeBytes, enum pcm_format pcmFormat, + int channelCount) { + if (channelCount != 1 && channelCount != 2) { + LOG(WARNING) << __func__ << ": unsupported channel count " << channelCount; + return; + } + if (!getPcmFormatToAudioFormatDescMap().contains(pcmFormat)) { + LOG(WARNING) << __func__ << ": unsupported pcm format " << pcmFormat; + return; + } + if (std::abs(gain - kUnityGainFloat) < 1e-6) { + return; + } + switch (pcmFormat) { + case PCM_FORMAT_S16_LE: + applyGainToInt16Buffer(buffer, bufferSizeBytes, gain, channelCount); + break; + case PCM_FORMAT_FLOAT_LE: { + float* floatBuffer = (float*)buffer; + applyGainToFloatBuffer(floatBuffer, bufferSizeBytes, gain); + } break; + case PCM_FORMAT_S24_LE: + // PCM_FORMAT_S24_LE buffer is composed of signed fixed-point 32-bit Q8.23 data with + // min and max limits of the same bit representation as min and max limits of + // PCM_FORMAT_S32_LE buffer. 
+ case PCM_FORMAT_S32_LE: { + int32_t* typedBuffer = (int32_t*)buffer; + applyGainToInt32Buffer(typedBuffer, bufferSizeBytes, gain); + } break; + case PCM_FORMAT_S24_3LE: { + int numSamples = bufferSizeBytes / (sizeof(uint8_t) * 3); + if (numSamples == 0) { + return; + } + std::unique_ptr typedBuffer(new int32_t[numSamples]); + memcpy_to_i32_from_p24(typedBuffer.get(), (uint8_t*)buffer, numSamples); + applyGainToInt32Buffer(typedBuffer.get(), numSamples * sizeof(int32_t), gain); + memcpy_to_p24_from_i32((uint8_t*)buffer, typedBuffer.get(), numSamples); + } break; + default: + LOG(FATAL) << __func__ << ": unsupported pcm format " << pcmFormat; + break; + } +} + +} // namespace aidl::android::hardware::audio::core::alsa diff --git a/audio/alsa/Utils.h b/audio/alsa/Utils.h new file mode 100644 index 0000000..53dcfd0 --- /dev/null +++ b/audio/alsa/Utils.h @@ -0,0 +1,90 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +#include +#include +#include +#include +#include + +#include +#include +#include + +#include "core-impl/Stream.h" + +extern "C" { +#include +#include "alsa_device_profile.h" +#include "alsa_device_proxy.h" +} + +namespace aidl::android::hardware::audio::core::alsa { + +struct DeviceProfile { + int card; + int device; + int direction; /* PCM_OUT or PCM_IN */ + bool isExternal; +}; +std::ostream& operator<<(std::ostream& os, const DeviceProfile& device); + +class DeviceProxy { + public: + DeviceProxy(); // Constructs a "null" proxy. + explicit DeviceProxy(const DeviceProfile& deviceProfile); + alsa_device_profile* getProfile() const { return mProfile.get(); } + alsa_device_proxy* get() const { return mProxy.get(); } + + private: + static void alsaProxyDeleter(alsa_device_proxy* proxy); + using AlsaProxy = std::unique_ptr; + + std::unique_ptr mProfile; + AlsaProxy mProxy; +}; + +void applyGain(void* buffer, float gain, size_t bytesToTransfer, enum pcm_format pcmFormat, + int channelCount); +::aidl::android::media::audio::common::AudioChannelLayout getChannelLayoutMaskFromChannelCount( + unsigned int channelCount, int isInput); +::aidl::android::media::audio::common::AudioChannelLayout getChannelIndexMaskFromChannelCount( + unsigned int channelCount); +unsigned int getChannelCountFromChannelMask( + const ::aidl::android::media::audio::common::AudioChannelLayout& channelMask, bool isInput); +std::vector<::aidl::android::media::audio::common::AudioChannelLayout> getChannelMasksFromProfile( + const alsa_device_profile* profile); +std::optional getDeviceProfile( + const ::aidl::android::media::audio::common::AudioDevice& audioDevice, bool isInput); +std::optional getDeviceProfile( + const ::aidl::android::media::audio::common::AudioPort& audioPort); +std::optional getPcmConfig(const StreamContext& context, bool isInput); +std::vector getSampleRatesFromProfile(const alsa_device_profile* profile); +DeviceProxy openProxyForAttachedDevice(const 
DeviceProfile& deviceProfile, + struct pcm_config* pcmConfig, size_t bufferFrameCount); +DeviceProxy openProxyForExternalDevice(const DeviceProfile& deviceProfile, + struct pcm_config* pcmConfig, bool requireExactMatch); +DeviceProxy readAlsaDeviceInfo(const DeviceProfile& deviceProfile); +void resetTransferredFrames(DeviceProxy& proxy, uint64_t frames); + +::aidl::android::media::audio::common::AudioFormatDescription +c2aidl_pcm_format_AudioFormatDescription(enum pcm_format legacy); +pcm_format aidl2c_AudioFormatDescription_pcm_format( + const ::aidl::android::media::audio::common::AudioFormatDescription& aidl); + +} // namespace aidl::android::hardware::audio::core::alsa diff --git a/audio/android.hardware.audio.service-aidl.example.rc b/audio/android.hardware.audio.service-aidl.example.rc new file mode 100644 index 0000000..c3e19ba --- /dev/null +++ b/audio/android.hardware.audio.service-aidl.example.rc @@ -0,0 +1,24 @@ + +service vendor.audio-hal-aidl /apex/com.android.hardware.audio/bin/hw/android.hardware.audio.service-aidl.example + class hal + user audioserver + # media gid needed for /dev/fm (radio) and for /data/misc/media (tee) + group audio camera drmrpc inet media mediadrm net_bt net_bt_admin net_bw_acct wakelock context_hub + capabilities BLOCK_SUSPEND SYS_NICE + # setting RLIMIT_RTPRIO allows binder RT priority inheritance + rlimit rtprio 10 10 + ioprio rt 4 + task_profiles ProcessCapacityHigh HighPerformance + onrestart restart audioserver + +service vendor.audio-effect-hal-aidl /apex/com.android.hardware.audio/bin/hw/android.hardware.audio.effect.service-aidl.example + class hal + user audioserver + # media gid needed for /dev/fm (radio) and for /data/misc/media (tee) + group audio media + capabilities BLOCK_SUSPEND + # setting RLIMIT_RTPRIO allows binder RT priority inheritance + rlimit rtprio 10 10 + ioprio rt 4 + task_profiles ProcessCapacityHigh HighPerformance + onrestart restart audioserver \ No newline at end of file diff --git 
a/audio/android.hardware.audio.service-aidl.xml b/audio/android.hardware.audio.service-aidl.xml new file mode 100644 index 0000000..27f48e2 --- /dev/null +++ b/audio/android.hardware.audio.service-aidl.xml @@ -0,0 +1,39 @@ + + + android.hardware.audio.core + 3 + IModule/default + + + android.hardware.audio.core + 3 + IModule/r_submix + + + android.hardware.audio.core + 3 + IModule/bluetooth + + + android.hardware.audio.core + 3 + IConfig/default + + + + android.hardware.audio.effect + 3 + IFactory/default + + diff --git a/audio/apex/com.android.hardware.audio/Android.bp b/audio/apex/com.android.hardware.audio/Android.bp new file mode 100644 index 0000000..8fa429a --- /dev/null +++ b/audio/apex/com.android.hardware.audio/Android.bp @@ -0,0 +1,58 @@ +package { + default_team: "trendy_team_android_media_audio_framework", + // See: http://go/android-license-faq + // A large-scale-change added 'default_applicable_licenses' to import + // all of the 'license_kinds' from "hardware_interfaces_license" + // to get the below license kinds: + // SPDX-license-identifier-Apache-2.0 + default_applicable_licenses: ["hardware_interfaces_license"], +} + +apex { + name: "com.android.hardware.audio", + manifest: "manifest.json", + file_contexts: "file_contexts", + key: "com.android.hardware.key", + certificate: ":com.android.hardware.certificate", + updatable: false, + vendor: true, + + binaries: [ + "android.hardware.audio.service-aidl.example", + "android.hardware.audio.effect.service-aidl.example", + ], + native_shared_libs: [ + "libaecsw", + "libagc1sw", + "libagc2sw", + "libbassboostsw", + "libbundleaidl", + "libdownmixaidl", + "libdynamicsprocessingaidl", + "libenvreverbsw", + "libequalizersw", + "libextensioneffect", + "libhapticgeneratoraidl", + "libloudnessenhanceraidl", + "libnssw", + "libpreprocessingaidl", + "libpresetreverbsw", + "libreverbaidl", + "libspatializersw", + "libvirtualizersw", + "libvisualizeraidl", + "libvolumesw", + ], + prebuilts: [ + 
"android.hardware.audio.service-aidl.example.rc", + "android.hardware.audio.service-aidl.xml", + "android.hardware.bluetooth.audio.xml", + ], + required: [ + "aidl_audio_set_configurations_bfbs", + "aidl_default_audio_set_configurations_json", + "aidl_audio_set_scenarios_bfbs", + "aidl_default_audio_set_scenarios_json", + "hfp_codec_capabilities_xml", + ], +} diff --git a/audio/apex/com.android.hardware.audio/file_contexts b/audio/apex/com.android.hardware.audio/file_contexts new file mode 100644 index 0000000..41a6ada --- /dev/null +++ b/audio/apex/com.android.hardware.audio/file_contexts @@ -0,0 +1,4 @@ +(/.*)? u:object_r:vendor_file:s0 +/etc(/.*)? u:object_r:vendor_configs_file:s0 +/bin/hw/android\.hardware\.audio\.service-aidl\.example u:object_r:hal_audio_default_exec:s0 +/bin/hw/android\.hardware\.audio\.effect\.service-aidl\.example u:object_r:hal_audio_default_exec:s0 \ No newline at end of file diff --git a/audio/apex/com.android.hardware.audio/manifest.json b/audio/apex/com.android.hardware.audio/manifest.json new file mode 100644 index 0000000..42a2368 --- /dev/null +++ b/audio/apex/com.android.hardware.audio/manifest.json @@ -0,0 +1,4 @@ +{ + "name": "com.android.hardware.audio", + "version": 1 +} diff --git a/audio/audio_effects_config.xml b/audio/audio_effects_config.xml new file mode 100644 index 0000000..2e860d8 --- /dev/null +++ b/audio/audio_effects_config.xml @@ -0,0 +1,178 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/audio/automaticGainControlV1/Android.bp b/audio/automaticGainControlV1/Android.bp new file mode 100644 index 0000000..2fea719 --- /dev/null +++ b/audio/automaticGainControlV1/Android.bp @@ -0,0 +1,40 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package { + default_team: "trendy_team_android_media_audio_framework", + // See: http://go/android-license-faq + // A large-scale-change added 'default_applicable_licenses' to import + // all of the 'license_kinds' from "hardware_interfaces_license" + // to get the below license kinds: + // SPDX-license-identifier-Apache-2.0 + default_applicable_licenses: ["hardware_interfaces_license"], +} + +cc_library_shared { + name: "libagc1sw", + defaults: [ + "aidlaudioeffectservice_defaults", + ], + srcs: [ + "AutomaticGainControlV1Sw.cpp", + ":effectCommonFile", + ], + relative_install_path: "soundfx", + visibility: [ + "//hardware/interfaces/audio/aidl/default:__subpackages__", + ], +} diff --git a/audio/automaticGainControlV1/AutomaticGainControlV1Sw.cpp b/audio/automaticGainControlV1/AutomaticGainControlV1Sw.cpp new file mode 100644 index 0000000..d865b7e --- /dev/null +++ b/audio/automaticGainControlV1/AutomaticGainControlV1Sw.cpp @@ -0,0 +1,224 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "AHAL_AutomaticGainControlV1Sw" + +#include +#include + +#include "AutomaticGainControlV1Sw.h" + +using aidl::android::hardware::audio::effect::AutomaticGainControlV1Sw; +using aidl::android::hardware::audio::effect::Descriptor; +using aidl::android::hardware::audio::effect::getEffectImplUuidAutomaticGainControlV1Sw; +using aidl::android::hardware::audio::effect::getEffectTypeUuidAutomaticGainControlV1; +using aidl::android::hardware::audio::effect::IEffect; +using aidl::android::media::audio::common::AudioUuid; + +extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid, + std::shared_ptr* instanceSpp) { + if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidAutomaticGainControlV1Sw()) { + LOG(ERROR) << __func__ << "uuid not supported"; + return EX_ILLEGAL_ARGUMENT; + } + if (instanceSpp) { + *instanceSpp = ndk::SharedRefBase::make(); + LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created"; + return EX_NONE; + } else { + LOG(ERROR) << __func__ << " invalid input parameter!"; + return EX_ILLEGAL_ARGUMENT; + } +} + +extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) { + if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidAutomaticGainControlV1Sw()) { + LOG(ERROR) << __func__ << "uuid not supported"; + return EX_ILLEGAL_ARGUMENT; + } + *_aidl_return = AutomaticGainControlV1Sw::kDescriptor; + return EX_NONE; +} + +namespace aidl::android::hardware::audio::effect { + +const std::string AutomaticGainControlV1Sw::kEffectName = "AutomaticGainControlV1Sw"; + +const std::vector AutomaticGainControlV1Sw::kRanges = { + MAKE_RANGE(AutomaticGainControlV1, targetPeakLevelDbFs, -3100, 0), + MAKE_RANGE(AutomaticGainControlV1, maxCompressionGainDb, 0, 9000)}; + +const Capability AutomaticGainControlV1Sw::kCapability = { + .range = 
AutomaticGainControlV1Sw::kRanges}; + +const Descriptor AutomaticGainControlV1Sw::kDescriptor = { + .common = {.id = {.type = getEffectTypeUuidAutomaticGainControlV1(), + .uuid = getEffectImplUuidAutomaticGainControlV1Sw(), + .proxy = std::nullopt}, + .flags = {.type = Flags::Type::INSERT, + .insert = Flags::Insert::FIRST, + .volume = Flags::Volume::CTRL}, + .name = AutomaticGainControlV1Sw::kEffectName, + .implementor = "The Android Open Source Project"}, + .capability = AutomaticGainControlV1Sw::kCapability}; + +ndk::ScopedAStatus AutomaticGainControlV1Sw::getDescriptor(Descriptor* _aidl_return) { + LOG(DEBUG) << __func__ << kDescriptor.toString(); + *_aidl_return = kDescriptor; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus AutomaticGainControlV1Sw::setParameterSpecific( + const Parameter::Specific& specific) { + RETURN_IF(Parameter::Specific::automaticGainControlV1 != specific.getTag(), EX_ILLEGAL_ARGUMENT, + "EffectNotSupported"); + RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext"); + + auto& param = specific.get(); + RETURN_IF(!inRange(param, kRanges), EX_ILLEGAL_ARGUMENT, "outOfRange"); + auto tag = param.getTag(); + switch (tag) { + case AutomaticGainControlV1::targetPeakLevelDbFs: { + RETURN_IF(mContext->setTargetPeakLevel( + param.get()) != + RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "targetPeakLevelNotSupported"); + return ndk::ScopedAStatus::ok(); + } + case AutomaticGainControlV1::maxCompressionGainDb: { + RETURN_IF(mContext->setMaxCompressionGain( + param.get()) != + RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "maxCompressionGainNotSupported"); + return ndk::ScopedAStatus::ok(); + } + case AutomaticGainControlV1::enableLimiter: { + RETURN_IF( + mContext->setEnableLimiter( + param.get()) != RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "enableLimiterNotSupported"); + return ndk::ScopedAStatus::ok(); + } + default: { + LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage( + 
EX_ILLEGAL_ARGUMENT, "AutomaticGainControlV1TagNotSupported"); + } + } +} + +ndk::ScopedAStatus AutomaticGainControlV1Sw::getParameterSpecific(const Parameter::Id& id, + Parameter::Specific* specific) { + auto tag = id.getTag(); + RETURN_IF(Parameter::Id::automaticGainControlV1Tag != tag, EX_ILLEGAL_ARGUMENT, "wrongIdTag"); + auto specificId = id.get(); + auto specificIdTag = specificId.getTag(); + switch (specificIdTag) { + case AutomaticGainControlV1::Id::commonTag: + return getParameterAutomaticGainControlV1( + specificId.get(), specific); + default: + LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage( + EX_ILLEGAL_ARGUMENT, "AutomaticGainControlV1TagNotSupported"); + } +} + +ndk::ScopedAStatus AutomaticGainControlV1Sw::getParameterAutomaticGainControlV1( + const AutomaticGainControlV1::Tag& tag, Parameter::Specific* specific) { + RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext"); + AutomaticGainControlV1 param; + switch (tag) { + case AutomaticGainControlV1::targetPeakLevelDbFs: { + param.set(mContext->getTargetPeakLevel()); + break; + } + case AutomaticGainControlV1::maxCompressionGainDb: { + param.set( + mContext->getMaxCompressionGain()); + break; + } + case AutomaticGainControlV1::enableLimiter: { + param.set(mContext->getEnableLimiter()); + break; + } + default: { + LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage( + EX_ILLEGAL_ARGUMENT, "AutomaticGainControlV1TagNotSupported"); + } + } + + specific->set(param); + return ndk::ScopedAStatus::ok(); +} + +std::shared_ptr AutomaticGainControlV1Sw::createContext( + const Parameter::Common& common) { + if (mContext) { + LOG(DEBUG) << __func__ << " context already exist"; + } else { + mContext = + std::make_shared(1 /* statusFmqDepth */, common); + } + return mContext; +} + +RetCode AutomaticGainControlV1Sw::releaseContext() { + if (mContext) { + mContext.reset(); + 
} + return RetCode::SUCCESS; +} + +// Processing method running in EffectWorker thread. +IEffect::Status AutomaticGainControlV1Sw::effectProcessImpl(float* in, float* out, int samples) { + // TODO: get data buffer and process. + LOG(DEBUG) << __func__ << " in " << in << " out " << out << " samples " << samples; + for (int i = 0; i < samples; i++) { + *out++ = *in++; + } + return {STATUS_OK, samples, samples}; +} + +RetCode AutomaticGainControlV1SwContext::setTargetPeakLevel(int targetPeakLevel) { + mTargetPeakLevel = targetPeakLevel; + return RetCode::SUCCESS; +} + +int AutomaticGainControlV1SwContext::getTargetPeakLevel() { + return mTargetPeakLevel; +} + +RetCode AutomaticGainControlV1SwContext::setMaxCompressionGain(int maxCompressionGain) { + mMaxCompressionGain = maxCompressionGain; + return RetCode::SUCCESS; +} + +int AutomaticGainControlV1SwContext::getMaxCompressionGain() { + return mMaxCompressionGain; +} + +RetCode AutomaticGainControlV1SwContext::setEnableLimiter(bool enableLimiter) { + mEnableLimiter = enableLimiter; + return RetCode::SUCCESS; +} + +bool AutomaticGainControlV1SwContext::getEnableLimiter() { + return mEnableLimiter; +} + +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/automaticGainControlV1/AutomaticGainControlV1Sw.h b/audio/automaticGainControlV1/AutomaticGainControlV1Sw.h new file mode 100644 index 0000000..76b91ae --- /dev/null +++ b/audio/automaticGainControlV1/AutomaticGainControlV1Sw.h @@ -0,0 +1,76 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include "effect-impl/EffectImpl.h" + +namespace aidl::android::hardware::audio::effect { + +class AutomaticGainControlV1SwContext final : public EffectContext { + public: + AutomaticGainControlV1SwContext(int statusDepth, const Parameter::Common& common) + : EffectContext(statusDepth, common) { + LOG(DEBUG) << __func__; + } + + RetCode setTargetPeakLevel(int targetPeakLevel); + int getTargetPeakLevel(); + RetCode setMaxCompressionGain(int maxCompressionGainDb); + int getMaxCompressionGain(); + RetCode setEnableLimiter(bool enableLimiter); + bool getEnableLimiter(); + + private: + int mTargetPeakLevel = 0; + int mMaxCompressionGain = 0; + bool mEnableLimiter = false; +}; + +class AutomaticGainControlV1Sw final : public EffectImpl { + public: + static const std::string kEffectName; + static const bool kStrengthSupported; + static const Capability kCapability; + static const Descriptor kDescriptor; + AutomaticGainControlV1Sw() { LOG(DEBUG) << __func__; } + ~AutomaticGainControlV1Sw() { + cleanUp(); + LOG(DEBUG) << __func__; + } + + ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override; + ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) + REQUIRES(mImplMutex) override; + ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific) + REQUIRES(mImplMutex) override; + + std::shared_ptr createContext(const Parameter::Common& common) + REQUIRES(mImplMutex) override; + RetCode releaseContext() REQUIRES(mImplMutex) override; + + std::string getEffectName() 
override { return kEffectName; }; + IEffect::Status effectProcessImpl(float* in, float* out, int samples) + REQUIRES(mImplMutex) override; + + private: + static const std::vector kRanges; + std::shared_ptr mContext GUARDED_BY(mImplMutex); + ndk::ScopedAStatus getParameterAutomaticGainControlV1(const AutomaticGainControlV1::Tag& tag, + Parameter::Specific* specific) + REQUIRES(mImplMutex); +}; +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/automaticGainControlV2/Android.bp b/audio/automaticGainControlV2/Android.bp new file mode 100644 index 0000000..dda4e51 --- /dev/null +++ b/audio/automaticGainControlV2/Android.bp @@ -0,0 +1,40 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package { + default_team: "trendy_team_android_media_audio_framework", + // See: http://go/android-license-faq + // A large-scale-change added 'default_applicable_licenses' to import + // all of the 'license_kinds' from "hardware_interfaces_license" + // to get the below license kinds: + // SPDX-license-identifier-Apache-2.0 + default_applicable_licenses: ["hardware_interfaces_license"], +} + +cc_library_shared { + name: "libagc2sw", + defaults: [ + "aidlaudioeffectservice_defaults", + ], + srcs: [ + "AutomaticGainControlV2Sw.cpp", + ":effectCommonFile", + ], + relative_install_path: "soundfx", + visibility: [ + "//hardware/interfaces/audio/aidl/default:__subpackages__", + ], +} diff --git a/audio/automaticGainControlV2/AutomaticGainControlV2Sw.cpp b/audio/automaticGainControlV2/AutomaticGainControlV2Sw.cpp new file mode 100644 index 0000000..3ff6e38 --- /dev/null +++ b/audio/automaticGainControlV2/AutomaticGainControlV2Sw.cpp @@ -0,0 +1,228 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include +#define LOG_TAG "AHAL_AutomaticGainControlV2Sw" + +#include +#include +#include + +#include "AutomaticGainControlV2Sw.h" + +using aidl::android::hardware::audio::effect::AutomaticGainControlV2Sw; +using aidl::android::hardware::audio::effect::Descriptor; +using aidl::android::hardware::audio::effect::getEffectImplUuidAutomaticGainControlV2Sw; +using aidl::android::hardware::audio::effect::getEffectTypeUuidAutomaticGainControlV2; +using aidl::android::hardware::audio::effect::IEffect; +using aidl::android::media::audio::common::AudioUuid; + +extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid, + std::shared_ptr* instanceSpp) { + if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidAutomaticGainControlV2Sw()) { + LOG(ERROR) << __func__ << "uuid not supported"; + return EX_ILLEGAL_ARGUMENT; + } + if (instanceSpp) { + *instanceSpp = ndk::SharedRefBase::make(); + LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created"; + return EX_NONE; + } else { + LOG(ERROR) << __func__ << " invalid input parameter!"; + return EX_ILLEGAL_ARGUMENT; + } +} + +extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) { + if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidAutomaticGainControlV2Sw()) { + LOG(ERROR) << __func__ << "uuid not supported"; + return EX_ILLEGAL_ARGUMENT; + } + *_aidl_return = AutomaticGainControlV2Sw::kDescriptor; + return EX_NONE; +} + +namespace aidl::android::hardware::audio::effect { + +const std::string AutomaticGainControlV2Sw::kEffectName = "AutomaticGainControlV2Sw"; + +const std::vector AutomaticGainControlV2Sw::kRanges = { + MAKE_RANGE(AutomaticGainControlV2, fixedDigitalGainMb, 0, 50000), + MAKE_RANGE(AutomaticGainControlV2, saturationMarginMb, 0, 10000)}; + +const Capability AutomaticGainControlV2Sw::kCapability = { + .range = AutomaticGainControlV2Sw::kRanges}; + +const Descriptor AutomaticGainControlV2Sw::kDescriptor = { + 
.common = {.id = {.type = getEffectTypeUuidAutomaticGainControlV2(), + .uuid = getEffectImplUuidAutomaticGainControlV2Sw(), + .proxy = std::nullopt}, + .flags = {.type = Flags::Type::INSERT, + .insert = Flags::Insert::FIRST, + .volume = Flags::Volume::CTRL}, + .name = AutomaticGainControlV2Sw::kEffectName, + .implementor = "The Android Open Source Project"}, + .capability = AutomaticGainControlV2Sw::kCapability}; + +ndk::ScopedAStatus AutomaticGainControlV2Sw::getDescriptor(Descriptor* _aidl_return) { + LOG(DEBUG) << __func__ << kDescriptor.toString(); + *_aidl_return = kDescriptor; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus AutomaticGainControlV2Sw::setParameterSpecific( + const Parameter::Specific& specific) { + RETURN_IF(Parameter::Specific::automaticGainControlV2 != specific.getTag(), EX_ILLEGAL_ARGUMENT, + "EffectNotSupported"); + RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext"); + + auto& param = specific.get(); + RETURN_IF(!inRange(param, kRanges), EX_ILLEGAL_ARGUMENT, "outOfRange"); + auto tag = param.getTag(); + switch (tag) { + case AutomaticGainControlV2::fixedDigitalGainMb: { + RETURN_IF(mContext->setDigitalGain( + param.get()) != + RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "digitalGainNotSupported"); + return ndk::ScopedAStatus::ok(); + } + case AutomaticGainControlV2::levelEstimator: { + RETURN_IF(mContext->setLevelEstimator( + param.get()) != + RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "levelEstimatorNotSupported"); + return ndk::ScopedAStatus::ok(); + } + case AutomaticGainControlV2::saturationMarginMb: { + RETURN_IF(mContext->setSaturationMargin( + param.get()) != + RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "saturationMarginNotSupported"); + return ndk::ScopedAStatus::ok(); + } + default: { + LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage( + EX_ILLEGAL_ARGUMENT, "AutomaticGainControlV2TagNotSupported"); + } + } +} + +ndk::ScopedAStatus 
AutomaticGainControlV2Sw::getParameterSpecific(const Parameter::Id& id, + Parameter::Specific* specific) { + auto tag = id.getTag(); + RETURN_IF(Parameter::Id::automaticGainControlV2Tag != tag, EX_ILLEGAL_ARGUMENT, "wrongIdTag"); + auto specificId = id.get(); + auto specificIdTag = specificId.getTag(); + switch (specificIdTag) { + case AutomaticGainControlV2::Id::commonTag: + return getParameterAutomaticGainControlV2( + specificId.get(), specific); + default: + LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage( + EX_ILLEGAL_ARGUMENT, "AutomaticGainControlV2TagNotSupported"); + } +} + +ndk::ScopedAStatus AutomaticGainControlV2Sw::getParameterAutomaticGainControlV2( + const AutomaticGainControlV2::Tag& tag, Parameter::Specific* specific) { + RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext"); + AutomaticGainControlV2 param; + switch (tag) { + case AutomaticGainControlV2::fixedDigitalGainMb: { + param.set(mContext->getDigitalGain()); + break; + } + case AutomaticGainControlV2::levelEstimator: { + param.set(mContext->getLevelEstimator()); + break; + } + case AutomaticGainControlV2::saturationMarginMb: { + param.set(mContext->getSaturationMargin()); + break; + } + default: { + LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage( + EX_ILLEGAL_ARGUMENT, "AutomaticGainControlV2TagNotSupported"); + } + } + + specific->set(param); + return ndk::ScopedAStatus::ok(); +} + +std::shared_ptr AutomaticGainControlV2Sw::createContext( + const Parameter::Common& common) { + if (mContext) { + LOG(DEBUG) << __func__ << " context already exist"; + } else { + mContext = + std::make_shared(1 /* statusFmqDepth */, common); + } + return mContext; +} + +RetCode AutomaticGainControlV2Sw::releaseContext() { + if (mContext) { + mContext.reset(); + } + return RetCode::SUCCESS; +} + +// Processing method running in EffectWorker thread. 
+IEffect::Status AutomaticGainControlV2Sw::effectProcessImpl(float* in, float* out, int samples) { + // TODO: get data buffer and process. + LOG(DEBUG) << __func__ << " in " << in << " out " << out << " samples " << samples; + for (int i = 0; i < samples; i++) { + *out++ = *in++; + } + return {STATUS_OK, samples, samples}; +} + +RetCode AutomaticGainControlV2SwContext::setDigitalGain(int gain) { + mDigitalGain = gain; + return RetCode::SUCCESS; +} + +int AutomaticGainControlV2SwContext::getDigitalGain() { + return mDigitalGain; +} + +RetCode AutomaticGainControlV2SwContext::setLevelEstimator( + AutomaticGainControlV2::LevelEstimator levelEstimator) { + mLevelEstimator = levelEstimator; + return RetCode::SUCCESS; +} + +AutomaticGainControlV2::LevelEstimator AutomaticGainControlV2SwContext::getLevelEstimator() { + return mLevelEstimator; +} + +RetCode AutomaticGainControlV2SwContext::setSaturationMargin(int margin) { + mSaturationMargin = margin; + return RetCode::SUCCESS; +} + +int AutomaticGainControlV2SwContext::getSaturationMargin() { + return mSaturationMargin; +} + +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/automaticGainControlV2/AutomaticGainControlV2Sw.h b/audio/automaticGainControlV2/AutomaticGainControlV2Sw.h new file mode 100644 index 0000000..863d470 --- /dev/null +++ b/audio/automaticGainControlV2/AutomaticGainControlV2Sw.h @@ -0,0 +1,82 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include +#include +#include +#include + +#include "effect-impl/EffectImpl.h" + +namespace aidl::android::hardware::audio::effect { + +class AutomaticGainControlV2SwContext final : public EffectContext { + public: + AutomaticGainControlV2SwContext(int statusDepth, const Parameter::Common& common) + : EffectContext(statusDepth, common) { + LOG(DEBUG) << __func__; + } + + RetCode setDigitalGain(int gain); + int getDigitalGain(); + RetCode setLevelEstimator(AutomaticGainControlV2::LevelEstimator levelEstimator); + AutomaticGainControlV2::LevelEstimator getLevelEstimator(); + RetCode setSaturationMargin(int margin); + int getSaturationMargin(); + + private: + int mDigitalGain = 0; + AutomaticGainControlV2::LevelEstimator mLevelEstimator = + AutomaticGainControlV2::LevelEstimator::RMS; + int mSaturationMargin = 0; +}; + +class AutomaticGainControlV2Sw final : public EffectImpl { + public: + static const std::string kEffectName; + static const bool kStrengthSupported; + static const Capability kCapability; + static const Descriptor kDescriptor; + AutomaticGainControlV2Sw() { LOG(DEBUG) << __func__; } + ~AutomaticGainControlV2Sw() { + cleanUp(); + LOG(DEBUG) << __func__; + } + + ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override; + ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) + REQUIRES(mImplMutex) override; + ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific) + REQUIRES(mImplMutex) override; + + std::shared_ptr createContext(const Parameter::Common& common) + REQUIRES(mImplMutex) override; + RetCode releaseContext() REQUIRES(mImplMutex) override; + + std::string getEffectName() override { return kEffectName; }; + IEffect::Status effectProcessImpl(float* in, float* out, int samples) + REQUIRES(mImplMutex) override; + + private: + static const 
std::vector kRanges; + std::shared_ptr mContext GUARDED_BY(mImplMutex); + ndk::ScopedAStatus getParameterAutomaticGainControlV2(const AutomaticGainControlV2::Tag& tag, + Parameter::Specific* specific) + REQUIRES(mImplMutex); +}; +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/bassboost/Android.bp b/audio/bassboost/Android.bp new file mode 100644 index 0000000..42223b4 --- /dev/null +++ b/audio/bassboost/Android.bp @@ -0,0 +1,40 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package { + default_team: "trendy_team_android_media_audio_framework", + // See: http://go/android-license-faq + // A large-scale-change added 'default_applicable_licenses' to import + // all of the 'license_kinds' from "hardware_interfaces_license" + // to get the below license kinds: + // SPDX-license-identifier-Apache-2.0 + default_applicable_licenses: ["hardware_interfaces_license"], +} + +cc_library_shared { + name: "libbassboostsw", + defaults: [ + "aidlaudioeffectservice_defaults", + ], + srcs: [ + "BassBoostSw.cpp", + ":effectCommonFile", + ], + relative_install_path: "soundfx", + visibility: [ + "//hardware/interfaces/audio/aidl/default:__subpackages__", + ], +} diff --git a/audio/bassboost/BassBoostSw.cpp b/audio/bassboost/BassBoostSw.cpp new file mode 100644 index 0000000..60adc30 --- /dev/null +++ b/audio/bassboost/BassBoostSw.cpp @@ -0,0 +1,176 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include + +#define LOG_TAG "AHAL_BassBoostSw" +#include +#include +#include + +#include "BassBoostSw.h" + +using aidl::android::hardware::audio::effect::BassBoostSw; +using aidl::android::hardware::audio::effect::Descriptor; +using aidl::android::hardware::audio::effect::getEffectImplUuidBassBoostProxy; +using aidl::android::hardware::audio::effect::getEffectImplUuidBassBoostSw; +using aidl::android::hardware::audio::effect::getEffectTypeUuidBassBoost; +using aidl::android::hardware::audio::effect::IEffect; +using aidl::android::hardware::audio::effect::State; +using aidl::android::media::audio::common::AudioUuid; + +extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid, + std::shared_ptr* instanceSpp) { + if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidBassBoostSw()) { + LOG(ERROR) << __func__ << "uuid not supported"; + return EX_ILLEGAL_ARGUMENT; + } + if (instanceSpp) { + *instanceSpp = ndk::SharedRefBase::make(); + LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created"; + return EX_NONE; + } else { + LOG(ERROR) << __func__ << " invalid input parameter!"; + return EX_ILLEGAL_ARGUMENT; + } +} + +extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) { + if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidBassBoostSw()) { + LOG(ERROR) << __func__ << "uuid not supported"; + return EX_ILLEGAL_ARGUMENT; + } + *_aidl_return = BassBoostSw::kDescriptor; + return EX_NONE; +} + +namespace aidl::android::hardware::audio::effect { + +const std::string BassBoostSw::kEffectName = "BassBoostSw"; + +const std::vector BassBoostSw::kRanges = { + MAKE_RANGE(BassBoost, strengthPm, 0, 1000)}; +const Capability BassBoostSw::kCapability = {.range = {BassBoostSw::kRanges}}; +const Descriptor BassBoostSw::kDescriptor = { + .common = {.id = {.type = getEffectTypeUuidBassBoost(), + .uuid = getEffectImplUuidBassBoostSw()}, + .flags = {.type = Flags::Type::INSERT, + 
.insert = Flags::Insert::FIRST, + .volume = Flags::Volume::CTRL}, + .name = BassBoostSw::kEffectName, + .implementor = "The Android Open Source Project"}, + .capability = BassBoostSw::kCapability}; + +ndk::ScopedAStatus BassBoostSw::getDescriptor(Descriptor* _aidl_return) { + LOG(DEBUG) << __func__ << kDescriptor.toString(); + *_aidl_return = kDescriptor; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus BassBoostSw::setParameterSpecific(const Parameter::Specific& specific) { + RETURN_IF(Parameter::Specific::bassBoost != specific.getTag(), EX_ILLEGAL_ARGUMENT, + "EffectNotSupported"); + RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext"); + + auto& bbParam = specific.get(); + RETURN_IF(!inRange(bbParam, kRanges), EX_ILLEGAL_ARGUMENT, "outOfRange"); + auto tag = bbParam.getTag(); + + switch (tag) { + case BassBoost::strengthPm: { + const auto strength = bbParam.get(); + RETURN_IF(mContext->setBbStrengthPm(strength) != RetCode::SUCCESS, EX_ILLEGAL_ARGUMENT, + "strengthPmNotSupported"); + return ndk::ScopedAStatus::ok(); + } + default: { + LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT, + "BassBoostTagNotSupported"); + } + } +} + +ndk::ScopedAStatus BassBoostSw::getParameterSpecific(const Parameter::Id& id, + Parameter::Specific* specific) { + auto tag = id.getTag(); + RETURN_IF(Parameter::Id::bassBoostTag != tag, EX_ILLEGAL_ARGUMENT, "wrongIdTag"); + auto bbId = id.get(); + auto bbIdTag = bbId.getTag(); + switch (bbIdTag) { + case BassBoost::Id::commonTag: + return getParameterBassBoost(bbId.get(), specific); + default: + LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT, + "BassBoostTagNotSupported"); + } +} + +ndk::ScopedAStatus BassBoostSw::getParameterBassBoost(const BassBoost::Tag& tag, + Parameter::Specific* specific) { + RETURN_IF(!mContext, EX_NULL_POINTER, 
"nullContext"); + BassBoost bbParam; + switch (tag) { + case BassBoost::strengthPm: { + bbParam.set(mContext->getBbStrengthPm()); + break; + } + default: { + LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT, + "BassBoostTagNotSupported"); + } + } + + specific->set(bbParam); + return ndk::ScopedAStatus::ok(); +} + +std::shared_ptr BassBoostSw::createContext(const Parameter::Common& common) { + if (mContext) { + LOG(DEBUG) << __func__ << " context already exist"; + } else { + mContext = std::make_shared(1 /* statusFmqDepth */, common); + } + return mContext; +} + +RetCode BassBoostSw::releaseContext() { + if (mContext) { + mContext.reset(); + } + return RetCode::SUCCESS; +} + +// Processing method running in EffectWorker thread. +IEffect::Status BassBoostSw::effectProcessImpl(float* in, float* out, int samples) { + // TODO: get data buffer and process. + LOG(DEBUG) << __func__ << " in " << in << " out " << out << " samples " << samples; + for (int i = 0; i < samples; i++) { + *out++ = *in++; + } + return {STATUS_OK, samples, samples}; +} + +RetCode BassBoostSwContext::setBbStrengthPm(int strength) { + mStrength = strength; + return RetCode::SUCCESS; +} + +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/bassboost/BassBoostSw.h b/audio/bassboost/BassBoostSw.h new file mode 100644 index 0000000..901e455 --- /dev/null +++ b/audio/bassboost/BassBoostSw.h @@ -0,0 +1,73 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include +#include +#include +#include + +#include "effect-impl/EffectImpl.h" + +namespace aidl::android::hardware::audio::effect { + +class BassBoostSwContext final : public EffectContext { + public: + BassBoostSwContext(int statusDepth, const Parameter::Common& common) + : EffectContext(statusDepth, common) { + LOG(DEBUG) << __func__; + } + + RetCode setBbStrengthPm(int strength); + int getBbStrengthPm() const { return mStrength; } + + private: + int mStrength = 0; +}; + +class BassBoostSw final : public EffectImpl { + public: + static const std::string kEffectName; + static const Capability kCapability; + static const Descriptor kDescriptor; + BassBoostSw() { LOG(DEBUG) << __func__; } + ~BassBoostSw() { + cleanUp(); + LOG(DEBUG) << __func__; + } + + ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override; + ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) + REQUIRES(mImplMutex) override; + ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific) + REQUIRES(mImplMutex) override; + + std::shared_ptr createContext(const Parameter::Common& common) + REQUIRES(mImplMutex) override; + RetCode releaseContext() REQUIRES(mImplMutex) override; + + std::string getEffectName() override { return kEffectName; }; + IEffect::Status effectProcessImpl(float* in, float* out, int samples) + REQUIRES(mImplMutex) override; + + private: + static const std::vector kRanges; + std::shared_ptr mContext GUARDED_BY(mImplMutex); + ndk::ScopedAStatus 
getParameterBassBoost(const BassBoost::Tag& tag, + Parameter::Specific* specific) REQUIRES(mImplMutex); +}; +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/bluetooth/DevicePortProxy.cpp b/audio/bluetooth/DevicePortProxy.cpp new file mode 100644 index 0000000..d772c20 --- /dev/null +++ b/audio/bluetooth/DevicePortProxy.cpp @@ -0,0 +1,578 @@ +/* + * Copyright 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "AHAL_BluetoothAudioPort" + +#include +#include +#include +#include + +#include "BluetoothAudioSessionControl.h" +#include "core-impl/DevicePortProxy.h" + +using aidl::android::hardware::audio::common::SinkMetadata; +using aidl::android::hardware::audio::common::SourceMetadata; +using aidl::android::hardware::bluetooth::audio::AudioConfiguration; +using aidl::android::hardware::bluetooth::audio::BluetoothAudioSessionControl; +using aidl::android::hardware::bluetooth::audio::BluetoothAudioStatus; +using aidl::android::hardware::bluetooth::audio::ChannelMode; +using aidl::android::hardware::bluetooth::audio::PcmConfiguration; +using aidl::android::hardware::bluetooth::audio::PortStatusCallbacks; +using aidl::android::hardware::bluetooth::audio::PresentationPosition; +using aidl::android::hardware::bluetooth::audio::SessionType; +using aidl::android::media::audio::common::AudioDeviceDescription; +using aidl::android::media::audio::common::AudioDeviceType; +using 
android::base::StringPrintf; + +namespace android::bluetooth::audio::aidl { + +namespace { + +// The maximum time to wait in std::condition_variable::wait_for() +constexpr unsigned int kMaxWaitingTimeMs = 4500; + +} // namespace + +std::ostream& operator<<(std::ostream& os, const BluetoothStreamState& state) { + switch (state) { + case BluetoothStreamState::DISABLED: + return os << "DISABLED"; + case BluetoothStreamState::STANDBY: + return os << "STANDBY"; + case BluetoothStreamState::STARTING: + return os << "STARTING"; + case BluetoothStreamState::STARTED: + return os << "STARTED"; + case BluetoothStreamState::SUSPENDING: + return os << "SUSPENDING"; + case BluetoothStreamState::UNKNOWN: + return os << "UNKNOWN"; + default: + return os << android::base::StringPrintf("%#hhx", state); + } +} + +BluetoothAudioPortAidl::BluetoothAudioPortAidl() + : mCookie(::aidl::android::hardware::bluetooth::audio::kObserversCookieUndefined), + mState(BluetoothStreamState::DISABLED), + mSessionType(SessionType::UNKNOWN) {} + +BluetoothAudioPortAidl::~BluetoothAudioPortAidl() { + unregisterPort(); +} + +bool BluetoothAudioPortAidl::registerPort(const AudioDeviceDescription& description) { + if (inUse()) { + LOG(ERROR) << __func__ << debugMessage() << " already in use"; + return false; + } + + if (!initSessionType(description)) return false; + + auto control_result_cb = [port = this](uint16_t cookie, bool start_resp, + const BluetoothAudioStatus& status) { + (void)start_resp; + port->controlResultHandler(cookie, status); + }; + auto session_changed_cb = [port = this](uint16_t cookie) { + port->sessionChangedHandler(cookie); + }; + // TODO: Add audio_config_changed_cb + PortStatusCallbacks cbacks = { + .control_result_cb_ = control_result_cb, + .session_changed_cb_ = session_changed_cb, + }; + mCookie = BluetoothAudioSessionControl::RegisterControlResultCback(mSessionType, cbacks); + auto isOk = (mCookie != ::aidl::android::hardware::bluetooth::audio::kObserversCookieUndefined); + if 
(isOk) { + std::lock_guard guard(mCvMutex); + mState = BluetoothStreamState::STANDBY; + } + LOG(DEBUG) << __func__ << debugMessage(); + return isOk; +} + +bool BluetoothAudioPortAidl::initSessionType(const AudioDeviceDescription& description) { + if (description.connection == AudioDeviceDescription::CONNECTION_BT_A2DP && + (description.type == AudioDeviceType::OUT_DEVICE || + description.type == AudioDeviceType::OUT_HEADPHONE || + description.type == AudioDeviceType::OUT_SPEAKER)) { + LOG(VERBOSE) << __func__ + << ": device=AUDIO_DEVICE_OUT_BLUETOOTH_A2DP (HEADPHONES/SPEAKER) (" + << description.toString() << ")"; + mSessionType = SessionType::A2DP_SOFTWARE_ENCODING_DATAPATH; + } else if (description.connection == AudioDeviceDescription::CONNECTION_WIRELESS && + description.type == AudioDeviceType::OUT_HEARING_AID) { + LOG(VERBOSE) << __func__ << ": device=AUDIO_DEVICE_OUT_HEARING_AID (MEDIA/VOICE) (" + << description.toString() << ")"; + mSessionType = SessionType::HEARING_AID_SOFTWARE_ENCODING_DATAPATH; + } else if (description.connection == AudioDeviceDescription::CONNECTION_BT_LE && + description.type == AudioDeviceType::OUT_HEADSET) { + LOG(VERBOSE) << __func__ << ": device=AUDIO_DEVICE_OUT_BLE_HEADSET (MEDIA/VOICE) (" + << description.toString() << ")"; + mSessionType = SessionType::LE_AUDIO_SOFTWARE_ENCODING_DATAPATH; + } else if (description.connection == AudioDeviceDescription::CONNECTION_BT_LE && + description.type == AudioDeviceType::OUT_SPEAKER) { + LOG(VERBOSE) << __func__ << ": device=AUDIO_DEVICE_OUT_BLE_SPEAKER (MEDIA) (" + << description.toString() << ")"; + mSessionType = SessionType::LE_AUDIO_SOFTWARE_ENCODING_DATAPATH; + } else if (description.connection == AudioDeviceDescription::CONNECTION_BT_LE && + description.type == AudioDeviceType::IN_HEADSET) { + LOG(VERBOSE) << __func__ << ": device=AUDIO_DEVICE_IN_BLE_HEADSET (VOICE) (" + << description.toString() << ")"; + mSessionType = SessionType::LE_AUDIO_SOFTWARE_DECODING_DATAPATH; + } else if 
(description.connection == AudioDeviceDescription::CONNECTION_BT_LE && + description.type == AudioDeviceType::OUT_BROADCAST) { + LOG(VERBOSE) << __func__ << ": device=AUDIO_DEVICE_OUT_BLE_BROADCAST (MEDIA) (" + << description.toString() << ")"; + mSessionType = SessionType::LE_AUDIO_BROADCAST_SOFTWARE_ENCODING_DATAPATH; + } else { + LOG(ERROR) << __func__ << ": unknown device=" << description.toString(); + return false; + } + + if (!BluetoothAudioSessionControl::IsSessionReady(mSessionType)) { + LOG(ERROR) << __func__ << ": device=" << description.toString() + << ", session_type=" << toString(mSessionType) << " is not ready"; + return false; + } + return true; +} + +void BluetoothAudioPortAidl::unregisterPort() { + if (!inUse()) { + LOG(WARNING) << __func__ << ": BluetoothAudioPortAidl is not in use"; + return; + } + BluetoothAudioSessionControl::UnregisterControlResultCback(mSessionType, mCookie); + mCookie = ::aidl::android::hardware::bluetooth::audio::kObserversCookieUndefined; + LOG(VERBOSE) << __func__ << debugMessage() << " port unregistered"; +} + +void BluetoothAudioPortAidl::controlResultHandler(uint16_t cookie, + const BluetoothAudioStatus& status) { + std::lock_guard guard(mCvMutex); + if (!inUse()) { + LOG(ERROR) << "control_result_cb: BluetoothAudioPortAidl is not in use"; + return; + } + if (mCookie != cookie) { + LOG(ERROR) << "control_result_cb: proxy of device port (cookie=" + << StringPrintf("%#hx", cookie) << ") is corrupted"; + return; + } + BluetoothStreamState previous_state = mState; + LOG(INFO) << "control_result_cb:" << debugMessage() << ", previous_state=" << previous_state + << ", status=" << toString(status); + + switch (previous_state) { + case BluetoothStreamState::STARTED: + /* Only Suspend signal can be send in STARTED state*/ + if (status == BluetoothAudioStatus::RECONFIGURATION || + status == BluetoothAudioStatus::SUCCESS) { + mState = BluetoothStreamState::STANDBY; + } else { + LOG(WARNING) << StringPrintf( + "control_result_cb: 
status=%s failure for session_type= %s, cookie=%#hx, " + "previous_state=%#hhx", + toString(status).c_str(), toString(mSessionType).c_str(), mCookie, + previous_state); + } + break; + case BluetoothStreamState::STARTING: + if (status == BluetoothAudioStatus::SUCCESS) { + mState = BluetoothStreamState::STARTED; + } else { + // Set to standby since the stack may be busy switching between outputs + LOG(WARNING) << StringPrintf( + "control_result_cb: status=%s failure for session_type= %s, cookie=%#hx, " + "previous_state=%#hhx", + toString(status).c_str(), toString(mSessionType).c_str(), mCookie, + previous_state); + mState = BluetoothStreamState::STANDBY; + } + break; + case BluetoothStreamState::SUSPENDING: + if (status == BluetoothAudioStatus::SUCCESS) { + mState = BluetoothStreamState::STANDBY; + } else { + // It will be failed if the headset is disconnecting, and set to disable + // to wait for re-init again + LOG(WARNING) << StringPrintf( + "control_result_cb: status=%s failure for session_type= %s, cookie=%#hx, " + "previous_state=%#hhx", + toString(status).c_str(), toString(mSessionType).c_str(), mCookie, + previous_state); + mState = BluetoothStreamState::DISABLED; + } + break; + default: + LOG(ERROR) << "control_result_cb: unexpected previous_state=" + << StringPrintf( + "control_result_cb: status=%s failure for session_type= %s, " + "cookie=%#hx, previous_state=%#hhx", + toString(status).c_str(), toString(mSessionType).c_str(), mCookie, + previous_state); + return; + } + mInternalCv.notify_all(); +} + +void BluetoothAudioPortAidl::sessionChangedHandler(uint16_t cookie) { + std::lock_guard guard(mCvMutex); + if (!inUse()) { + LOG(ERROR) << "session_changed_cb: BluetoothAudioPortAidl is not in use"; + return; + } + if (mCookie != cookie) { + LOG(ERROR) << "session_changed_cb: proxy of device port (cookie=" + << StringPrintf("%#hx", cookie) << ") is corrupted"; + return; + } + BluetoothStreamState previous_state = mState; + LOG(VERBOSE) << 
"session_changed_cb:" << debugMessage() + << ", previous_state=" << previous_state; + mState = BluetoothStreamState::DISABLED; + mInternalCv.notify_all(); +} + +bool BluetoothAudioPortAidl::inUse() const { + return (mCookie != ::aidl::android::hardware::bluetooth::audio::kObserversCookieUndefined); +} + +bool BluetoothAudioPortAidl::getPreferredDataIntervalUs(size_t& interval_us) const { + if (!inUse()) { + LOG(ERROR) << __func__ << ": BluetoothAudioPortAidl is not in use"; + return false; + } + + const AudioConfiguration& hal_audio_cfg = + BluetoothAudioSessionControl::GetAudioConfig(mSessionType); + if (hal_audio_cfg.getTag() != AudioConfiguration::pcmConfig) { + LOG(ERROR) << __func__ << ": unsupported audio cfg tag"; + return false; + } + + interval_us = hal_audio_cfg.get().dataIntervalUs; + return true; +} + +bool BluetoothAudioPortAidl::loadAudioConfig(PcmConfiguration& audio_cfg) { + if (!inUse()) { + LOG(ERROR) << __func__ << ": BluetoothAudioPortAidl is not in use"; + return false; + } + + const AudioConfiguration& hal_audio_cfg = + BluetoothAudioSessionControl::GetAudioConfig(mSessionType); + if (hal_audio_cfg.getTag() != AudioConfiguration::pcmConfig) { + LOG(ERROR) << __func__ << ": unsupported audio cfg tag"; + return false; + } + audio_cfg = hal_audio_cfg.get(); + LOG(VERBOSE) << __func__ << debugMessage() << ", state*=" << getState() << ", PcmConfig=[" + << audio_cfg.toString() << "]"; + if (audio_cfg.channelMode == ChannelMode::UNKNOWN) { + return false; + } + return true; +} + +bool BluetoothAudioPortAidlOut::loadAudioConfig(PcmConfiguration& audio_cfg) { + if (!BluetoothAudioPortAidl::loadAudioConfig(audio_cfg)) return false; + // WAR to support Mono / 16 bits per sample as the Bluetooth stack requires + if (audio_cfg.channelMode == ChannelMode::MONO && audio_cfg.bitsPerSample == 16) { + mIsStereoToMono = true; + audio_cfg.channelMode = ChannelMode::STEREO; + LOG(INFO) << __func__ << ": force channels = to be AUDIO_CHANNEL_OUT_STEREO"; + } + 
return true; +} + +bool BluetoothAudioPortAidl::standby() { + if (!inUse()) { + LOG(ERROR) << __func__ << ": BluetoothAudioPortAidl is not in use"; + return false; + } + std::lock_guard guard(mCvMutex); + LOG(VERBOSE) << __func__ << debugMessage() << ", state=" << getState() << " request"; + if (mState == BluetoothStreamState::DISABLED) { + mState = BluetoothStreamState::STANDBY; + LOG(VERBOSE) << __func__ << debugMessage() << ", state=" << getState() << " done"; + return true; + } + return false; +} + +bool BluetoothAudioPortAidl::condWaitState(BluetoothStreamState state) { + const auto waitTime = std::chrono::milliseconds(kMaxWaitingTimeMs); + std::unique_lock lock(mCvMutex); + base::ScopedLockAssertion lock_assertion(mCvMutex); + switch (state) { + case BluetoothStreamState::STARTING: { + LOG(VERBOSE) << __func__ << debugMessage() << " waiting for STARTED"; + mInternalCv.wait_for(lock, waitTime, [this] { + base::ScopedLockAssertion lock_assertion(mCvMutex); + return mState != BluetoothStreamState::STARTING; + }); + return mState == BluetoothStreamState::STARTED; + } + case BluetoothStreamState::SUSPENDING: { + LOG(VERBOSE) << __func__ << debugMessage() << " waiting for SUSPENDED"; + mInternalCv.wait_for(lock, waitTime, [this] { + base::ScopedLockAssertion lock_assertion(mCvMutex); + return mState != BluetoothStreamState::SUSPENDING; + }); + return mState == BluetoothStreamState::STANDBY; + } + default: + LOG(WARNING) << __func__ << debugMessage() << " waiting for KNOWN"; + return false; + } + return false; +} + +bool BluetoothAudioPortAidl::start() { + if (!inUse()) { + LOG(ERROR) << __func__ << ": BluetoothAudioPortAidl is not in use"; + return false; + } + LOG(VERBOSE) << __func__ << debugMessage() << ", state=" << getState() + << ", mono=" << (mIsStereoToMono ? 
"true" : "false") << " request"; + + { + std::unique_lock lock(mCvMutex); + base::ScopedLockAssertion lock_assertion(mCvMutex); + if (mState == BluetoothStreamState::STARTED) { + return true; // nop, return + } else if (mState == BluetoothStreamState::SUSPENDING || + mState == BluetoothStreamState::STARTING) { + /* If port is in transient state, give some time to respond */ + auto state_ = mState; + lock.unlock(); + if (!condWaitState(state_)) { + LOG(ERROR) << __func__ << debugMessage() << ", state=" << getState() << " failure"; + return false; + } + } + } + + bool retval = false; + { + std::unique_lock lock(mCvMutex); + base::ScopedLockAssertion lock_assertion(mCvMutex); + if (mState == BluetoothStreamState::STARTED) { + retval = true; + } else if (mState == BluetoothStreamState::STANDBY) { + mState = BluetoothStreamState::STARTING; + lock.unlock(); + if (BluetoothAudioSessionControl::StartStream(mSessionType)) { + retval = condWaitState(BluetoothStreamState::STARTING); + } else { + LOG(ERROR) << __func__ << debugMessage() << ", state=" << getState() + << " Hal fails"; + } + } + } + + if (retval) { + LOG(INFO) << __func__ << debugMessage() << ", state=" << getState() + << ", mono=" << (mIsStereoToMono ? 
"true" : "false") << " done"; + } else { + LOG(ERROR) << __func__ << debugMessage() << ", state=" << getState() << " failure"; + } + + return retval; // false if any failure like timeout +} + +bool BluetoothAudioPortAidl::suspend() { + if (!inUse()) { + LOG(ERROR) << __func__ << ": BluetoothAudioPortAidl is not in use"; + return false; + } + LOG(VERBOSE) << __func__ << debugMessage() << ", state=" << getState() << " request"; + + { + std::unique_lock lock(mCvMutex); + base::ScopedLockAssertion lock_assertion(mCvMutex); + if (mState == BluetoothStreamState::STANDBY) { + return true; // nop, return + } else if (mState == BluetoothStreamState::SUSPENDING || + mState == BluetoothStreamState::STARTING) { + /* If port is in transient state, give some time to respond */ + auto state_ = mState; + lock.unlock(); + if (!condWaitState(state_)) { + LOG(ERROR) << __func__ << debugMessage() << ", state=" << getState() << " failure"; + return false; + } + } + } + + bool retval = false; + { + std::unique_lock lock(mCvMutex); + base::ScopedLockAssertion lock_assertion(mCvMutex); + if (mState == BluetoothStreamState::STANDBY) { + retval = true; + } else if (mState == BluetoothStreamState::STARTED) { + mState = BluetoothStreamState::SUSPENDING; + lock.unlock(); + if (BluetoothAudioSessionControl::SuspendStream(mSessionType)) { + retval = condWaitState(BluetoothStreamState::SUSPENDING); + } else { + LOG(ERROR) << __func__ << debugMessage() << ", state=" << getState() + << " failure to suspend stream"; + } + } + } + + if (retval) { + LOG(INFO) << __func__ << debugMessage() << ", state=" << getState() << " done"; + } else { + LOG(ERROR) << __func__ << debugMessage() << ", state=" << getState() << " failure"; + } + + return retval; // false if any failure like timeout +} + +void BluetoothAudioPortAidl::stop() { + if (!inUse()) { + LOG(ERROR) << __func__ << ": BluetoothAudioPortAidl is not in use"; + return; + } + std::lock_guard guard(mCvMutex); + LOG(VERBOSE) << __func__ << 
debugMessage() << ", state=" << getState() << " request"; + if (mState != BluetoothStreamState::DISABLED) { + BluetoothAudioSessionControl::StopStream(mSessionType); + mState = BluetoothStreamState::DISABLED; + } + LOG(VERBOSE) << __func__ << debugMessage() << ", state=" << getState() << " done"; +} + +size_t BluetoothAudioPortAidlOut::writeData(const void* buffer, size_t bytes) const { + if (!buffer) { + LOG(ERROR) << __func__ << ": bad input arg"; + return 0; + } + + if (!inUse()) { + LOG(ERROR) << __func__ << ": BluetoothAudioPortAidl is not in use"; + return 0; + } + + if (!mIsStereoToMono) { + return BluetoothAudioSessionControl::OutWritePcmData(mSessionType, buffer, bytes); + } + + // WAR to mix the stereo into Mono (16 bits per sample) + const size_t write_frames = bytes >> 2; + if (write_frames == 0) return 0; + auto src = static_cast(buffer); + std::unique_ptr dst{new int16_t[write_frames]}; + downmix_to_mono_i16_from_stereo_i16(dst.get(), src, write_frames); + // a frame is 16 bits, and the size of a mono frame is equal to half a stereo. 
+ auto totalWrite = BluetoothAudioSessionControl::OutWritePcmData(mSessionType, dst.get(), + write_frames * 2); + return totalWrite * 2; +} + +size_t BluetoothAudioPortAidlIn::readData(void* buffer, size_t bytes) const { + if (!buffer) { + LOG(ERROR) << __func__ << ": bad input arg"; + return 0; + } + + if (!inUse()) { + LOG(ERROR) << __func__ << ": BluetoothAudioPortAidl is not in use"; + return 0; + } + + return BluetoothAudioSessionControl::InReadPcmData(mSessionType, buffer, bytes); +} + +bool BluetoothAudioPortAidl::getPresentationPosition( + PresentationPosition& presentation_position) const { + if (!inUse()) { + LOG(ERROR) << __func__ << ": BluetoothAudioPortAidl is not in use"; + return false; + } + bool retval = BluetoothAudioSessionControl::GetPresentationPosition(mSessionType, + presentation_position); + LOG(VERBOSE) << __func__ << debugMessage() << ", state=" << getState() + << presentation_position.toString(); + + return retval; +} + +bool BluetoothAudioPortAidl::updateSourceMetadata(const SourceMetadata& source_metadata) const { + if (!inUse()) { + LOG(ERROR) << __func__ << ": BluetoothAudioPortAidl is not in use"; + return false; + } + LOG(DEBUG) << __func__ << debugMessage() << ", state=" << getState() << ", " + << source_metadata.tracks.size() << " track(s)"; + if (source_metadata.tracks.size() == 0) return true; + return BluetoothAudioSessionControl::UpdateSourceMetadata(mSessionType, source_metadata); +} + +bool BluetoothAudioPortAidl::updateSinkMetadata(const SinkMetadata& sink_metadata) const { + if (!inUse()) { + LOG(ERROR) << __func__ << ": BluetoothAudioPortAidl is not in use"; + return false; + } + LOG(DEBUG) << __func__ << debugMessage() << ", state=" << getState() << ", " + << sink_metadata.tracks.size() << " track(s)"; + if (sink_metadata.tracks.size() == 0) return true; + return BluetoothAudioSessionControl::UpdateSinkMetadata(mSessionType, sink_metadata); +} + +BluetoothStreamState BluetoothAudioPortAidl::getState() const { + return 
mState; +} + +bool BluetoothAudioPortAidl::setState(BluetoothStreamState state) { + if (!inUse()) { + LOG(ERROR) << __func__ << ": BluetoothAudioPortAidl is not in use"; + return false; + } + std::lock_guard guard(mCvMutex); + LOG(DEBUG) << __func__ << ": BluetoothAudioPortAidl old state = " << mState + << " new state = " << state; + mState = state; + return true; +} + +bool BluetoothAudioPortAidl::isA2dp() const { + return mSessionType == SessionType::A2DP_SOFTWARE_ENCODING_DATAPATH || + mSessionType == SessionType::A2DP_HARDWARE_OFFLOAD_ENCODING_DATAPATH; +} + +bool BluetoothAudioPortAidl::isLeAudio() const { + return mSessionType == SessionType::LE_AUDIO_SOFTWARE_ENCODING_DATAPATH || + mSessionType == SessionType::LE_AUDIO_SOFTWARE_DECODING_DATAPATH || + mSessionType == SessionType::LE_AUDIO_HARDWARE_OFFLOAD_ENCODING_DATAPATH || + mSessionType == SessionType::LE_AUDIO_HARDWARE_OFFLOAD_DECODING_DATAPATH || + mSessionType == SessionType::LE_AUDIO_BROADCAST_SOFTWARE_ENCODING_DATAPATH || + mSessionType == SessionType::LE_AUDIO_BROADCAST_HARDWARE_OFFLOAD_ENCODING_DATAPATH; +} + +std::string BluetoothAudioPortAidl::debugMessage() const { + return StringPrintf(": session_type=%s, cookie=%#hx", toString(mSessionType).c_str(), mCookie); +} + +} // namespace android::bluetooth::audio::aidl diff --git a/audio/bluetooth/ModuleBluetooth.cpp b/audio/bluetooth/ModuleBluetooth.cpp new file mode 100644 index 0000000..8c381cd --- /dev/null +++ b/audio/bluetooth/ModuleBluetooth.cpp @@ -0,0 +1,342 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "AHAL_ModuleBluetooth" + +#include + +#include "BluetoothAudioSession.h" +#include "core-impl/ModuleBluetooth.h" +#include "core-impl/StreamBluetooth.h" + +using aidl::android::hardware::audio::common::SinkMetadata; +using aidl::android::hardware::audio::common::SourceMetadata; +using aidl::android::hardware::bluetooth::audio::ChannelMode; +using aidl::android::hardware::bluetooth::audio::PcmConfiguration; +using aidl::android::media::audio::common::AudioChannelLayout; +using aidl::android::media::audio::common::AudioConfigBase; +using aidl::android::media::audio::common::AudioDeviceDescription; +using aidl::android::media::audio::common::AudioDeviceType; +using aidl::android::media::audio::common::AudioFormatDescription; +using aidl::android::media::audio::common::AudioFormatType; +using aidl::android::media::audio::common::AudioIoFlags; +using aidl::android::media::audio::common::AudioOffloadInfo; +using aidl::android::media::audio::common::AudioPort; +using aidl::android::media::audio::common::AudioPortConfig; +using aidl::android::media::audio::common::AudioPortExt; +using aidl::android::media::audio::common::AudioProfile; +using aidl::android::media::audio::common::Int; +using aidl::android::media::audio::common::MicrophoneInfo; +using aidl::android::media::audio::common::PcmType; +using android::bluetooth::audio::aidl::BluetoothAudioPortAidl; +using android::bluetooth::audio::aidl::BluetoothAudioPortAidlIn; +using android::bluetooth::audio::aidl::BluetoothAudioPortAidlOut; + +// TODO(b/312265159) bluetooth audio should 
be in its own process +// Remove this and the shared_libs when that happens +extern "C" binder_status_t createIBluetoothAudioProviderFactory(); + +namespace aidl::android::hardware::audio::core { + +namespace { + +PcmType pcmTypeFromBitsPerSample(int8_t bitsPerSample) { + if (bitsPerSample == 8) + return PcmType::UINT_8_BIT; + else if (bitsPerSample == 16) + return PcmType::INT_16_BIT; + else if (bitsPerSample == 24) + return PcmType::INT_24_BIT; + else if (bitsPerSample == 32) + return PcmType::INT_32_BIT; + ALOGE("Unsupported bitsPerSample: %d", bitsPerSample); + return PcmType::DEFAULT; +} + +AudioChannelLayout channelLayoutFromChannelMode(ChannelMode mode) { + if (mode == ChannelMode::MONO) { + return AudioChannelLayout::make( + AudioChannelLayout::LAYOUT_MONO); + } else if (mode == ChannelMode::STEREO || mode == ChannelMode::DUALMONO) { + return AudioChannelLayout::make( + AudioChannelLayout::LAYOUT_STEREO); + } + ALOGE("Unsupported channel mode: %s", toString(mode).c_str()); + return AudioChannelLayout{}; +} + +} // namespace + +ModuleBluetooth::ModuleBluetooth(std::unique_ptr&& config) + : Module(Type::BLUETOOTH, std::move(config)) { + // TODO(b/312265159) bluetooth audio should be in its own process + // Remove this and the shared_libs when that happens + binder_status_t status = createIBluetoothAudioProviderFactory(); + if (status != STATUS_OK) { + LOG(ERROR) << "Failed to create bluetooth audio provider factory. 
Status: " + << ::android::statusToString(status); + } +} + +ndk::ScopedAStatus ModuleBluetooth::getBluetoothA2dp( + std::shared_ptr* _aidl_return) { + *_aidl_return = getBtA2dp().getInstance(); + LOG(DEBUG) << __func__ << ": returning instance of IBluetoothA2dp: " << _aidl_return->get(); + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus ModuleBluetooth::getBluetoothLe(std::shared_ptr* _aidl_return) { + *_aidl_return = getBtLe().getInstance(); + LOG(DEBUG) << __func__ << ": returning instance of IBluetoothLe: " << _aidl_return->get(); + return ndk::ScopedAStatus::ok(); +} + +ChildInterface& ModuleBluetooth::getBtA2dp() { + if (!mBluetoothA2dp) { + auto handle = ndk::SharedRefBase::make(); + handle->registerHandler(std::bind(&ModuleBluetooth::bluetoothParametersUpdated, this)); + mBluetoothA2dp = handle; + } + return mBluetoothA2dp; +} + +ChildInterface& ModuleBluetooth::getBtLe() { + if (!mBluetoothLe) { + auto handle = ndk::SharedRefBase::make(); + handle->registerHandler(std::bind(&ModuleBluetooth::bluetoothParametersUpdated, this)); + mBluetoothLe = handle; + } + return mBluetoothLe; +} + +ModuleBluetooth::BtProfileHandles ModuleBluetooth::getBtProfileManagerHandles() { + return std::make_tuple(std::weak_ptr(), getBtA2dp().getPtr(), getBtLe().getPtr()); +} + +ndk::ScopedAStatus ModuleBluetooth::getMicMute(bool* _aidl_return __unused) { + LOG(DEBUG) << __func__ << ": is not supported"; + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +ndk::ScopedAStatus ModuleBluetooth::setMicMute(bool in_mute __unused) { + LOG(DEBUG) << __func__ << ": is not supported"; + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +ndk::ScopedAStatus ModuleBluetooth::setAudioPortConfig(const AudioPortConfig& in_requested, + AudioPortConfig* out_suggested, + bool* _aidl_return) { + auto fillConfig = [this](const AudioPort& port, AudioPortConfig* config) { + if (port.ext.getTag() == AudioPortExt::device) { + CachedProxy 
proxy; + auto status = findOrCreateProxy(port, proxy); + if (status.isOk()) { + const auto& pcmConfig = proxy.pcmConfig; + LOG(DEBUG) << "setAudioPortConfig: suggesting port config from " + << pcmConfig.toString(); + const auto pcmType = pcmTypeFromBitsPerSample(pcmConfig.bitsPerSample); + const auto channelMask = channelLayoutFromChannelMode(pcmConfig.channelMode); + if (pcmType != PcmType::DEFAULT && channelMask != AudioChannelLayout{}) { + config->format = + AudioFormatDescription{.type = AudioFormatType::PCM, .pcm = pcmType}; + config->channelMask = channelMask; + config->sampleRate = Int{.value = pcmConfig.sampleRateHz}; + config->flags = port.flags; + config->ext = port.ext; + return true; + } + } + } + return generateDefaultPortConfig(port, config); + }; + return Module::setAudioPortConfigImpl(in_requested, fillConfig, out_suggested, _aidl_return); +} + +ndk::ScopedAStatus ModuleBluetooth::checkAudioPatchEndpointsMatch( + const std::vector& sources, const std::vector& sinks) { + // Both sources and sinks must be non-empty, this is guaranteed by 'setAudioPatch'. + const bool isInput = sources[0]->ext.getTag() == AudioPortExt::device; + const int32_t devicePortId = isInput ? sources[0]->portId : sinks[0]->portId; + const auto proxyIt = mProxies.find(devicePortId); + if (proxyIt == mProxies.end()) return ndk::ScopedAStatus::ok(); + const auto& pcmConfig = proxyIt->second.pcmConfig; + const AudioPortConfig* mixPortConfig = isInput ? 
sinks[0] : sources[0]; + if (!StreamBluetooth::checkConfigParams( + pcmConfig, AudioConfigBase{.sampleRate = mixPortConfig->sampleRate->value, + .channelMask = *(mixPortConfig->channelMask), + .format = *(mixPortConfig->format)})) { + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); + } + if (int32_t handle = mixPortConfig->ext.get().handle; handle > 0) { + mConnections.insert(std::pair(handle, devicePortId)); + } + return ndk::ScopedAStatus::ok(); +} + +void ModuleBluetooth::onExternalDeviceConnectionChanged(const AudioPort& audioPort, + bool connected) { + if (!connected) mProxies.erase(audioPort.id); +} + +ndk::ScopedAStatus ModuleBluetooth::createInputStream( + StreamContext&& context, const SinkMetadata& sinkMetadata, + const std::vector& microphones, std::shared_ptr* result) { + CachedProxy proxy; + RETURN_STATUS_IF_ERROR(fetchAndCheckProxy(context, proxy)); + return createStreamInstance(result, std::move(context), sinkMetadata, + microphones, getBtProfileManagerHandles(), + proxy.ptr, proxy.pcmConfig); +} + +ndk::ScopedAStatus ModuleBluetooth::createOutputStream( + StreamContext&& context, const SourceMetadata& sourceMetadata, + const std::optional& offloadInfo, std::shared_ptr* result) { + CachedProxy proxy; + RETURN_STATUS_IF_ERROR(fetchAndCheckProxy(context, proxy)); + return createStreamInstance(result, std::move(context), sourceMetadata, + offloadInfo, getBtProfileManagerHandles(), + proxy.ptr, proxy.pcmConfig); +} + +ndk::ScopedAStatus ModuleBluetooth::populateConnectedDevicePort(AudioPort* audioPort, + int32_t nextPortId) { + if (audioPort->ext.getTag() != AudioPortExt::device) { + LOG(ERROR) << __func__ << ": not a device port: " << audioPort->toString(); + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + if (!::aidl::android::hardware::bluetooth::audio::BluetoothAudioSession::IsAidlAvailable()) { + LOG(ERROR) << __func__ << ": IBluetoothAudioProviderFactory AIDL service not available"; + return 
ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + const auto& devicePort = audioPort->ext.get(); + const auto& description = devicePort.device.type; + // This method must return an error when the device can not be connected. + // Since A2DP/LE status events are sent asynchronously, it is more reliable + // to attempt connecting to the BT stack rather than judge by the A2DP/LE status. + if (description.connection != AudioDeviceDescription::CONNECTION_BT_A2DP && + description.connection != AudioDeviceDescription::CONNECTION_BT_LE && + !(description.connection == AudioDeviceDescription::CONNECTION_WIRELESS && + description.type == AudioDeviceType::OUT_HEARING_AID)) { + LOG(ERROR) << __func__ << ": unsupported device type: " << audioPort->toString(); + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + CachedProxy proxy; + RETURN_STATUS_IF_ERROR(createProxy(*audioPort, nextPortId, proxy)); + // If the device is actually connected, it is configured by the BT stack. + // Provide the current configuration instead of all possible profiles. 
+ const auto& pcmConfig = proxy.pcmConfig; + audioPort->profiles.clear(); + audioPort->profiles.push_back( + AudioProfile{.format = AudioFormatDescription{.type = AudioFormatType::PCM, + .pcm = pcmTypeFromBitsPerSample( + pcmConfig.bitsPerSample)}, + .channelMasks = std::vector( + {channelLayoutFromChannelMode(pcmConfig.channelMode)}), + .sampleRates = std::vector({pcmConfig.sampleRateHz})}); + LOG(DEBUG) << __func__ << ": " << audioPort->toString(); + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus ModuleBluetooth::onMasterMuteChanged(bool) { + LOG(DEBUG) << __func__ << ": is not supported"; + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +ndk::ScopedAStatus ModuleBluetooth::onMasterVolumeChanged(float) { + LOG(DEBUG) << __func__ << ": is not supported"; + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +int32_t ModuleBluetooth::getNominalLatencyMs(const AudioPortConfig& portConfig) { + const auto connectionsIt = mConnections.find(portConfig.ext.get().handle); + if (connectionsIt != mConnections.end()) { + const auto proxyIt = mProxies.find(connectionsIt->second); + if (proxyIt != mProxies.end()) { + auto proxy = proxyIt->second.ptr; + size_t dataIntervalUs = 0; + if (!proxy->getPreferredDataIntervalUs(dataIntervalUs)) { + LOG(WARNING) << __func__ << ": could not fetch preferred data interval"; + } + const bool isInput = portConfig.flags->getTag() == AudioIoFlags::input; + return isInput ? StreamInBluetooth::getNominalLatencyMs(dataIntervalUs) + : StreamOutBluetooth::getNominalLatencyMs(dataIntervalUs); + } + } + LOG(ERROR) << __func__ << ": no connection or proxy found for " << portConfig.toString(); + return Module::getNominalLatencyMs(portConfig); +} + +ndk::ScopedAStatus ModuleBluetooth::createProxy(const AudioPort& audioPort, int32_t instancePortId, + CachedProxy& proxy) { + const bool isInput = audioPort.flags.getTag() == AudioIoFlags::input; + proxy.ptr = isInput ? 
std::shared_ptr( + std::make_shared()) + : std::shared_ptr( + std::make_shared()); + const auto& devicePort = audioPort.ext.get(); + const auto device = devicePort.device.type; + bool registrationSuccess = false; + for (int i = 0; i < kCreateProxyRetries && !registrationSuccess; ++i) { + registrationSuccess = proxy.ptr->registerPort(device); + usleep(kCreateProxyRetrySleepMs * 1000); + } + if (!registrationSuccess) { + LOG(ERROR) << __func__ << ": failed to register BT port for " << device.toString(); + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + if (!proxy.ptr->loadAudioConfig(proxy.pcmConfig)) { + LOG(ERROR) << __func__ << ": state=" << proxy.ptr->getState() + << ", failed to load audio config"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + mProxies.insert(std::pair(instancePortId, proxy)); + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus ModuleBluetooth::fetchAndCheckProxy(const StreamContext& context, + CachedProxy& proxy) { + const auto connectionsIt = mConnections.find(context.getMixPortHandle()); + if (connectionsIt != mConnections.end()) { + const auto proxyIt = mProxies.find(connectionsIt->second); + if (proxyIt != mProxies.end()) { + proxy = proxyIt->second; + mProxies.erase(proxyIt); + } + mConnections.erase(connectionsIt); + } + if (proxy.ptr != nullptr) { + if (!StreamBluetooth::checkConfigParams( + proxy.pcmConfig, AudioConfigBase{.sampleRate = context.getSampleRate(), + .channelMask = context.getChannelLayout(), + .format = context.getFormat()})) { + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + } + // Not having a proxy is OK, it may happen in VTS tests when streams are opened on unconnected + // mix ports. 
+ return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus ModuleBluetooth::findOrCreateProxy(const AudioPort& audioPort, + CachedProxy& proxy) { + if (auto proxyIt = mProxies.find(audioPort.id); proxyIt != mProxies.end()) { + proxy = proxyIt->second; + return ndk::ScopedAStatus::ok(); + } + return createProxy(audioPort, audioPort.id, proxy); +} + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/bluetooth/StreamBluetooth.cpp b/audio/bluetooth/StreamBluetooth.cpp new file mode 100644 index 0000000..77ce121 --- /dev/null +++ b/audio/bluetooth/StreamBluetooth.cpp @@ -0,0 +1,272 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#define LOG_TAG "AHAL_StreamBluetooth" +#include +#include +#include + +#include "core-impl/StreamBluetooth.h" + +using aidl::android::hardware::audio::common::SinkMetadata; +using aidl::android::hardware::audio::common::SourceMetadata; +using aidl::android::hardware::audio::core::VendorParameter; +using aidl::android::hardware::bluetooth::audio::ChannelMode; +using aidl::android::hardware::bluetooth::audio::PcmConfiguration; +using aidl::android::hardware::bluetooth::audio::PresentationPosition; +using aidl::android::media::audio::common::AudioChannelLayout; +using aidl::android::media::audio::common::AudioConfigBase; +using aidl::android::media::audio::common::AudioDevice; +using aidl::android::media::audio::common::AudioDeviceAddress; +using aidl::android::media::audio::common::AudioFormatDescription; +using aidl::android::media::audio::common::AudioFormatType; +using aidl::android::media::audio::common::AudioOffloadInfo; +using aidl::android::media::audio::common::MicrophoneDynamicInfo; +using aidl::android::media::audio::common::MicrophoneInfo; +using android::bluetooth::audio::aidl::BluetoothAudioPortAidl; +using android::bluetooth::audio::aidl::BluetoothAudioPortAidlIn; +using android::bluetooth::audio::aidl::BluetoothAudioPortAidlOut; +using android::bluetooth::audio::aidl::BluetoothStreamState; + +namespace aidl::android::hardware::audio::core { + +constexpr int kBluetoothDefaultInputBufferMs = 20; +constexpr int kBluetoothDefaultOutputBufferMs = 10; +// constexpr int kBluetoothSpatializerOutputBufferMs = 10; +constexpr int kBluetoothDefaultRemoteDelayMs = 200; + +StreamBluetooth::StreamBluetooth(StreamContext* context, const Metadata& metadata, + ModuleBluetooth::BtProfileHandles&& btHandles, + const std::shared_ptr& btDeviceProxy, + const PcmConfiguration& pcmConfig) + : StreamCommonImpl(context, metadata), + mFrameSizeBytes(getContext().getFrameSize()), + mIsInput(isInput(metadata)), + mBluetoothA2dp(std::move(std::get(btHandles))), 
+ mBluetoothLe(std::move(std::get(btHandles))), + mPreferredDataIntervalUs(pcmConfig.dataIntervalUs != 0 + ? pcmConfig.dataIntervalUs + : (mIsInput ? kBluetoothDefaultInputBufferMs + : kBluetoothDefaultOutputBufferMs) * + 1000), + mBtDeviceProxy(btDeviceProxy) {} + +StreamBluetooth::~StreamBluetooth() { + cleanupWorker(); +} + +::android::status_t StreamBluetooth::init(DriverCallbackInterface*) { + std::lock_guard guard(mLock); + if (mBtDeviceProxy == nullptr) { + // This is a normal situation in VTS tests. + LOG(INFO) << __func__ << ": no BT HAL proxy, stream is non-functional"; + } + LOG(INFO) << __func__ << ": preferred data interval (us): " << mPreferredDataIntervalUs; + return ::android::OK; +} + +::android::status_t StreamBluetooth::drain(StreamDescriptor::DrainMode) { + usleep(1000); + return ::android::OK; +} + +::android::status_t StreamBluetooth::flush() { + usleep(1000); + return ::android::OK; +} + +::android::status_t StreamBluetooth::pause() { + return standby(); +} + +::android::status_t StreamBluetooth::transfer(void* buffer, size_t frameCount, + size_t* actualFrameCount, int32_t* latencyMs) { + std::lock_guard guard(mLock); + *actualFrameCount = 0; + *latencyMs = StreamDescriptor::LATENCY_UNKNOWN; + if (mBtDeviceProxy == nullptr || mBtDeviceProxy->getState() == BluetoothStreamState::DISABLED) { + // The BT session is turned down, silently ignore write. + return ::android::OK; + } + if (!mBtDeviceProxy->start()) { + LOG(WARNING) << __func__ << ": state= " << mBtDeviceProxy->getState() + << " failed to start, will retry"; + return ::android::OK; + } + *latencyMs = 0; + const size_t bytesToTransfer = frameCount * mFrameSizeBytes; + const size_t bytesTransferred = mIsInput ? 
mBtDeviceProxy->readData(buffer, bytesToTransfer) + : mBtDeviceProxy->writeData(buffer, bytesToTransfer); + *actualFrameCount = bytesTransferred / mFrameSizeBytes; + PresentationPosition presentation_position; + if (!mBtDeviceProxy->getPresentationPosition(presentation_position)) { + presentation_position.remoteDeviceAudioDelayNanos = + kBluetoothDefaultRemoteDelayMs * NANOS_PER_MILLISECOND; + LOG(WARNING) << __func__ << ": getPresentationPosition failed, latency info is unavailable"; + } + // TODO(b/317117580): incorporate logic from + // packages/modules/Bluetooth/system/audio_bluetooth_hw/stream_apis.cc + // out_calculate_feeding_delay_ms / in_calculate_starving_delay_ms + *latencyMs = std::max(*latencyMs, (int32_t)(presentation_position.remoteDeviceAudioDelayNanos / + NANOS_PER_MILLISECOND)); + return ::android::OK; +} + +// static +bool StreamBluetooth::checkConfigParams(const PcmConfiguration& pcmConfig, + const AudioConfigBase& config) { + if ((int)config.sampleRate != pcmConfig.sampleRateHz) { + LOG(ERROR) << __func__ << ": sample rate mismatch, stream value=" << config.sampleRate + << ", BT HAL value=" << pcmConfig.sampleRateHz; + return false; + } + const auto channelCount = + aidl::android::hardware::audio::common::getChannelCount(config.channelMask); + if ((pcmConfig.channelMode == ChannelMode::MONO && channelCount != 1) || + (pcmConfig.channelMode == ChannelMode::STEREO && channelCount != 2)) { + LOG(ERROR) << __func__ << ": Channel count mismatch, stream value=" << channelCount + << ", BT HAL value=" << toString(pcmConfig.channelMode); + return false; + } + if (config.format.type != AudioFormatType::PCM) { + LOG(ERROR) << __func__ + << ": unexpected stream format type: " << toString(config.format.type); + return false; + } + const int8_t bitsPerSample = + aidl::android::hardware::audio::common::getPcmSampleSizeInBytes(config.format.pcm) * 8; + if (bitsPerSample != pcmConfig.bitsPerSample) { + LOG(ERROR) << __func__ << ": bits per sample mismatch, 
stream value=" << bitsPerSample + << ", BT HAL value=" << pcmConfig.bitsPerSample; + return false; + } + return true; +} + +ndk::ScopedAStatus StreamBluetooth::prepareToClose() { + std::lock_guard guard(mLock); + if (mBtDeviceProxy != nullptr) { + if (mBtDeviceProxy->getState() != BluetoothStreamState::DISABLED) { + mBtDeviceProxy->stop(); + } + } + return ndk::ScopedAStatus::ok(); +} + +::android::status_t StreamBluetooth::standby() { + std::lock_guard guard(mLock); + if (mBtDeviceProxy != nullptr) mBtDeviceProxy->suspend(); + return ::android::OK; +} + +::android::status_t StreamBluetooth::start() { + std::lock_guard guard(mLock); + if (mBtDeviceProxy != nullptr) mBtDeviceProxy->start(); + return ::android::OK; +} + +void StreamBluetooth::shutdown() { + std::lock_guard guard(mLock); + if (mBtDeviceProxy != nullptr) { + mBtDeviceProxy->stop(); + mBtDeviceProxy = nullptr; + } +} + +ndk::ScopedAStatus StreamBluetooth::updateMetadataCommon(const Metadata& metadata) { + std::lock_guard guard(mLock); + if (mBtDeviceProxy == nullptr) { + return ndk::ScopedAStatus::ok(); + } + bool isOk = true; + if (isInput(metadata)) { + isOk = mBtDeviceProxy->updateSinkMetadata(std::get(metadata)); + } else { + isOk = mBtDeviceProxy->updateSourceMetadata(std::get(metadata)); + } + return isOk ? 
ndk::ScopedAStatus::ok() + : ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +ndk::ScopedAStatus StreamBluetooth::bluetoothParametersUpdated() { + if (mIsInput) { + return ndk::ScopedAStatus::ok(); + } + auto applyParam = [](const std::shared_ptr& proxy, + bool isEnabled) -> bool { + if (!isEnabled) { + if (proxy->suspend()) return proxy->setState(BluetoothStreamState::DISABLED); + return false; + } + return proxy->standby(); + }; + bool hasA2dpParam, enableA2dp; + auto btA2dp = mBluetoothA2dp.lock(); + hasA2dpParam = btA2dp != nullptr && btA2dp->isEnabled(&enableA2dp).isOk(); + bool hasLeParam, enableLe; + auto btLe = mBluetoothLe.lock(); + hasLeParam = btLe != nullptr && btLe->isEnabled(&enableLe).isOk(); + std::lock_guard guard(mLock); + if (mBtDeviceProxy != nullptr) { + if ((hasA2dpParam && mBtDeviceProxy->isA2dp() && !applyParam(mBtDeviceProxy, enableA2dp)) || + (hasLeParam && mBtDeviceProxy->isLeAudio() && !applyParam(mBtDeviceProxy, enableLe))) { + LOG(DEBUG) << __func__ << ": applyParam failed"; + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); + } + } + return ndk::ScopedAStatus::ok(); +} + +// static +int32_t StreamInBluetooth::getNominalLatencyMs(size_t dataIntervalUs) { + if (dataIntervalUs == 0) dataIntervalUs = kBluetoothDefaultInputBufferMs * 1000LL; + return dataIntervalUs / 1000LL; +} + +StreamInBluetooth::StreamInBluetooth(StreamContext&& context, const SinkMetadata& sinkMetadata, + const std::vector& microphones, + ModuleBluetooth::BtProfileHandles&& btProfileHandles, + const std::shared_ptr& btDeviceProxy, + const PcmConfiguration& pcmConfig) + : StreamIn(std::move(context), microphones), + StreamBluetooth(&mContextInstance, sinkMetadata, std::move(btProfileHandles), btDeviceProxy, + pcmConfig) {} + +ndk::ScopedAStatus StreamInBluetooth::getActiveMicrophones( + std::vector* _aidl_return __unused) { + LOG(DEBUG) << __func__ << ": not supported"; + return 
ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +// static +int32_t StreamOutBluetooth::getNominalLatencyMs(size_t dataIntervalUs) { + if (dataIntervalUs == 0) dataIntervalUs = kBluetoothDefaultOutputBufferMs * 1000LL; + return dataIntervalUs / 1000LL; +} + +StreamOutBluetooth::StreamOutBluetooth(StreamContext&& context, + const SourceMetadata& sourceMetadata, + const std::optional& offloadInfo, + ModuleBluetooth::BtProfileHandles&& btProfileHandles, + const std::shared_ptr& btDeviceProxy, + const PcmConfiguration& pcmConfig) + : StreamOut(std::move(context), offloadInfo), + StreamBluetooth(&mContextInstance, sourceMetadata, std::move(btProfileHandles), btDeviceProxy, + pcmConfig) {} + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/config/audioPolicy/Android.bp b/audio/config/audioPolicy/Android.bp new file mode 100644 index 0000000..baa3762 --- /dev/null +++ b/audio/config/audioPolicy/Android.bp @@ -0,0 +1,16 @@ +package { + default_team: "trendy_team_android_media_audio_framework", + // See: http://go/android-license-faq + // A large-scale-change added 'default_applicable_licenses' to import + // all of the 'license_kinds' from "hardware_interfaces_license" + // to get the below license kinds: + // SPDX-license-identifier-Apache-2.0 + default_applicable_licenses: ["hardware_interfaces_license"], +} + +xsd_config { + name: "audio_policy_configuration_aidl_default", + srcs: ["audio_policy_configuration.xsd"], + package_name: "android.audio.policy.configuration", + nullability: true, +} diff --git a/audio/config/audioPolicy/api/current.txt b/audio/config/audioPolicy/api/current.txt new file mode 100644 index 0000000..c675820 --- /dev/null +++ b/audio/config/audioPolicy/api/current.txt @@ -0,0 +1,582 @@ +// Signature format: 2.0 +package android.audio.policy.configuration { + + public class AttachedDevices { + ctor public AttachedDevices(); + method @Nullable public java.util.List getItem(); + } + + public enum 
AudioChannelMask { + method @NonNull public String getRawName(); + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_1; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_10; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_11; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_12; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_13; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_14; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_15; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_16; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_17; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_18; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_19; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_2; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_20; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_21; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_22; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_23; + enum_constant public static final 
android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_24; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_3; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_4; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_5; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_6; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_7; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_8; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_9; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_IN_2POINT0POINT2; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_IN_2POINT1; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_IN_2POINT1POINT2; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_IN_3POINT0POINT2; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_IN_3POINT1; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_IN_3POINT1POINT2; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_IN_5POINT1; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_IN_6; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_IN_FRONT_BACK; + enum_constant public static final 
android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_IN_MONO; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_IN_PENTA; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_IN_QUAD; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_IN_STEREO; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_IN_TRI; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_IN_VOICE_CALL_MONO; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_IN_VOICE_DNLINK_MONO; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_IN_VOICE_UPLINK_MONO; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_NONE; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_13POINT0; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_22POINT2; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_2POINT0POINT2; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_2POINT1; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_2POINT1POINT2; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_3POINT0POINT2; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_3POINT1; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_3POINT1POINT2; + enum_constant public static final 
android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_5POINT1; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_5POINT1POINT2; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_5POINT1POINT4; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_5POINT1_BACK; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_5POINT1_SIDE; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_6POINT1; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_7POINT1; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_7POINT1POINT2; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_7POINT1POINT4; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_9POINT1POINT4; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_9POINT1POINT6; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_HAPTIC_AB; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_MONO; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_MONO_HAPTIC_A; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_MONO_HAPTIC_AB; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_PENTA; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_QUAD; + enum_constant public 
static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_QUAD_BACK; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_QUAD_SIDE; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_STEREO; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_STEREO_HAPTIC_A; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_STEREO_HAPTIC_AB; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_SURROUND; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_TRI; + enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_TRI_BACK; + } + + public enum AudioDevice { + method @NonNull public String getRawName(); + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_AMBIENT; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_ANLG_DOCK_HEADSET; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_AUX_DIGITAL; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_BACK_MIC; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_BLE_HEADSET; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_BLUETOOTH_A2DP; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_BLUETOOTH_BLE; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET; + enum_constant public static final android.audio.policy.configuration.AudioDevice 
AUDIO_DEVICE_IN_BUILTIN_MIC; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_BUS; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_COMMUNICATION; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_DEFAULT; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_DGTL_DOCK_HEADSET; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_ECHO_REFERENCE; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_FM_TUNER; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_HDMI; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_HDMI_ARC; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_HDMI_EARC; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_IP; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_LINE; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_LOOPBACK; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_PROXY; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_REMOTE_SUBMIX; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_SPDIF; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_STUB; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_TELEPHONY_RX; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_TV_TUNER; + 
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_USB_ACCESSORY; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_USB_DEVICE; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_USB_HEADSET; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_VOICE_CALL; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_WIRED_HEADSET; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_NONE; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_AUX_DIGITAL; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_AUX_LINE; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_BLE_BROADCAST; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_BLE_HEADSET; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_BLE_SPEAKER; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_BLUETOOTH_A2DP; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_BLUETOOTH_SCO; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT; + enum_constant public static final 
android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_BUS; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_DEFAULT; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_EARPIECE; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_ECHO_CANCELLER; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_FM; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_HDMI; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_HDMI_ARC; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_HDMI_EARC; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_HEARING_AID; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_IP; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_LINE; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_MULTICHANNEL_GROUP; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_PROXY; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_REMOTE_SUBMIX; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_SPDIF; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_SPEAKER; + enum_constant public static final 
android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_SPEAKER_SAFE; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_STUB; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_TELEPHONY_TX; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_USB_ACCESSORY; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_USB_DEVICE; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_USB_HEADSET; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_WIRED_HEADPHONE; + enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_WIRED_HEADSET; + } + + public enum AudioFormat { + method @NonNull public String getRawName(); + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_ADIF; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_ADTS; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_ADTS_ELD; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_ADTS_ERLC; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_ADTS_HE_V1; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_ADTS_HE_V2; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_ADTS_LC; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_ADTS_LD; + enum_constant public static final 
android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_ADTS_LTP; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_ADTS_MAIN; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_ADTS_SCALABLE; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_ADTS_SSR; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_ADTS_XHE; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_ELD; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_ERLC; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_HE_V1; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_HE_V2; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_LATM; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_LATM_HE_V1; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_LATM_HE_V2; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_LATM_LC; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_LC; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_LD; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_LTP; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_MAIN; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_SCALABLE; + enum_constant public static final android.audio.policy.configuration.AudioFormat 
AUDIO_FORMAT_AAC_SSR; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_XHE; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AC3; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AC4; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AC4_L4; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_ALAC; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AMR_NB; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AMR_WB; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AMR_WB_PLUS; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_APE; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_APTX; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_APTX_ADAPTIVE; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_APTX_ADAPTIVE_QLEA; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_APTX_ADAPTIVE_R4; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_APTX_HD; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_APTX_TWSP; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_CELT; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_DEFAULT; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_DOLBY_TRUEHD; + enum_constant public static final 
android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_DRA; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_DSD; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_DTS; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_DTS_HD; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_DTS_HD_MA; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_DTS_UHD; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_DTS_UHD_P2; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_EVRC; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_EVRCB; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_EVRCNW; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_EVRCWB; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_E_AC3; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_E_AC3_JOC; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_FLAC; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_HE_AAC_V1; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_HE_AAC_V2; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_IAMF_BASE_AAC; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_IAMF_BASE_ENHANCED_AAC; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_IAMF_BASE_ENHANCED_FLAC; + enum_constant 
public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_IAMF_BASE_ENHANCED_OPUS; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_IAMF_BASE_ENHANCED_PCM; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_IAMF_BASE_FLAC; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_IAMF_BASE_OPUS; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_IAMF_BASE_PCM; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_IAMF_SIMPLE_AAC; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_IAMF_SIMPLE_FLAC; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_IAMF_SIMPLE_OPUS; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_IAMF_SIMPLE_PCM; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_IEC60958; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_IEC61937; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_LC3; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_LDAC; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_LHDC; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_LHDC_LL; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_MAT; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_MAT_1_0; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_MAT_2_0; + enum_constant public static final 
android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_MAT_2_1; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_MP2; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_MP3; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_MPEGH_BL_L3; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_MPEGH_BL_L4; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_MPEGH_LC_L3; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_MPEGH_LC_L4; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_OPUS; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_PCM_16_BIT; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_PCM_24_BIT_PACKED; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_PCM_32_BIT; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_PCM_8_24_BIT; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_PCM_8_BIT; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_PCM_FLOAT; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_QCELP; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_SBC; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_VORBIS; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_WMA; + enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_WMA_PRO; + } + + public enum 
AudioGainMode { + method @NonNull public String getRawName(); + enum_constant public static final android.audio.policy.configuration.AudioGainMode AUDIO_GAIN_MODE_CHANNELS; + enum_constant public static final android.audio.policy.configuration.AudioGainMode AUDIO_GAIN_MODE_JOINT; + enum_constant public static final android.audio.policy.configuration.AudioGainMode AUDIO_GAIN_MODE_RAMP; + } + + public enum AudioInOutFlag { + method @NonNull public String getRawName(); + enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_INPUT_FLAG_DIRECT; + enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_INPUT_FLAG_FAST; + enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_INPUT_FLAG_HOTWORD_TAP; + enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_INPUT_FLAG_HW_AV_SYNC; + enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_INPUT_FLAG_HW_HOTWORD; + enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_INPUT_FLAG_HW_LOOKBACK; + enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_INPUT_FLAG_MMAP_NOIRQ; + enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_INPUT_FLAG_RAW; + enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_INPUT_FLAG_SYNC; + enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_INPUT_FLAG_ULTRASOUND; + enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_INPUT_FLAG_VOIP_TX; + enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_OUTPUT_FLAG_BIT_PERFECT; + enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD; + enum_constant public static final 
android.audio.policy.configuration.AudioInOutFlag AUDIO_OUTPUT_FLAG_DEEP_BUFFER; + enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_OUTPUT_FLAG_DIRECT; + enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_OUTPUT_FLAG_DIRECT_PCM; + enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_OUTPUT_FLAG_FAST; + enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_OUTPUT_FLAG_GAPLESS_OFFLOAD; + enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_OUTPUT_FLAG_HW_AV_SYNC; + enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO; + enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_OUTPUT_FLAG_INCALL_MUSIC; + enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_OUTPUT_FLAG_MMAP_NOIRQ; + enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_OUTPUT_FLAG_NON_BLOCKING; + enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_OUTPUT_FLAG_PRIMARY; + enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_OUTPUT_FLAG_RAW; + enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_OUTPUT_FLAG_SPATIALIZER; + enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_OUTPUT_FLAG_SYNC; + enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_OUTPUT_FLAG_TTS; + enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_OUTPUT_FLAG_ULTRASOUND; + enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_OUTPUT_FLAG_VOIP_RX; + } + + public class AudioPolicyConfiguration { + ctor public 
AudioPolicyConfiguration(); + method @Nullable public android.audio.policy.configuration.GlobalConfiguration getGlobalConfiguration(); + method @Nullable public java.util.List getModules(); + method @Nullable public android.audio.policy.configuration.SurroundSound getSurroundSound(); + method @Nullable public android.audio.policy.configuration.Version getVersion(); + method @Nullable public java.util.List getVolumes(); + method public void setGlobalConfiguration(@Nullable android.audio.policy.configuration.GlobalConfiguration); + method public void setSurroundSound(@Nullable android.audio.policy.configuration.SurroundSound); + method public void setVersion(@Nullable android.audio.policy.configuration.Version); + } + + public enum AudioSource { + method @NonNull public String getRawName(); + enum_constant public static final android.audio.policy.configuration.AudioSource AUDIO_SOURCE_CAMCORDER; + enum_constant public static final android.audio.policy.configuration.AudioSource AUDIO_SOURCE_DEFAULT; + enum_constant public static final android.audio.policy.configuration.AudioSource AUDIO_SOURCE_ECHO_REFERENCE; + enum_constant public static final android.audio.policy.configuration.AudioSource AUDIO_SOURCE_FM_TUNER; + enum_constant public static final android.audio.policy.configuration.AudioSource AUDIO_SOURCE_HOTWORD; + enum_constant public static final android.audio.policy.configuration.AudioSource AUDIO_SOURCE_MIC; + enum_constant public static final android.audio.policy.configuration.AudioSource AUDIO_SOURCE_REMOTE_SUBMIX; + enum_constant public static final android.audio.policy.configuration.AudioSource AUDIO_SOURCE_ULTRASOUND; + enum_constant public static final android.audio.policy.configuration.AudioSource AUDIO_SOURCE_UNPROCESSED; + enum_constant public static final android.audio.policy.configuration.AudioSource AUDIO_SOURCE_VOICE_CALL; + enum_constant public static final android.audio.policy.configuration.AudioSource AUDIO_SOURCE_VOICE_COMMUNICATION; + 
enum_constant public static final android.audio.policy.configuration.AudioSource AUDIO_SOURCE_VOICE_DOWNLINK; + enum_constant public static final android.audio.policy.configuration.AudioSource AUDIO_SOURCE_VOICE_PERFORMANCE; + enum_constant public static final android.audio.policy.configuration.AudioSource AUDIO_SOURCE_VOICE_RECOGNITION; + enum_constant public static final android.audio.policy.configuration.AudioSource AUDIO_SOURCE_VOICE_UPLINK; + } + + public enum AudioStreamType { + method @NonNull public String getRawName(); + enum_constant public static final android.audio.policy.configuration.AudioStreamType AUDIO_STREAM_ACCESSIBILITY; + enum_constant public static final android.audio.policy.configuration.AudioStreamType AUDIO_STREAM_ALARM; + enum_constant public static final android.audio.policy.configuration.AudioStreamType AUDIO_STREAM_ASSISTANT; + enum_constant public static final android.audio.policy.configuration.AudioStreamType AUDIO_STREAM_BLUETOOTH_SCO; + enum_constant public static final android.audio.policy.configuration.AudioStreamType AUDIO_STREAM_CALL_ASSISTANT; + enum_constant public static final android.audio.policy.configuration.AudioStreamType AUDIO_STREAM_DTMF; + enum_constant public static final android.audio.policy.configuration.AudioStreamType AUDIO_STREAM_ENFORCED_AUDIBLE; + enum_constant public static final android.audio.policy.configuration.AudioStreamType AUDIO_STREAM_MUSIC; + enum_constant public static final android.audio.policy.configuration.AudioStreamType AUDIO_STREAM_NOTIFICATION; + enum_constant public static final android.audio.policy.configuration.AudioStreamType AUDIO_STREAM_PATCH; + enum_constant public static final android.audio.policy.configuration.AudioStreamType AUDIO_STREAM_REROUTING; + enum_constant public static final android.audio.policy.configuration.AudioStreamType AUDIO_STREAM_RING; + enum_constant public static final android.audio.policy.configuration.AudioStreamType AUDIO_STREAM_SYSTEM; + enum_constant public 
static final android.audio.policy.configuration.AudioStreamType AUDIO_STREAM_TTS; + enum_constant public static final android.audio.policy.configuration.AudioStreamType AUDIO_STREAM_VOICE_CALL; + } + + public enum DeviceCategory { + method @NonNull public String getRawName(); + enum_constant public static final android.audio.policy.configuration.DeviceCategory DEVICE_CATEGORY_EARPIECE; + enum_constant public static final android.audio.policy.configuration.DeviceCategory DEVICE_CATEGORY_EXT_MEDIA; + enum_constant public static final android.audio.policy.configuration.DeviceCategory DEVICE_CATEGORY_HEADSET; + enum_constant public static final android.audio.policy.configuration.DeviceCategory DEVICE_CATEGORY_HEARING_AID; + enum_constant public static final android.audio.policy.configuration.DeviceCategory DEVICE_CATEGORY_SPEAKER; + } + + public class DevicePorts { + ctor public DevicePorts(); + method @Nullable public java.util.List getDevicePort(); + } + + public static class DevicePorts.DevicePort { + ctor public DevicePorts.DevicePort(); + method @Nullable public String getAddress(); + method @Nullable public java.util.List getEncodedFormats(); + method @Nullable public android.audio.policy.configuration.Gains getGains(); + method @Nullable public java.util.List getProfile(); + method @Nullable public android.audio.policy.configuration.Role getRole(); + method @Nullable public String getTagName(); + method @Nullable public String getType(); + method @Nullable public boolean get_default(); + method public void setAddress(@Nullable String); + method public void setEncodedFormats(@Nullable java.util.List); + method public void setGains(@Nullable android.audio.policy.configuration.Gains); + method public void setRole(@Nullable android.audio.policy.configuration.Role); + method public void setTagName(@Nullable String); + method public void setType(@Nullable String); + method public void set_default(@Nullable boolean); + } + + public enum EngineSuffix { + method @NonNull 
public String getRawName(); + enum_constant public static final android.audio.policy.configuration.EngineSuffix _default; + enum_constant public static final android.audio.policy.configuration.EngineSuffix configurable; + } + + public class Gains { + ctor public Gains(); + method @Nullable public java.util.List getGain(); + } + + public static class Gains.Gain { + ctor public Gains.Gain(); + method @Nullable public android.audio.policy.configuration.AudioChannelMask getChannel_mask(); + method @Nullable public int getDefaultValueMB(); + method @Nullable public int getMaxRampMs(); + method @Nullable public int getMaxValueMB(); + method @Nullable public int getMinRampMs(); + method @Nullable public int getMinValueMB(); + method @Nullable public java.util.List getMode(); + method @Nullable public int getStepValueMB(); + method @Nullable public boolean getUseForVolume(); + method public void setChannel_mask(@Nullable android.audio.policy.configuration.AudioChannelMask); + method public void setDefaultValueMB(@Nullable int); + method public void setMaxRampMs(@Nullable int); + method public void setMaxValueMB(@Nullable int); + method public void setMinRampMs(@Nullable int); + method public void setMinValueMB(@Nullable int); + method public void setMode(@Nullable java.util.List); + method public void setStepValueMB(@Nullable int); + method public void setUseForVolume(@Nullable boolean); + } + + public class GlobalConfiguration { + ctor public GlobalConfiguration(); + method @Nullable public boolean getCall_screen_mode_supported(); + method @Nullable public android.audio.policy.configuration.EngineSuffix getEngine_library(); + method @Nullable public boolean getSpeaker_drc_enabled(); + method public void setCall_screen_mode_supported(@Nullable boolean); + method public void setEngine_library(@Nullable android.audio.policy.configuration.EngineSuffix); + method public void setSpeaker_drc_enabled(@Nullable boolean); + } + + public enum HalVersion { + method @NonNull public 
String getRawName(); + enum_constant public static final android.audio.policy.configuration.HalVersion _2_0; + enum_constant public static final android.audio.policy.configuration.HalVersion _3_0; + } + + public class MixPorts { + ctor public MixPorts(); + method @Nullable public java.util.List getMixPort(); + } + + public static class MixPorts.MixPort { + ctor public MixPorts.MixPort(); + method @Nullable public java.util.List getFlags(); + method @Nullable public android.audio.policy.configuration.Gains getGains(); + method @Nullable public long getMaxActiveCount(); + method @Nullable public long getMaxOpenCount(); + method @Nullable public String getName(); + method @Nullable public java.util.List getProfile(); + method @Nullable public long getRecommendedMuteDurationMs(); + method @Nullable public android.audio.policy.configuration.Role getRole(); + method public void setFlags(@Nullable java.util.List); + method public void setGains(@Nullable android.audio.policy.configuration.Gains); + method public void setMaxActiveCount(@Nullable long); + method public void setMaxOpenCount(@Nullable long); + method public void setName(@Nullable String); + method public void setRecommendedMuteDurationMs(@Nullable long); + method public void setRole(@Nullable android.audio.policy.configuration.Role); + } + + public enum MixType { + method @NonNull public String getRawName(); + enum_constant public static final android.audio.policy.configuration.MixType mix; + enum_constant public static final android.audio.policy.configuration.MixType mux; + } + + public class Modules { + ctor public Modules(); + method @Nullable public java.util.List getModule(); + } + + public static class Modules.Module { + ctor public Modules.Module(); + method @Nullable public android.audio.policy.configuration.AttachedDevices getAttachedDevices(); + method @Nullable public String getDefaultOutputDevice(); + method @Nullable public android.audio.policy.configuration.DevicePorts getDevicePorts(); + method 
@Nullable public android.audio.policy.configuration.HalVersion getHalVersion(); + method @Nullable public android.audio.policy.configuration.MixPorts getMixPorts(); + method @Nullable public String getName(); + method @Nullable public android.audio.policy.configuration.Routes getRoutes(); + method public void setAttachedDevices(@Nullable android.audio.policy.configuration.AttachedDevices); + method public void setDefaultOutputDevice(@Nullable String); + method public void setDevicePorts(@Nullable android.audio.policy.configuration.DevicePorts); + method public void setHalVersion(@Nullable android.audio.policy.configuration.HalVersion); + method public void setMixPorts(@Nullable android.audio.policy.configuration.MixPorts); + method public void setName(@Nullable String); + method public void setRoutes(@Nullable android.audio.policy.configuration.Routes); + } + + public class Profile { + ctor public Profile(); + method @Nullable public java.util.List getChannelMasks(); + method @Nullable public String getFormat(); + method @Nullable public java.util.List getSamplingRates(); + method public void setChannelMasks(@Nullable java.util.List); + method public void setFormat(@Nullable String); + method public void setSamplingRates(@Nullable java.util.List); + } + + public class Reference { + ctor public Reference(); + method @Nullable public String getName(); + method @Nullable public java.util.List getPoint(); + method public void setName(@Nullable String); + } + + public enum Role { + method @NonNull public String getRawName(); + enum_constant public static final android.audio.policy.configuration.Role sink; + enum_constant public static final android.audio.policy.configuration.Role source; + } + + public class Routes { + ctor public Routes(); + method @Nullable public java.util.List getRoute(); + } + + public static class Routes.Route { + ctor public Routes.Route(); + method @Nullable public String getSink(); + method @Nullable public String getSources(); + method 
@Nullable public android.audio.policy.configuration.MixType getType(); + method public void setSink(@Nullable String); + method public void setSources(@Nullable String); + method public void setType(@Nullable android.audio.policy.configuration.MixType); + } + + public class SurroundFormats { + ctor public SurroundFormats(); + method @Nullable public java.util.List getFormat(); + } + + public static class SurroundFormats.Format { + ctor public SurroundFormats.Format(); + method @Nullable public String getName(); + method @Nullable public java.util.List getSubformats(); + method public void setName(@Nullable String); + method public void setSubformats(@Nullable java.util.List); + } + + public class SurroundSound { + ctor public SurroundSound(); + method @Nullable public android.audio.policy.configuration.SurroundFormats getFormats(); + method public void setFormats(@Nullable android.audio.policy.configuration.SurroundFormats); + } + + public enum Version { + method @NonNull public String getRawName(); + enum_constant public static final android.audio.policy.configuration.Version _7_0; + enum_constant public static final android.audio.policy.configuration.Version _7_1; + } + + public class Volume { + ctor public Volume(); + method @Nullable public android.audio.policy.configuration.DeviceCategory getDeviceCategory(); + method @Nullable public java.util.List getPoint(); + method @Nullable public String getRef(); + method @Nullable public android.audio.policy.configuration.AudioStreamType getStream(); + method public void setDeviceCategory(@Nullable android.audio.policy.configuration.DeviceCategory); + method public void setRef(@Nullable String); + method public void setStream(@Nullable android.audio.policy.configuration.AudioStreamType); + } + + public class Volumes { + ctor public Volumes(); + method @Nullable public java.util.List getReference(); + method @Nullable public java.util.List getVolume(); + } + + public class XmlParser { + ctor public XmlParser(); + method 
@Nullable public static android.audio.policy.configuration.AudioPolicyConfiguration read(@NonNull java.io.InputStream) throws javax.xml.datatype.DatatypeConfigurationException, java.io.IOException, org.xmlpull.v1.XmlPullParserException; + method @Nullable public static String readText(@NonNull org.xmlpull.v1.XmlPullParser) throws java.io.IOException, org.xmlpull.v1.XmlPullParserException; + method public static void skip(@NonNull org.xmlpull.v1.XmlPullParser) throws java.io.IOException, org.xmlpull.v1.XmlPullParserException; + } + +} + diff --git a/audio/config/audioPolicy/api/last_current.txt b/audio/config/audioPolicy/api/last_current.txt new file mode 100644 index 0000000..e69de29 diff --git a/audio/config/audioPolicy/api/last_removed.txt b/audio/config/audioPolicy/api/last_removed.txt new file mode 100644 index 0000000..e69de29 diff --git a/audio/config/audioPolicy/api/removed.txt b/audio/config/audioPolicy/api/removed.txt new file mode 100644 index 0000000..d802177 --- /dev/null +++ b/audio/config/audioPolicy/api/removed.txt @@ -0,0 +1 @@ +// Signature format: 2.0 diff --git a/audio/config/audioPolicy/audio_policy_configuration.xsd b/audio/config/audioPolicy/audio_policy_configuration.xsd new file mode 100644 index 0000000..94856a5 --- /dev/null +++ b/audio/config/audioPolicy/audio_policy_configuration.xsd @@ -0,0 +1,778 @@ + + + + + + + + + + + + + + Version of the interface the hal implements. Note that this + relates to legacy HAL API versions since HIDL APIs are versioned + using other mechanisms. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + There should be one section per audio HW module present on the platform. + Each contains two mandatory tags: “halVersion” and “name”. + The module "name" is the same as in previous .conf file. + Each module must contain the following sections: + - : a list of device descriptors for all + input and output devices accessible via this module. 
+ This contains both permanently attached devices and removable devices. + - : listing all output and input streams exposed by the audio HAL + - : list of possible connections between input + and output devices or between stream and devices. + A is defined by a set of 3 attributes: + -"type": mux|mix means all sources are mutual exclusive (mux) or can be mixed (mix) + -"sink": the sink involved in this route + -"sources": all the sources than can be connected to the sink via this route + - : permanently attached devices. + The attachedDevices section is a list of devices names. + Their names correspond to device names defined in "devicePorts" section. + - is the device to be used when no policy rule applies + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + The flags indicate suggested stream attributes supported by the profile. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Audio channel mask specifies presence of particular channels. + There are two representations: + - representation position (traditional discrete channel specification, + e.g. "left", "right"); + - indexed (this is similar to "tracks" in audio mixing, channels + are represented using numbers). 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + The default device will be used if multiple have the same type + and no explicit route request exists for a specific device of + that type. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + List all available sources for a given sink. + + + + + + + + + + + + + + + + + + + + + + Comma separated pair of number. + The fist one is the framework level (between 0 and 100). + The second one is the volume to send to the HAL. + The framework will interpolate volumes not specified. + Their MUST be at least 2 points specified. + + + + + + + + + + Audio stream type describing the intended use case of a stream. + Please consult frameworks/base/media/java/android/media/AudioSystem.java + for the description of each value. + + + + + + + + + + + + + + + + + + + + + + + + An audio source defines the intended use case for the sound being recorded. + Please consult frameworks/base/media/java/android/media/MediaRecorder.java + for the description of each value. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Volume section defines a volume curve for a given use case and device category. + It contains a list of points of this curve expressing the attenuation in Millibels + for a given volume index from 0 to 100. + + 0,-9600 + 100,0 + + + It may also reference a reference/@name to avoid duplicating curves. + + + 0,-9600 + 100,0 + + + + + + + + + + + + + + + + + + + + Surround Sound section provides configuration related to handling of + multi-channel formats. 
+ + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/audio/config/audioPolicy/capengine/Android.bp b/audio/config/audioPolicy/capengine/Android.bp new file mode 100644 index 0000000..cb99923 --- /dev/null +++ b/audio/config/audioPolicy/capengine/Android.bp @@ -0,0 +1,17 @@ +package { + default_team: "trendy_team_android_media_audio_framework", + // See: http://go/android-license-faq + // A large-scale-change added 'default_applicable_licenses' to import + // all of the 'license_kinds' from "hardware_interfaces_license" + // to get the below license kinds: + // SPDX-license-identifier-Apache-2.0 + default_applicable_licenses: ["hardware_interfaces_license"], +} + +xsd_config { + name: "audio_policy_capengine_configuration_aidl_default", + srcs: ["PolicyConfigurableDomains.xsd"], + package_name: "android.audio.policy.capengine.configuration", + nullability: true, + root_elements: ["ConfigurableDomains"], +} diff --git a/audio/config/audioPolicy/capengine/PolicyConfigurableDomains.xsd b/audio/config/audioPolicy/capengine/PolicyConfigurableDomains.xsd new file mode 100644 index 0000000..4e7c0bb --- /dev/null +++ b/audio/config/audioPolicy/capengine/PolicyConfigurableDomains.xsd @@ -0,0 +1,467 @@ + + + + + + + See http://www.w3.org/XML/1998/namespace.html and + http://www.w3.org/TR/REC-xml for information about this namespace. + + This schema document describes the XML namespace, in a form + suitable for import by other schema documents. + + Note that local names in this namespace are intended to be defined + only by the World Wide Web Consortium or its subgroups. The + following names are currently defined in this namespace and should + not be used with conflicting semantics by any Working Group, + specification, or document instance: + + base (as an attribute name): denotes an attribute whose value + provides a URI to be used as the base for interpreting any + relative URIs in the scope of the element on which it + appears; its value is inherited. 
This name is reserved + by virtue of its definition in the XML Base specification. + + id (as an attribute name): denotes an attribute whose value + should be interpreted as if declared to be of type ID. + The xml:id specification is not yet a W3C Recommendation, + but this attribute is included here to facilitate experimentation + with the mechanisms it proposes. Note that it is _not_ included + in the specialAttrs attribute group. + + lang (as an attribute name): denotes an attribute whose value + is a language code for the natural language of the content of + any element; its value is inherited. This name is reserved + by virtue of its definition in the XML specification. + + space (as an attribute name): denotes an attribute whose + value is a keyword indicating what whitespace processing + discipline is intended for the content of the element; its + value is inherited. This name is reserved by virtue of its + definition in the XML specification. + + Father (in any context at all): denotes Jon Bosak, the chair of + the original XML Working Group. This name is reserved by + the following decision of the W3C XML Plenary and + XML Coordination groups: + + In appreciation for his vision, leadership and dedication + the W3C XML Plenary on this 10th day of February, 2000 + reserves for Jon Bosak in perpetuity the XML name + xml:Father + + + + + This schema defines attributes and an attribute group + suitable for use by + schemas wishing to allow xml:base, xml:lang, xml:space or xml:id + attributes on elements they define. + + To enable this, such a schema must import this schema + for the XML namespace, e.g. as follows: + <schema . . .> + . . . + <import namespace="http://www.w3.org/XML/1998/namespace" + schemaLocation="http://www.w3.org/2005/08/xml.xsd"/> + + Subsequently, qualified reference to any of the attributes + or the group defined below will have the desired effect, e.g. + + <type . . .> + . . . 
+ <attributeGroup ref="xml:specialAttrs"/> + + will define a type which will schema-validate an instance + element with any of those attributes + + + + In keeping with the XML Schema WG's standard versioning + policy, this schema document will persist at + http://www.w3.org/2005/08/xml.xsd. + At the date of issue it can also be found at + http://www.w3.org/2001/xml.xsd. + The schema document at that URI may however change in the future, + in order to remain compatible with the latest version of XML Schema + itself, or with the XML namespace itself. In other words, if the XML + Schema or XML namespaces change, the version of this document at + http://www.w3.org/2001/xml.xsd will change + accordingly; the version at + http://www.w3.org/2005/08/xml.xsd will not change. + + + + + + Attempting to install the relevant ISO 2- and 3-letter + codes as the enumerated possible values is probably never + going to be a realistic possibility. See + RFC 3066 at http://www.ietf.org/rfc/rfc3066.txt and the IANA registry + at http://www.iana.org/assignments/lang-tag-apps.htm for + further information. + + The union allows for the 'un-declaration' of xml:lang with + the empty string. + + + + + + + + + + + + + + + + + + + + + + + + See http://www.w3.org/TR/xmlbase/ for + information about this attribute. + + + + + + See http://www.w3.org/TR/xml-id/ for + information about this attribute. 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/audio/config/audioPolicy/capengine/api/current.txt b/audio/config/audioPolicy/capengine/api/current.txt new file mode 100644 index 0000000..481abbf --- /dev/null +++ b/audio/config/audioPolicy/capengine/api/current.txt @@ -0,0 +1,264 @@ +// Signature format: 2.0 +package android.audio.policy.capengine.configuration { + + public class BitParameterBlockType { + ctor public BitParameterBlockType(); + method @Nullable public java.util.List getBitParameter(); + method @Nullable public android.audio.policy.capengine.configuration.ParameterNameEnumType getName(); + method public void setName(@Nullable android.audio.policy.capengine.configuration.ParameterNameEnumType); + } + + public class BooleanParameterType { + ctor public BooleanParameterType(); + method @Nullable public android.audio.policy.capengine.configuration.ParameterNameEnumType getName(); + method @Nullable public String getValue(); + method public void setName(@Nullable android.audio.policy.capengine.configuration.ParameterNameEnumType); + method public void setValue(@Nullable String); + } + + public class ComponentType { + ctor public ComponentType(); + method @Nullable public String getName(); + method @Nullable public java.util.List getSubsystem_optional(); + method public void setName(@Nullable String); + } + + public class CompoundRuleType { + ctor public CompoundRuleType(); + 
method @Nullable public android.audio.policy.capengine.configuration.CompoundRuleType getCompoundRule_optional(); + method @Nullable public android.audio.policy.capengine.configuration.SelectionCriterionRuleType getSelectionCriterionRule_optional(); + method @Nullable public android.audio.policy.capengine.configuration.TypeEnum getType(); + method public void setCompoundRule_optional(@Nullable android.audio.policy.capengine.configuration.CompoundRuleType); + method public void setSelectionCriterionRule_optional(@Nullable android.audio.policy.capengine.configuration.SelectionCriterionRuleType); + method public void setType(@Nullable android.audio.policy.capengine.configuration.TypeEnum); + } + + public class ConfigurableDomainType { + ctor public ConfigurableDomainType(); + method @Nullable public android.audio.policy.capengine.configuration.ConfigurableElementsType getConfigurableElements(); + method @Nullable public android.audio.policy.capengine.configuration.ConfigurationsType getConfigurations(); + method @Nullable public String getName(); + method @Nullable public boolean getSequenceAware(); + method @Nullable public android.audio.policy.capengine.configuration.SettingsType getSettings(); + method public void setConfigurableElements(@Nullable android.audio.policy.capengine.configuration.ConfigurableElementsType); + method public void setConfigurations(@Nullable android.audio.policy.capengine.configuration.ConfigurationsType); + method public void setName(@Nullable String); + method public void setSequenceAware(@Nullable boolean); + method public void setSettings(@Nullable android.audio.policy.capengine.configuration.SettingsType); + } + + public class ConfigurableDomains { + ctor public ConfigurableDomains(); + method @Nullable public java.util.List getConfigurableDomain(); + method @Nullable public String getSystemClassName(); + method public void setSystemClassName(@Nullable String); + } + + public class ConfigurableElementSettingsType { + ctor public 
ConfigurableElementSettingsType(); + method @Nullable public android.audio.policy.capengine.configuration.BitParameterBlockType getBitParameterBlock_optional(); + method @Nullable public android.audio.policy.capengine.configuration.IntegerParameterType getBitParameter_optional(); + method @Nullable public android.audio.policy.capengine.configuration.BooleanParameterType getBooleanParameter_optional(); + method @Nullable public android.audio.policy.capengine.configuration.EnumParameterType getEnumParameter_optional(); + method @Nullable public android.audio.policy.capengine.configuration.PointParameterType getFixedPointParameter_optional(); + method @Nullable public android.audio.policy.capengine.configuration.PointParameterType getFloatingPointParameter_optional(); + method @Nullable public android.audio.policy.capengine.configuration.IntegerParameterType getIntegerParameter_optional(); + method @Nullable public android.audio.policy.capengine.configuration.ParameterBlockType getParameterBlock_optional(); + method @Nullable public String getPath(); + method @Nullable public android.audio.policy.capengine.configuration.StringParameterType getStringParameter_optional(); + method public void setBitParameterBlock_optional(@Nullable android.audio.policy.capengine.configuration.BitParameterBlockType); + method public void setBitParameter_optional(@Nullable android.audio.policy.capengine.configuration.IntegerParameterType); + method public void setBooleanParameter_optional(@Nullable android.audio.policy.capengine.configuration.BooleanParameterType); + method public void setEnumParameter_optional(@Nullable android.audio.policy.capengine.configuration.EnumParameterType); + method public void setFixedPointParameter_optional(@Nullable android.audio.policy.capengine.configuration.PointParameterType); + method public void setFloatingPointParameter_optional(@Nullable android.audio.policy.capengine.configuration.PointParameterType); + method public void 
setIntegerParameter_optional(@Nullable android.audio.policy.capengine.configuration.IntegerParameterType); + method public void setParameterBlock_optional(@Nullable android.audio.policy.capengine.configuration.ParameterBlockType); + method public void setPath(@Nullable String); + method public void setStringParameter_optional(@Nullable android.audio.policy.capengine.configuration.StringParameterType); + } + + public class ConfigurableElementsType { + ctor public ConfigurableElementsType(); + method @Nullable public java.util.List getConfigurableElement(); + } + + public static class ConfigurableElementsType.ConfigurableElement { + ctor public ConfigurableElementsType.ConfigurableElement(); + method @Nullable public String getPath(); + method public void setPath(@Nullable String); + } + + public class ConfigurationsType { + ctor public ConfigurationsType(); + method @Nullable public java.util.List getConfiguration(); + } + + public static class ConfigurationsType.Configuration { + ctor public ConfigurationsType.Configuration(); + method @Nullable public android.audio.policy.capengine.configuration.CompoundRuleType getCompoundRule(); + method @Nullable public String getName(); + method public void setCompoundRule(@Nullable android.audio.policy.capengine.configuration.CompoundRuleType); + method public void setName(@Nullable String); + } + + public class EnumParameterType { + ctor public EnumParameterType(); + method @Nullable public android.audio.policy.capengine.configuration.ParameterNameEnumType getName(); + method @Nullable public String getValue(); + method public void setName(@Nullable android.audio.policy.capengine.configuration.ParameterNameEnumType); + method public void setValue(@Nullable String); + } + + public class IntegerParameterType { + ctor public IntegerParameterType(); + method @Nullable public android.audio.policy.capengine.configuration.ParameterNameEnumType getName(); + method @Nullable public String getValue(); + method public void 
setName(@Nullable android.audio.policy.capengine.configuration.ParameterNameEnumType); + method public void setValue(@Nullable String); + } + + public enum LangEnum { + method @NonNull public String getRawName(); + enum_constant public static final android.audio.policy.capengine.configuration.LangEnum EMPTY; + } + + public enum MatchesWhenEnum { + method @NonNull public String getRawName(); + enum_constant public static final android.audio.policy.capengine.configuration.MatchesWhenEnum Excludes; + enum_constant public static final android.audio.policy.capengine.configuration.MatchesWhenEnum Includes; + enum_constant public static final android.audio.policy.capengine.configuration.MatchesWhenEnum Is; + enum_constant public static final android.audio.policy.capengine.configuration.MatchesWhenEnum IsNot; + } + + public class ParameterBlockType { + ctor public ParameterBlockType(); + method @Nullable public android.audio.policy.capengine.configuration.BitParameterBlockType getBitParameterBlock_optional(); + method @Nullable public android.audio.policy.capengine.configuration.BooleanParameterType getBooleanParameter_optional(); + method @Nullable public android.audio.policy.capengine.configuration.EnumParameterType getEnumParameter_optional(); + method @Nullable public android.audio.policy.capengine.configuration.PointParameterType getFixedPointParameter_optional(); + method @Nullable public android.audio.policy.capengine.configuration.PointParameterType getFloatingPointParameter_optional(); + method @Nullable public android.audio.policy.capengine.configuration.IntegerParameterType getIntegerParameter_optional(); + method @Nullable public String getName(); + method @Nullable public android.audio.policy.capengine.configuration.ParameterBlockType getParameterBlock_optional(); + method @Nullable public android.audio.policy.capengine.configuration.StringParameterType getStringParameter_optional(); + method public void setBitParameterBlock_optional(@Nullable 
android.audio.policy.capengine.configuration.BitParameterBlockType); + method public void setBooleanParameter_optional(@Nullable android.audio.policy.capengine.configuration.BooleanParameterType); + method public void setEnumParameter_optional(@Nullable android.audio.policy.capengine.configuration.EnumParameterType); + method public void setFixedPointParameter_optional(@Nullable android.audio.policy.capengine.configuration.PointParameterType); + method public void setFloatingPointParameter_optional(@Nullable android.audio.policy.capengine.configuration.PointParameterType); + method public void setIntegerParameter_optional(@Nullable android.audio.policy.capengine.configuration.IntegerParameterType); + method public void setName(@Nullable String); + method public void setParameterBlock_optional(@Nullable android.audio.policy.capengine.configuration.ParameterBlockType); + method public void setStringParameter_optional(@Nullable android.audio.policy.capengine.configuration.StringParameterType); + } + + public enum ParameterNameEnumType { + method @NonNull public String getRawName(); + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType ambient; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType anlg_dock_headset; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType aux_line; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType back_mic; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType ble_broadcast; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType ble_headset; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType ble_speaker; + enum_constant public static final 
android.audio.policy.capengine.configuration.ParameterNameEnumType bluetooth_a2dp; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType bluetooth_a2dp_headphones; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType bluetooth_a2dp_speaker; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType bluetooth_ble; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType bluetooth_sco; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType bluetooth_sco_carkit; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType bluetooth_sco_headset; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType builtin_mic; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType bus; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType communication; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType device_address; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType dgtl_dock_headset; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType earpiece; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType echo_canceller; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType echo_reference; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType fm; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType fm_tuner; + enum_constant 
public static final android.audio.policy.capengine.configuration.ParameterNameEnumType hdmi; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType hdmi_arc; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType hdmi_earc; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType hearing_aid; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType ip; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType line; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType loopback; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType proxy; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType remote_submix; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType spdif; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType speaker; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType speaker_safe; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType stub; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType telephony_rx; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType telephony_tx; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType tv_tuner; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType usb_accessory; + enum_constant public static final 
android.audio.policy.capengine.configuration.ParameterNameEnumType usb_device; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType usb_headset; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType volume_profile; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType wired_headphone; + enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType wired_headset; + } + + public class PointParameterType { + ctor public PointParameterType(); + method @Nullable public android.audio.policy.capengine.configuration.ParameterNameEnumType getName(); + method @Nullable public String getValue(); + method public void setName(@Nullable android.audio.policy.capengine.configuration.ParameterNameEnumType); + method public void setValue(@Nullable String); + } + + public class SelectionCriterionRuleType { + ctor public SelectionCriterionRuleType(); + method @Nullable public android.audio.policy.capengine.configuration.MatchesWhenEnum getMatchesWhen(); + method @Nullable public String getSelectionCriterion(); + method @Nullable public String getValue(); + method public void setMatchesWhen(@Nullable android.audio.policy.capengine.configuration.MatchesWhenEnum); + method public void setSelectionCriterion(@Nullable String); + method public void setValue(@Nullable String); + } + + public class SettingsType { + ctor public SettingsType(); + method @Nullable public java.util.List getConfiguration(); + } + + public static class SettingsType.Configuration { + ctor public SettingsType.Configuration(); + method @Nullable public java.util.List getConfigurableElement(); + method @Nullable public String getName(); + method public void setName(@Nullable String); + } + + public enum SpaceEnum { + method @NonNull public String getRawName(); + enum_constant public static final 
android.audio.policy.capengine.configuration.SpaceEnum _default; + enum_constant public static final android.audio.policy.capengine.configuration.SpaceEnum preserve; + } + + public class StringParameterType { + ctor public StringParameterType(); + method @Nullable public android.audio.policy.capengine.configuration.ParameterNameEnumType getName(); + method @Nullable public String getValue(); + method public void setName(@Nullable android.audio.policy.capengine.configuration.ParameterNameEnumType); + method public void setValue(@Nullable String); + } + + public enum TypeEnum { + method @NonNull public String getRawName(); + enum_constant public static final android.audio.policy.capengine.configuration.TypeEnum All; + enum_constant public static final android.audio.policy.capengine.configuration.TypeEnum Any; + } + + public class XmlParser { + ctor public XmlParser(); + method @Nullable public static android.audio.policy.capengine.configuration.ConfigurableDomains readConfigurableDomains(@NonNull java.io.InputStream) throws javax.xml.datatype.DatatypeConfigurationException, java.io.IOException, org.xmlpull.v1.XmlPullParserException; + method @Nullable public static String readText(@NonNull org.xmlpull.v1.XmlPullParser) throws java.io.IOException, org.xmlpull.v1.XmlPullParserException; + method public static void skip(@NonNull org.xmlpull.v1.XmlPullParser) throws java.io.IOException, org.xmlpull.v1.XmlPullParserException; + } + +} + diff --git a/audio/config/audioPolicy/capengine/api/last_current.txt b/audio/config/audioPolicy/capengine/api/last_current.txt new file mode 100644 index 0000000..e69de29 diff --git a/audio/config/audioPolicy/capengine/api/last_removed.txt b/audio/config/audioPolicy/capengine/api/last_removed.txt new file mode 100644 index 0000000..e69de29 diff --git a/audio/config/audioPolicy/capengine/api/removed.txt b/audio/config/audioPolicy/capengine/api/removed.txt new file mode 100644 index 0000000..d802177 --- /dev/null +++ 
b/audio/config/audioPolicy/capengine/api/removed.txt @@ -0,0 +1 @@ +// Signature format: 2.0 diff --git a/audio/config/audioPolicy/engine/Android.bp b/audio/config/audioPolicy/engine/Android.bp new file mode 100644 index 0000000..5d62bd6 --- /dev/null +++ b/audio/config/audioPolicy/engine/Android.bp @@ -0,0 +1,16 @@ +package { + default_team: "trendy_team_android_media_audio_framework", + // See: http://go/android-license-faq + // A large-scale-change added 'default_applicable_licenses' to import + // all of the 'license_kinds' from "hardware_interfaces_license" + // to get the below license kinds: + // SPDX-license-identifier-Apache-2.0 + default_applicable_licenses: ["hardware_interfaces_license"], +} + +xsd_config { + name: "audio_policy_engine_configuration_aidl_default", + srcs: ["audio_policy_engine_configuration.xsd"], + package_name: "android.audio.policy.engine.configuration", + nullability: true, +} diff --git a/audio/config/audioPolicy/engine/api/current.txt b/audio/config/audioPolicy/engine/api/current.txt new file mode 100644 index 0000000..41cfb44 --- /dev/null +++ b/audio/config/audioPolicy/engine/api/current.txt @@ -0,0 +1,346 @@ +// Signature format: 2.0 +package android.audio.policy.engine.configuration { + + public class AttributesGroup { + ctor public AttributesGroup(); + method @Nullable public java.util.List getAttributes_optional(); + method @Nullable public android.audio.policy.engine.configuration.BundleType getBundle_optional(); + method @Nullable public android.audio.policy.engine.configuration.ContentTypeType getContentType_optional(); + method @Nullable public android.audio.policy.engine.configuration.FlagsType getFlags_optional(); + method @Nullable public android.audio.policy.engine.configuration.SourceType getSource_optional(); + method @Nullable public android.audio.policy.engine.configuration.Stream getStreamType(); + method @Nullable public android.audio.policy.engine.configuration.UsageType getUsage_optional(); + method @Nullable 
public String getVolumeGroup(); + method public void setBundle_optional(@Nullable android.audio.policy.engine.configuration.BundleType); + method public void setContentType_optional(@Nullable android.audio.policy.engine.configuration.ContentTypeType); + method public void setFlags_optional(@Nullable android.audio.policy.engine.configuration.FlagsType); + method public void setSource_optional(@Nullable android.audio.policy.engine.configuration.SourceType); + method public void setStreamType(@Nullable android.audio.policy.engine.configuration.Stream); + method public void setUsage_optional(@Nullable android.audio.policy.engine.configuration.UsageType); + method public void setVolumeGroup(@Nullable String); + } + + public class AttributesRef { + ctor public AttributesRef(); + method @Nullable public java.util.List getReference(); + } + + public class AttributesRefType { + ctor public AttributesRefType(); + method @Nullable public android.audio.policy.engine.configuration.AttributesType getAttributes(); + method @Nullable public String getName(); + method public void setAttributes(@Nullable android.audio.policy.engine.configuration.AttributesType); + method public void setName(@Nullable String); + } + + public class AttributesType { + ctor public AttributesType(); + method @Nullable public String getAttributesRef(); + method @Nullable public android.audio.policy.engine.configuration.BundleType getBundle(); + method @Nullable public android.audio.policy.engine.configuration.ContentTypeType getContentType(); + method @Nullable public android.audio.policy.engine.configuration.FlagsType getFlags(); + method @Nullable public android.audio.policy.engine.configuration.SourceType getSource(); + method @Nullable public android.audio.policy.engine.configuration.UsageType getUsage(); + method public void setAttributesRef(@Nullable String); + method public void setBundle(@Nullable android.audio.policy.engine.configuration.BundleType); + method public void setContentType(@Nullable 
android.audio.policy.engine.configuration.ContentTypeType); + method public void setFlags(@Nullable android.audio.policy.engine.configuration.FlagsType); + method public void setSource(@Nullable android.audio.policy.engine.configuration.SourceType); + method public void setUsage(@Nullable android.audio.policy.engine.configuration.UsageType); + } + + public class BundleType { + ctor public BundleType(); + method @Nullable public String getKey(); + method @Nullable public String getValue(); + method public void setKey(@Nullable String); + method public void setValue(@Nullable String); + } + + public class Configuration { + ctor public Configuration(); + method @Nullable public java.util.List getAttributesRef(); + method @Nullable public java.util.List getCriteria(); + method @Nullable public java.util.List getCriterion_types(); + method @Nullable public java.util.List getProductStrategies(); + method @Nullable public android.audio.policy.engine.configuration.Version getVersion(); + method @Nullable public java.util.List getVolumeGroups(); + method @Nullable public java.util.List getVolumes(); + method public void setVersion(@Nullable android.audio.policy.engine.configuration.Version); + } + + public enum ContentType { + method @NonNull public String getRawName(); + enum_constant public static final android.audio.policy.engine.configuration.ContentType AUDIO_CONTENT_TYPE_MOVIE; + enum_constant public static final android.audio.policy.engine.configuration.ContentType AUDIO_CONTENT_TYPE_MUSIC; + enum_constant public static final android.audio.policy.engine.configuration.ContentType AUDIO_CONTENT_TYPE_SONIFICATION; + enum_constant public static final android.audio.policy.engine.configuration.ContentType AUDIO_CONTENT_TYPE_SPEECH; + enum_constant public static final android.audio.policy.engine.configuration.ContentType AUDIO_CONTENT_TYPE_UNKNOWN; + } + + public class ContentTypeType { + ctor public ContentTypeType(); + method @Nullable public 
android.audio.policy.engine.configuration.ContentType getValue(); + method public void setValue(@Nullable android.audio.policy.engine.configuration.ContentType); + } + + public class CriteriaType { + ctor public CriteriaType(); + method @Nullable public java.util.List getCriterion(); + } + + public class CriterionType { + ctor public CriterionType(); + method @Nullable public String getName(); + method @Nullable public String getType(); + method @Nullable public String get_default(); + method public void setName(@Nullable String); + method public void setType(@Nullable String); + method public void set_default(@Nullable String); + } + + public class CriterionTypeType { + ctor public CriterionTypeType(); + method @Nullable public String getName(); + method @Nullable public android.audio.policy.engine.configuration.PfwCriterionTypeEnum getType(); + method @Nullable public android.audio.policy.engine.configuration.ValuesType getValues(); + method public void setName(@Nullable String); + method public void setType(@Nullable android.audio.policy.engine.configuration.PfwCriterionTypeEnum); + method public void setValues(@Nullable android.audio.policy.engine.configuration.ValuesType); + } + + public class CriterionTypesType { + ctor public CriterionTypesType(); + method @Nullable public java.util.List getCriterion_type(); + } + + public enum DeviceCategory { + method @NonNull public String getRawName(); + enum_constant public static final android.audio.policy.engine.configuration.DeviceCategory DEVICE_CATEGORY_EARPIECE; + enum_constant public static final android.audio.policy.engine.configuration.DeviceCategory DEVICE_CATEGORY_EXT_MEDIA; + enum_constant public static final android.audio.policy.engine.configuration.DeviceCategory DEVICE_CATEGORY_HEADSET; + enum_constant public static final android.audio.policy.engine.configuration.DeviceCategory DEVICE_CATEGORY_HEARING_AID; + enum_constant public static final android.audio.policy.engine.configuration.DeviceCategory 
DEVICE_CATEGORY_SPEAKER; + } + + public enum FlagType { + method @NonNull public String getRawName(); + enum_constant public static final android.audio.policy.engine.configuration.FlagType AUDIO_FLAG_AUDIBILITY_ENFORCED; + enum_constant public static final android.audio.policy.engine.configuration.FlagType AUDIO_FLAG_BEACON; + enum_constant public static final android.audio.policy.engine.configuration.FlagType AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY; + enum_constant public static final android.audio.policy.engine.configuration.FlagType AUDIO_FLAG_BYPASS_MUTE; + enum_constant public static final android.audio.policy.engine.configuration.FlagType AUDIO_FLAG_CAPTURE_PRIVATE; + enum_constant public static final android.audio.policy.engine.configuration.FlagType AUDIO_FLAG_DEEP_BUFFER; + enum_constant public static final android.audio.policy.engine.configuration.FlagType AUDIO_FLAG_HW_AV_SYNC; + enum_constant public static final android.audio.policy.engine.configuration.FlagType AUDIO_FLAG_HW_HOTWORD; + enum_constant public static final android.audio.policy.engine.configuration.FlagType AUDIO_FLAG_LOW_LATENCY; + enum_constant public static final android.audio.policy.engine.configuration.FlagType AUDIO_FLAG_MUTE_HAPTIC; + enum_constant public static final android.audio.policy.engine.configuration.FlagType AUDIO_FLAG_NONE; + enum_constant public static final android.audio.policy.engine.configuration.FlagType AUDIO_FLAG_NO_MEDIA_PROJECTION; + enum_constant public static final android.audio.policy.engine.configuration.FlagType AUDIO_FLAG_NO_SYSTEM_CAPTURE; + enum_constant public static final android.audio.policy.engine.configuration.FlagType AUDIO_FLAG_SCO; + enum_constant public static final android.audio.policy.engine.configuration.FlagType AUDIO_FLAG_SECURE; + } + + public class FlagsType { + ctor public FlagsType(); + method @Nullable public java.util.List getValue(); + method public void setValue(@Nullable java.util.List); + } + + public enum 
ForcedConfigCommunicationDeviceType { + method @NonNull public String getRawName(); + enum_constant public static final android.audio.policy.engine.configuration.ForcedConfigCommunicationDeviceType BT_BLE; + enum_constant public static final android.audio.policy.engine.configuration.ForcedConfigCommunicationDeviceType BT_SCO; + enum_constant public static final android.audio.policy.engine.configuration.ForcedConfigCommunicationDeviceType NONE; + enum_constant public static final android.audio.policy.engine.configuration.ForcedConfigCommunicationDeviceType SPEAKER; + enum_constant public static final android.audio.policy.engine.configuration.ForcedConfigCommunicationDeviceType WIRED_ACCESSORY; + } + + public enum ForcedConfigDockType { + method @NonNull public String getRawName(); + enum_constant public static final android.audio.policy.engine.configuration.ForcedConfigDockType ANALOG_DOCK; + enum_constant public static final android.audio.policy.engine.configuration.ForcedConfigDockType BT_CAR_DOCK; + enum_constant public static final android.audio.policy.engine.configuration.ForcedConfigDockType BT_DESK_DOCK; + enum_constant public static final android.audio.policy.engine.configuration.ForcedConfigDockType DIGITAL_DOCK; + enum_constant public static final android.audio.policy.engine.configuration.ForcedConfigDockType NONE; + enum_constant public static final android.audio.policy.engine.configuration.ForcedConfigDockType WIRED_ACCESSORY; + } + + public enum ForcedConfigMediaDeviceType { + method @NonNull public String getRawName(); + enum_constant public static final android.audio.policy.engine.configuration.ForcedConfigMediaDeviceType ANALOG_DOCK; + enum_constant public static final android.audio.policy.engine.configuration.ForcedConfigMediaDeviceType BT_A2DP; + enum_constant public static final android.audio.policy.engine.configuration.ForcedConfigMediaDeviceType DIGITAL_DOCK; + enum_constant public static final 
android.audio.policy.engine.configuration.ForcedConfigMediaDeviceType HEADPHONES; + enum_constant public static final android.audio.policy.engine.configuration.ForcedConfigMediaDeviceType NONE; + enum_constant public static final android.audio.policy.engine.configuration.ForcedConfigMediaDeviceType NO_BT_A2DP; + enum_constant public static final android.audio.policy.engine.configuration.ForcedConfigMediaDeviceType SPEAKER; + enum_constant public static final android.audio.policy.engine.configuration.ForcedConfigMediaDeviceType WIRED_ACCESSORY; + } + + public enum ForcedEncodingSourroundConfigType { + method @NonNull public String getRawName(); + enum_constant public static final android.audio.policy.engine.configuration.ForcedEncodingSourroundConfigType ALWAYS; + enum_constant public static final android.audio.policy.engine.configuration.ForcedEncodingSourroundConfigType MANUAL; + enum_constant public static final android.audio.policy.engine.configuration.ForcedEncodingSourroundConfigType NEVER; + enum_constant public static final android.audio.policy.engine.configuration.ForcedEncodingSourroundConfigType UNSPECIFIED; + } + + public enum PfwCriterionTypeEnum { + method @NonNull public String getRawName(); + enum_constant public static final android.audio.policy.engine.configuration.PfwCriterionTypeEnum exclusive; + enum_constant public static final android.audio.policy.engine.configuration.PfwCriterionTypeEnum inclusive; + } + + public class ProductStrategies { + ctor public ProductStrategies(); + method @Nullable public java.util.List getProductStrategy(); + } + + public static class ProductStrategies.ProductStrategy { + ctor public ProductStrategies.ProductStrategy(); + method @Nullable public java.util.List getAttributesGroup(); + method @Nullable public int getId(); + method @Nullable public String getName(); + method public void setId(@Nullable int); + method public void setName(@Nullable String); + } + + public enum SourceEnumType { + method @NonNull public 
String getRawName(); + enum_constant public static final android.audio.policy.engine.configuration.SourceEnumType AUDIO_SOURCE_CAMCORDER; + enum_constant public static final android.audio.policy.engine.configuration.SourceEnumType AUDIO_SOURCE_DEFAULT; + enum_constant public static final android.audio.policy.engine.configuration.SourceEnumType AUDIO_SOURCE_ECHO_REFERENCE; + enum_constant public static final android.audio.policy.engine.configuration.SourceEnumType AUDIO_SOURCE_FM_TUNER; + enum_constant public static final android.audio.policy.engine.configuration.SourceEnumType AUDIO_SOURCE_MIC; + enum_constant public static final android.audio.policy.engine.configuration.SourceEnumType AUDIO_SOURCE_REMOTE_SUBMIX; + enum_constant public static final android.audio.policy.engine.configuration.SourceEnumType AUDIO_SOURCE_UNPROCESSED; + enum_constant public static final android.audio.policy.engine.configuration.SourceEnumType AUDIO_SOURCE_VOICE_CALL; + enum_constant public static final android.audio.policy.engine.configuration.SourceEnumType AUDIO_SOURCE_VOICE_COMMUNICATION; + enum_constant public static final android.audio.policy.engine.configuration.SourceEnumType AUDIO_SOURCE_VOICE_DOWNLINK; + enum_constant public static final android.audio.policy.engine.configuration.SourceEnumType AUDIO_SOURCE_VOICE_PERFORMANCE; + enum_constant public static final android.audio.policy.engine.configuration.SourceEnumType AUDIO_SOURCE_VOICE_RECOGNITION; + enum_constant public static final android.audio.policy.engine.configuration.SourceEnumType AUDIO_SOURCE_VOICE_UPLINK; + } + + public class SourceType { + ctor public SourceType(); + method @Nullable public android.audio.policy.engine.configuration.SourceEnumType getValue(); + method public void setValue(@Nullable android.audio.policy.engine.configuration.SourceEnumType); + } + + public enum Stream { + method @NonNull public String getRawName(); + enum_constant public static final android.audio.policy.engine.configuration.Stream 
AUDIO_STREAM_ACCESSIBILITY; + enum_constant public static final android.audio.policy.engine.configuration.Stream AUDIO_STREAM_ALARM; + enum_constant public static final android.audio.policy.engine.configuration.Stream AUDIO_STREAM_ASSISTANT; + enum_constant public static final android.audio.policy.engine.configuration.Stream AUDIO_STREAM_BLUETOOTH_SCO; + enum_constant public static final android.audio.policy.engine.configuration.Stream AUDIO_STREAM_DEFAULT; + enum_constant public static final android.audio.policy.engine.configuration.Stream AUDIO_STREAM_DTMF; + enum_constant public static final android.audio.policy.engine.configuration.Stream AUDIO_STREAM_ENFORCED_AUDIBLE; + enum_constant public static final android.audio.policy.engine.configuration.Stream AUDIO_STREAM_MUSIC; + enum_constant public static final android.audio.policy.engine.configuration.Stream AUDIO_STREAM_NOTIFICATION; + enum_constant public static final android.audio.policy.engine.configuration.Stream AUDIO_STREAM_RING; + enum_constant public static final android.audio.policy.engine.configuration.Stream AUDIO_STREAM_SYSTEM; + enum_constant public static final android.audio.policy.engine.configuration.Stream AUDIO_STREAM_TTS; + enum_constant public static final android.audio.policy.engine.configuration.Stream AUDIO_STREAM_VOICE_CALL; + } + + public enum UsageEnumType { + method @NonNull public String getRawName(); + enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_ALARM; + enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_ANNOUNCEMENT; + enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY; + enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE; + enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType 
AUDIO_USAGE_ASSISTANCE_SONIFICATION; + enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_ASSISTANT; + enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_CALL_ASSISTANT; + enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_EMERGENCY; + enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_GAME; + enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_MEDIA; + enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_NOTIFICATION; + enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED; + enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT; + enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST; + enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_NOTIFICATION_EVENT; + enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE; + enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_SAFETY; + enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_SPEAKER_CLEANUP; + enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_UNKNOWN; + enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_VEHICLE_STATUS; + enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_VIRTUAL_SOURCE; + 
enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_VOICE_COMMUNICATION; + enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING; + } + + public class UsageType { + ctor public UsageType(); + method @Nullable public android.audio.policy.engine.configuration.UsageEnumType getValue(); + method public void setValue(@Nullable android.audio.policy.engine.configuration.UsageEnumType); + } + + public class ValueType { + ctor public ValueType(); + method @Nullable public String getAndroid_type(); + method @Nullable public String getLiteral(); + method public void setAndroid_type(@Nullable String); + method public void setLiteral(@Nullable String); + } + + public class ValuesType { + ctor public ValuesType(); + method @Nullable public java.util.List getValue(); + } + + public enum Version { + method @NonNull public String getRawName(); + enum_constant public static final android.audio.policy.engine.configuration.Version _1_0; + } + + public class Volume { + ctor public Volume(); + method @Nullable public android.audio.policy.engine.configuration.DeviceCategory getDeviceCategory(); + method @Nullable public java.util.List getPoint(); + method @Nullable public String getRef(); + method public void setDeviceCategory(@Nullable android.audio.policy.engine.configuration.DeviceCategory); + method public void setRef(@Nullable String); + } + + public class VolumeGroupsType { + ctor public VolumeGroupsType(); + method @Nullable public java.util.List getVolumeGroup(); + } + + public static class VolumeGroupsType.VolumeGroup { + ctor public VolumeGroupsType.VolumeGroup(); + method @Nullable public int getIndexMax(); + method @Nullable public int getIndexMin(); + method @Nullable public String getName(); + method @Nullable public java.util.List getVolume(); + method public void setIndexMax(@Nullable int); + method public void setIndexMin(@Nullable int); + 
method public void setName(@Nullable String); + } + + public class VolumeRef { + ctor public VolumeRef(); + method @Nullable public String getName(); + method @Nullable public java.util.List getPoint(); + method public void setName(@Nullable String); + } + + public class VolumesType { + ctor public VolumesType(); + method @Nullable public java.util.List getReference(); + } + + public class XmlParser { + ctor public XmlParser(); + method @Nullable public static android.audio.policy.engine.configuration.Configuration read(@NonNull java.io.InputStream) throws javax.xml.datatype.DatatypeConfigurationException, java.io.IOException, org.xmlpull.v1.XmlPullParserException; + method @Nullable public static String readText(@NonNull org.xmlpull.v1.XmlPullParser) throws java.io.IOException, org.xmlpull.v1.XmlPullParserException; + method public static void skip(@NonNull org.xmlpull.v1.XmlPullParser) throws java.io.IOException, org.xmlpull.v1.XmlPullParserException; + } + +} + diff --git a/audio/config/audioPolicy/engine/api/last_current.txt b/audio/config/audioPolicy/engine/api/last_current.txt new file mode 100644 index 0000000..e69de29 diff --git a/audio/config/audioPolicy/engine/api/last_removed.txt b/audio/config/audioPolicy/engine/api/last_removed.txt new file mode 100644 index 0000000..e69de29 diff --git a/audio/config/audioPolicy/engine/api/removed.txt b/audio/config/audioPolicy/engine/api/removed.txt new file mode 100644 index 0000000..d802177 --- /dev/null +++ b/audio/config/audioPolicy/engine/api/removed.txt @@ -0,0 +1 @@ +// Signature format: 2.0 diff --git a/audio/config/audioPolicy/engine/audio_policy_engine_configuration.xsd b/audio/config/audioPolicy/engine/audio_policy_engine_configuration.xsd new file mode 100644 index 0000000..02250c7 --- /dev/null +++ b/audio/config/audioPolicy/engine/audio_policy_engine_configuration.xsd @@ -0,0 +1,479 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Criterion type is provided as a pair of 'human readable' string (referred as the + literal part, that will allow to express 'human readable' rules and an optional + android type. + This android type is reserved for device type mapping with parameter framework + representation on a bitfield (Only one bit is expected to represent a device) and + android representation of a type that may use several bits. + The lookup table will allow wrap android device type to parameter framework device + types data model. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Volume section defines a volume curve for a given use case and device category. + It contains a list of points of this curve expressing the attenuation in Millibels + for a given volume index from 0 to 100. + + 0,-9600 + 100,0 + + + It may also reference a reference/@name to avoid duplicating curves. + + + 0,-9600 + 100,0 + + + + + + + + + + + + + + + + + + + + + Comma separated pair of number. + The fist one is the framework level (between 0 and 100). + The second one is the volume to send to the HAL. + The framework will interpolate volumes not specified. + Their MUST be at least 2 points specified. 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/audio/deprecated/StreamSwitcher.cpp b/audio/deprecated/StreamSwitcher.cpp new file mode 100644 index 0000000..f1e0f52 --- /dev/null +++ b/audio/deprecated/StreamSwitcher.cpp @@ -0,0 +1,271 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#define LOG_TAG "AHAL_StreamSwitcher" + +#include +#include +#include + +#include "core-impl/StreamStub.h" +#include "deprecated/StreamSwitcher.h" + +using aidl::android::hardware::audio::effect::IEffect; +using aidl::android::media::audio::common::AudioDevice; + +namespace aidl::android::hardware::audio::core::deprecated { + +StreamSwitcher::StreamSwitcher(StreamContext* context, const Metadata& metadata) + : mContext(context), + mMetadata(metadata), + mStream(new InnerStreamWrapper(context, mMetadata)) {} + +ndk::ScopedAStatus StreamSwitcher::closeCurrentStream(bool validateStreamState) { + if (!mStream) return ndk::ScopedAStatus::ok(); + RETURN_STATUS_IF_ERROR(mStream->prepareToClose()); + RETURN_STATUS_IF_ERROR(mStream->close()); + if (validateStreamState && !isValidClosingStreamState(mStream->getStatePriorToClosing())) { + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + mStream.reset(); + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus StreamSwitcher::close() { + if (mStream != nullptr) { + auto status = closeCurrentStream(false /*validateStreamState*/); + // The actual state is irrelevant since only StreamSwitcher cares about it. 
+ onClose(StreamDescriptor::State::STANDBY); + return status; + } + LOG(ERROR) << __func__ << ": stream was already closed"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); +} + +ndk::ScopedAStatus StreamSwitcher::prepareToClose() { + if (mStream != nullptr) { + return mStream->prepareToClose(); + } + LOG(ERROR) << __func__ << ": stream was closed"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); +} + +ndk::ScopedAStatus StreamSwitcher::updateHwAvSyncId(int32_t in_hwAvSyncId) { + if (mStream == nullptr) { + LOG(ERROR) << __func__ << ": stream was closed"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + RETURN_STATUS_IF_ERROR(mStream->updateHwAvSyncId(in_hwAvSyncId)); + mHwAvSyncId = in_hwAvSyncId; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus StreamSwitcher::getVendorParameters(const std::vector& in_ids, + std::vector* _aidl_return) { + if (mStream == nullptr) { + LOG(ERROR) << __func__ << ": stream was closed"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + if (mIsStubStream) { + LOG(ERROR) << __func__ << ": the stream is not connected"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + return mStream->getVendorParameters(in_ids, _aidl_return); +} + +ndk::ScopedAStatus StreamSwitcher::setVendorParameters( + const std::vector& in_parameters, bool in_async) { + if (mStream == nullptr) { + LOG(ERROR) << __func__ << ": stream was closed"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + if (mIsStubStream) { + mMissedParameters.emplace_back(in_parameters, in_async); + return ndk::ScopedAStatus::ok(); + } + return mStream->setVendorParameters(in_parameters, in_async); +} + +ndk::ScopedAStatus StreamSwitcher::addEffect(const std::shared_ptr& in_effect) { + if (in_effect == nullptr) { + LOG(DEBUG) << __func__ << ": null effect"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + if (mStream == nullptr) { + 
LOG(ERROR) << __func__ << ": stream was closed"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + if (!mIsStubStream) { + RETURN_STATUS_IF_ERROR(mStream->addEffect(in_effect)); + } + mEffects.push_back(in_effect); + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus StreamSwitcher::removeEffect(const std::shared_ptr& in_effect) { + if (in_effect == nullptr) { + LOG(DEBUG) << __func__ << ": null effect"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + if (mStream == nullptr) { + LOG(ERROR) << __func__ << ": stream was closed"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + for (auto it = mEffects.begin(); it != mEffects.end(); ++it) { + if ((*it)->asBinder() == in_effect->asBinder()) { + mEffects.erase(it); + break; + } + } + return !mIsStubStream ? mStream->removeEffect(in_effect) : ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus StreamSwitcher::getStreamCommonCommon( + std::shared_ptr* _aidl_return) { + if (!mCommon) { + LOG(FATAL) << __func__ << ": the common interface was not created"; + } + *_aidl_return = mCommon.getInstance(); + LOG(DEBUG) << __func__ << ": returning " << _aidl_return->get()->asBinder().get(); + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus StreamSwitcher::updateMetadataCommon(const Metadata& metadata) { + if (mStream == nullptr) { + LOG(ERROR) << __func__ << ": stream was closed"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + mMetadata = metadata; + return !mIsStubStream ? mStream->updateMetadataCommon(metadata) : ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus StreamSwitcher::initInstance( + const std::shared_ptr& delegate) { + mCommon = ndk::SharedRefBase::make(delegate); + // The delegate is null because StreamSwitcher handles IStreamCommon methods by itself. 
+ return mStream->initInstance(nullptr); +} + +const StreamContext& StreamSwitcher::getContext() const { + return *mContext; +} + +bool StreamSwitcher::isClosed() const { + return mStream == nullptr || mStream->isClosed(); +} + +const StreamCommonInterface::ConnectedDevices& StreamSwitcher::getConnectedDevices() const { + return mStream->getConnectedDevices(); +} + +ndk::ScopedAStatus StreamSwitcher::setConnectedDevices(const std::vector& devices) { + LOG(DEBUG) << __func__ << ": " << ::android::internal::ToString(devices); + if (mStream->getConnectedDevices() == devices) return ndk::ScopedAStatus::ok(); + const DeviceSwitchBehavior behavior = switchCurrentStream(devices); + if (behavior == DeviceSwitchBehavior::UNSUPPORTED_DEVICES) { + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); + } else if (behavior == DeviceSwitchBehavior::SWITCH_TO_STUB_STREAM && !devices.empty()) { + // This is an error in the extending class. + LOG(FATAL) << __func__ + << ": switching to stub stream with connected devices is not allowed"; + } + if (behavior == USE_CURRENT_STREAM) { + mIsStubStream = false; + } else { + LOG(DEBUG) << __func__ << ": connected devices changed, switching stream"; + // Two streams can't be opened for the same context, thus we always need to close + // the current one before creating a new one. + RETURN_STATUS_IF_ERROR(closeCurrentStream(true /*validateStreamState*/)); + if (behavior == CREATE_NEW_STREAM) { + mStream = createNewStream(devices, mContext, mMetadata); + mIsStubStream = false; + } else { // SWITCH_TO_STUB_STREAM + mStream.reset(new InnerStreamWrapper(mContext, mMetadata)); + mIsStubStream = true; + } + // The delegate is null because StreamSwitcher handles IStreamCommon methods by itself. 
+ if (ndk::ScopedAStatus status = mStream->initInstance(nullptr); !status.isOk()) { + if (mIsStubStream) { + LOG(FATAL) << __func__ + << ": failed to initialize stub stream: " << status.getDescription(); + } + // Need to close the current failed stream, and report an error. + // Since we can't operate without a stream implementation, put a stub in. + RETURN_STATUS_IF_ERROR(closeCurrentStream(false /*validateStreamState*/)); + mStream.reset(new InnerStreamWrapper(mContext, mMetadata)); + (void)mStream->initInstance(nullptr); + (void)mStream->setConnectedDevices(devices); + return status; + } + } + RETURN_STATUS_IF_ERROR(mStream->setConnectedDevices(devices)); + if (behavior == CREATE_NEW_STREAM) { + // These updates are less critical, only log warning on failure. + if (mHwAvSyncId.has_value()) { + if (auto status = mStream->updateHwAvSyncId(*mHwAvSyncId); !status.isOk()) { + LOG(WARNING) << __func__ << ": could not update HW AV Sync for a new stream: " + << status.getDescription(); + } + } + for (const auto& vndParam : mMissedParameters) { + if (auto status = mStream->setVendorParameters(vndParam.first, vndParam.second); + !status.isOk()) { + LOG(WARNING) << __func__ << ": error while setting parameters for a new stream: " + << status.getDescription(); + } + } + mMissedParameters.clear(); + for (const auto& effect : mEffects) { + if (auto status = mStream->addEffect(effect); !status.isOk()) { + LOG(WARNING) << __func__ << ": error while adding effect for a new stream: " + << status.getDescription(); + } + } + if (mBluetoothParametersUpdated) { + if (auto status = mStream->bluetoothParametersUpdated(); !status.isOk()) { + LOG(WARNING) << __func__ + << ": error while updating BT parameters for a new stream: " + << status.getDescription(); + } + } + mBluetoothParametersUpdated = false; + } + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus StreamSwitcher::bluetoothParametersUpdated() { + if (mStream == nullptr) { + LOG(ERROR) << __func__ << ": stream was 
closed"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + if (mIsStubStream) { + mBluetoothParametersUpdated = true; + return ndk::ScopedAStatus::ok(); + } + return mStream->bluetoothParametersUpdated(); +} + +ndk::ScopedAStatus StreamSwitcher::setGain(float gain) { + if (mStream == nullptr) { + LOG(ERROR) << __func__ << ": stream was closed"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + return mStream->setGain(gain); +} + +} // namespace aidl::android::hardware::audio::core::deprecated diff --git a/audio/deprecated/StreamSwitcher.h b/audio/deprecated/StreamSwitcher.h new file mode 100644 index 0000000..56fdd23 --- /dev/null +++ b/audio/deprecated/StreamSwitcher.h @@ -0,0 +1,202 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + ** This class is deprecated because its use causes threading issues + ** with the FMQ due to change of threads reading and writing into FMQ. + ** + ** DO NOT USE. These files will be removed. + **/ + +#pragma once + +#include "core-impl/Stream.h" + +namespace aidl::android::hardware::audio::core::deprecated { + +// 'StreamSwitcher' is an implementation of 'StreamCommonInterface' which allows +// dynamically switching the underlying stream implementation based on currently +// connected devices. This is achieved by replacing inheritance from +// 'StreamCommonImpl' with owning an instance of it. 
StreamSwitcher must be +// extended in order to supply the logic for choosing the stream +// implementation. When there are no connected devices, for instance, upon the +// creation, the StreamSwitcher engages an instance of a stub stream in order to +// keep serving requests coming via 'StreamDescriptor'. +// +// StreamSwitcher implements the 'IStreamCommon' interface directly, with +// necessary delegation to the current stream implementation. While the stub +// stream is engaged, any requests made via 'IStreamCommon' (parameters, effects +// setting, etc) are postponed and only delivered on device connection change +// to the "real" stream implementation provided by the extending class. This is why +// the behavior of StreamSwitcher in the "stub" state is not identical to behavior +// of 'StreamStub'. It can become a full substitute for 'StreamStub' once +// device connection change event occurs and the extending class returns +// 'LEAVE_CURRENT_STREAM' from 'switchCurrentStream' method. +// +// There is a natural limitation that the current stream implementation may only +// be switched when the stream is in the 'STANDBY' state. Thus, when the event +// to switch the stream occurs, the current stream is stopped and joined, and +// its last state is validated. Since the change of the set of connected devices +// normally occurs on patch updates, if the stream was not in standby, this is +// reported to the caller of 'IModule.setAudioPatch' as the 'EX_ILLEGAL_STATE' +// error. +// +// The simplest use case, when the implementor just needs to emulate the legacy HAL API +// behavior of receiving the connected devices upon stream creation, the implementation +// of the extending class can look as follows. 
We assume that 'StreamLegacy' implementation +// is the one requiring to know connected devices on creation: +// +// class StreamLegacy : public StreamCommonImpl { +// public: +// StreamLegacy(StreamContext* context, const Metadata& metadata, +// const std::vector& devices); +// }; +// +// class StreamOutLegacy final : public StreamOut, public StreamSwitcher { +// public: +// StreamOutLegacy(StreamContext&& context, metatadata etc.) +// private: +// DeviceSwitchBehavior switchCurrentStream(const std::vector&) override { +// // This implementation effectively postpones stream creation until +// // receiving the first call to 'setConnectedDevices' with a non-empty list. +// return isStubStream() ? DeviceSwitchBehavior::CREATE_NEW_STREAM : +// DeviceSwitchBehavior::USE_CURRENT_STREAM; +// } +// std::unique_ptr createNewStream( +// const std::vector& devices, +// StreamContext* context, const Metadata& metadata) override { +// return std::unique_ptr(new InnerStreamWrapper( +// context, metadata, devices)); +// } +// void onClose(StreamDescriptor::State) override { defaultOnClose(); } +// } +// + +class StreamCommonInterfaceEx : virtual public StreamCommonInterface { + public: + virtual StreamDescriptor::State getStatePriorToClosing() const = 0; +}; + +template +class InnerStreamWrapper : public T, public StreamCommonInterfaceEx { + public: + template + InnerStreamWrapper(Args&&... args) : T(std::forward(args)...) {} + StreamDescriptor::State getStatePriorToClosing() const override { return mStatePriorToClosing; } + + private: + // Do not need to do anything on close notification from the inner stream + // because StreamSwitcher handles IStreamCommon::close by itself. 
+ void onClose(StreamDescriptor::State statePriorToClosing) override { + mStatePriorToClosing = statePriorToClosing; + } + + StreamDescriptor::State mStatePriorToClosing = StreamDescriptor::State::STANDBY; +}; + +class StreamSwitcher : virtual public StreamCommonInterface { + public: + StreamSwitcher(StreamContext* context, const Metadata& metadata); + + ndk::ScopedAStatus close() override; + ndk::ScopedAStatus prepareToClose() override; + ndk::ScopedAStatus updateHwAvSyncId(int32_t in_hwAvSyncId) override; + ndk::ScopedAStatus getVendorParameters(const std::vector& in_ids, + std::vector* _aidl_return) override; + ndk::ScopedAStatus setVendorParameters(const std::vector& in_parameters, + bool in_async) override; + ndk::ScopedAStatus addEffect( + const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>& in_effect) + override; + ndk::ScopedAStatus removeEffect( + const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>& in_effect) + override; + + ndk::ScopedAStatus getStreamCommonCommon(std::shared_ptr* _aidl_return) override; + ndk::ScopedAStatus updateMetadataCommon(const Metadata& metadata) override; + + ndk::ScopedAStatus initInstance( + const std::shared_ptr& delegate) override; + const StreamContext& getContext() const override; + bool isClosed() const override; + const ConnectedDevices& getConnectedDevices() const override; + ndk::ScopedAStatus setConnectedDevices( + const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices) + override; + ndk::ScopedAStatus bluetoothParametersUpdated() override; + ndk::ScopedAStatus setGain(float gain) override; + + protected: + // Since switching a stream requires closing down the current stream, StreamSwitcher + // asks the extending class its intent on the connected devices change. + enum DeviceSwitchBehavior { + // Continue using the current stream implementation. 
If it's the stub implementation, + // StreamSwitcher starts treating the stub stream as a "real" implementation, + // without effectively closing it and starting again. + USE_CURRENT_STREAM, + // This is the normal case when the extending class provides a "real" implementation + // which is not a stub implementation. + CREATE_NEW_STREAM, + // This is the case when the extending class wants to revert back to the initial + // condition of using a stub stream provided by the StreamSwitcher. This behavior + // is only allowed when the list of connected devices is empty. + SWITCH_TO_STUB_STREAM, + // Use when the set of devices is not supported by the extending class. This returns + // 'EX_UNSUPPORTED_OPERATION' from 'setConnectedDevices'. + UNSUPPORTED_DEVICES, + }; + // StreamSwitcher will call these methods from 'setConnectedDevices'. If the switch behavior + // is 'CREATE_NEW_STREAM', the 'createwNewStream' function will be called (with the same + // device vector) for obtaining a new stream implementation, assuming that closing + // the current stream was a success. + virtual DeviceSwitchBehavior switchCurrentStream( + const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices) = 0; + virtual std::unique_ptr createNewStream( + const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices, + StreamContext* context, const Metadata& metadata) = 0; + virtual void onClose(StreamDescriptor::State streamPriorToClosing) = 0; + + bool isStubStream() const { return mIsStubStream; } + StreamCommonInterfaceEx* getCurrentStream() const { return mStream.get(); } + + private: + using VndParam = std::pair, bool /*isAsync*/>; + + static constexpr bool isValidClosingStreamState(StreamDescriptor::State state) { + return state == StreamDescriptor::State::STANDBY || state == StreamDescriptor::State::ERROR; + } + + ndk::ScopedAStatus closeCurrentStream(bool validateStreamState); + + // StreamSwitcher does not own the context. 
+ StreamContext* mContext; + Metadata mMetadata; + ChildInterface mCommon; + // The current stream. + std::unique_ptr mStream; + // Indicates whether 'mCurrentStream' is a stub stream implementation + // maintained by StreamSwitcher until the extending class provides a "real" + // implementation. The invariant of this state is that there are no connected + // devices. + bool mIsStubStream = true; + // Storage for the data from commands received via 'IStreamCommon'. + std::optional mHwAvSyncId; + std::vector mMissedParameters; + std::vector> mEffects; + bool mBluetoothParametersUpdated = false; +}; + +} // namespace aidl::android::hardware::audio::core::deprecated diff --git a/audio/downmix/Android.bp b/audio/downmix/Android.bp new file mode 100644 index 0000000..e5e8405 --- /dev/null +++ b/audio/downmix/Android.bp @@ -0,0 +1,40 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package { + default_team: "trendy_team_android_media_audio_framework", + // See: http://go/android-license-faq + // A large-scale-change added 'default_applicable_licenses' to import + // all of the 'license_kinds' from "hardware_interfaces_license" + // to get the below license kinds: + // SPDX-license-identifier-Apache-2.0 + default_applicable_licenses: ["hardware_interfaces_license"], +} + +cc_library_shared { + name: "libdownmixsw", + defaults: [ + "aidlaudioeffectservice_defaults", + ], + srcs: [ + "DownmixSw.cpp", + ":effectCommonFile", + ], + relative_install_path: "soundfx", + visibility: [ + "//hardware/interfaces/audio/aidl/default", + ], +} diff --git a/audio/downmix/DownmixSw.cpp b/audio/downmix/DownmixSw.cpp new file mode 100644 index 0000000..19ab2e8 --- /dev/null +++ b/audio/downmix/DownmixSw.cpp @@ -0,0 +1,164 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include + +#define LOG_TAG "AHAL_DownmixSw" +#include +#include +#include + +#include "DownmixSw.h" + +using aidl::android::hardware::audio::effect::Descriptor; +using aidl::android::hardware::audio::effect::DownmixSw; +using aidl::android::hardware::audio::effect::getEffectImplUuidDownmixSw; +using aidl::android::hardware::audio::effect::getEffectTypeUuidDownmix; +using aidl::android::hardware::audio::effect::IEffect; +using aidl::android::hardware::audio::effect::State; +using aidl::android::media::audio::common::AudioUuid; + +extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid, + std::shared_ptr* instanceSpp) { + if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidDownmixSw()) { + LOG(ERROR) << __func__ << "uuid not supported"; + return EX_ILLEGAL_ARGUMENT; + } + if (instanceSpp) { + *instanceSpp = ndk::SharedRefBase::make(); + LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created"; + return EX_NONE; + } else { + LOG(ERROR) << __func__ << " invalid input parameter!"; + return EX_ILLEGAL_ARGUMENT; + } +} + +extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) { + if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidDownmixSw()) { + LOG(ERROR) << __func__ << "uuid not supported"; + return EX_ILLEGAL_ARGUMENT; + } + *_aidl_return = DownmixSw::kDescriptor; + return EX_NONE; +} + +namespace aidl::android::hardware::audio::effect { + +const std::string DownmixSw::kEffectName = "DownmixSw"; +const Descriptor DownmixSw::kDescriptor = { + .common = {.id = {.type = getEffectTypeUuidDownmix(), + .uuid = getEffectImplUuidDownmixSw(), + .proxy = std::nullopt}, + .flags = {.type = Flags::Type::INSERT, + .insert = Flags::Insert::FIRST, + .volume = Flags::Volume::CTRL}, + .name = kEffectName, + .implementor = "The Android Open Source Project"}}; + +ndk::ScopedAStatus DownmixSw::getDescriptor(Descriptor* _aidl_return) { + LOG(DEBUG) << __func__ << 
kDescriptor.toString(); + *_aidl_return = kDescriptor; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus DownmixSw::setParameterSpecific(const Parameter::Specific& specific) { + RETURN_IF(Parameter::Specific::downmix != specific.getTag(), EX_ILLEGAL_ARGUMENT, + "EffectNotSupported"); + RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext"); + + auto& dmParam = specific.get(); + auto tag = dmParam.getTag(); + + switch (tag) { + case Downmix::type: { + RETURN_IF(mContext->setDmType(dmParam.get()) != RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "setTypeFailed"); + return ndk::ScopedAStatus::ok(); + } + default: { + LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT, + "DownmixTagNotSupported"); + } + } +} + +ndk::ScopedAStatus DownmixSw::getParameterSpecific(const Parameter::Id& id, + Parameter::Specific* specific) { + auto tag = id.getTag(); + RETURN_IF(Parameter::Id::downmixTag != tag, EX_ILLEGAL_ARGUMENT, "wrongIdTag"); + auto dmId = id.get(); + auto dmIdTag = dmId.getTag(); + switch (dmIdTag) { + case Downmix::Id::commonTag: + return getParameterDownmix(dmId.get(), specific); + default: + LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT, + "DownmixTagNotSupported"); + } +} + +ndk::ScopedAStatus DownmixSw::getParameterDownmix(const Downmix::Tag& tag, + Parameter::Specific* specific) { + RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext"); + Downmix dmParam; + switch (tag) { + case Downmix::type: { + dmParam.set(mContext->getDmType()); + break; + } + default: { + LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT, + "DownmixTagNotSupported"); + } + } + + specific->set(dmParam); + return ndk::ScopedAStatus::ok(); +} + +std::shared_ptr DownmixSw::createContext(const Parameter::Common& 
common) { + if (mContext) { + LOG(DEBUG) << __func__ << " context already exist"; + } else { + mContext = std::make_shared(1 /* statusFmqDepth */, common); + } + + return mContext; +} + +RetCode DownmixSw::releaseContext() { + if (mContext) { + mContext.reset(); + } + return RetCode::SUCCESS; +} + +// Processing method running in EffectWorker thread. +IEffect::Status DownmixSw::effectProcessImpl(float* in, float* out, int samples) { + // TODO: get data buffer and process. + LOG(DEBUG) << __func__ << " in " << in << " out " << out << " samples " << samples; + for (int i = 0; i < samples; i++) { + *out++ = *in++; + } + return {STATUS_OK, samples, samples}; +} + +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/downmix/DownmixSw.h b/audio/downmix/DownmixSw.h new file mode 100644 index 0000000..1a9f0f0 --- /dev/null +++ b/audio/downmix/DownmixSw.h @@ -0,0 +1,77 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +#include +#include +#include +#include + +#include "effect-impl/EffectImpl.h" + +namespace aidl::android::hardware::audio::effect { + +class DownmixSwContext final : public EffectContext { + public: + DownmixSwContext(int statusDepth, const Parameter::Common& common) + : EffectContext(statusDepth, common) { + LOG(DEBUG) << __func__; + } + + RetCode setDmType(Downmix::Type type) { + // TODO : Add implementation to apply new type + mType = type; + return RetCode::SUCCESS; + } + Downmix::Type getDmType() const { return mType; } + + private: + Downmix::Type mType = Downmix::Type::STRIP; +}; + +class DownmixSw final : public EffectImpl { + public: + static const std::string kEffectName; + static const Capability kCapability; + static const Descriptor kDescriptor; + DownmixSw() { LOG(DEBUG) << __func__; } + ~DownmixSw() { + cleanUp(); + LOG(DEBUG) << __func__; + } + + ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override; + ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) + REQUIRES(mImplMutex) override; + ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific) + REQUIRES(mImplMutex) override; + + std::shared_ptr createContext(const Parameter::Common& common) + REQUIRES(mImplMutex) override; + RetCode releaseContext() REQUIRES(mImplMutex) override; + + std::string getEffectName() override { return kEffectName; }; + IEffect::Status effectProcessImpl(float* in, float* out, int sample) + REQUIRES(mImplMutex) override; + + private: + std::shared_ptr mContext GUARDED_BY(mImplMutex); + + ndk::ScopedAStatus getParameterDownmix(const Downmix::Tag& tag, Parameter::Specific* specific) + REQUIRES(mImplMutex); +}; +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/dynamicProcessing/Android.bp b/audio/dynamicProcessing/Android.bp new file mode 100644 index 0000000..ccd1aa0 --- /dev/null +++ b/audio/dynamicProcessing/Android.bp @@ -0,0 +1,40 @@ +/* + * 
Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package { + default_team: "trendy_team_android_media_audio_framework", + // See: http://go/android-license-faq + // A large-scale-change added 'default_applicable_licenses' to import + // all of the 'license_kinds' from "hardware_interfaces_license" + // to get the below license kinds: + // SPDX-license-identifier-Apache-2.0 + default_applicable_licenses: ["hardware_interfaces_license"], +} + +cc_library_shared { + name: "libdynamicsprocessingsw", + defaults: [ + "aidlaudioeffectservice_defaults", + ], + srcs: [ + "DynamicsProcessingSw.cpp", + ":effectCommonFile", + ], + relative_install_path: "soundfx", + visibility: [ + "//hardware/interfaces/audio/aidl/default", + ], +} diff --git a/audio/dynamicProcessing/DynamicsProcessingSw.cpp b/audio/dynamicProcessing/DynamicsProcessingSw.cpp new file mode 100644 index 0000000..36face1 --- /dev/null +++ b/audio/dynamicProcessing/DynamicsProcessingSw.cpp @@ -0,0 +1,523 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#include +#include + +#define LOG_TAG "AHAL_DynamicsProcessingSw" +#include +#include +#include + +#include "DynamicsProcessingSw.h" + +using aidl::android::hardware::audio::effect::Descriptor; +using aidl::android::hardware::audio::effect::DynamicsProcessingSw; +using aidl::android::hardware::audio::effect::getEffectImplUuidDynamicsProcessingSw; +using aidl::android::hardware::audio::effect::getEffectTypeUuidDynamicsProcessing; +using aidl::android::hardware::audio::effect::IEffect; +using aidl::android::hardware::audio::effect::State; +using aidl::android::media::audio::common::AudioUuid; + +extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid, + std::shared_ptr* instanceSpp) { + if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidDynamicsProcessingSw()) { + LOG(ERROR) << __func__ << "uuid not supported"; + return EX_ILLEGAL_ARGUMENT; + } + if (instanceSpp) { + *instanceSpp = ndk::SharedRefBase::make(); + LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created"; + return EX_NONE; + } else { + LOG(ERROR) << __func__ << " invalid input parameter!"; + return EX_ILLEGAL_ARGUMENT; + } +} + +extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) { + if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidDynamicsProcessingSw()) { + LOG(ERROR) << __func__ << "uuid not supported"; + return EX_ILLEGAL_ARGUMENT; + } + *_aidl_return = DynamicsProcessingSw::kDescriptor; + return EX_NONE; +} + +namespace aidl::android::hardware::audio::effect { + 
+const std::string DynamicsProcessingSw::kEffectName = "DynamicsProcessingSw"; +const DynamicsProcessing::EqBandConfig DynamicsProcessingSw::kEqBandConfigMin = + DynamicsProcessing::EqBandConfig({.channel = 0, + .band = 0, + .enable = false, + .cutoffFrequencyHz = 220, + .gainDb = std::numeric_limits::min()}); +const DynamicsProcessing::EqBandConfig DynamicsProcessingSw::kEqBandConfigMax = + DynamicsProcessing::EqBandConfig({.channel = std::numeric_limits::max(), + .band = std::numeric_limits::max(), + .enable = true, + .cutoffFrequencyHz = 20000, + .gainDb = std::numeric_limits::max()}); +const Range::DynamicsProcessingRange DynamicsProcessingSw::kPreEqBandRange = { + .min = DynamicsProcessing::make( + {DynamicsProcessingSw::kEqBandConfigMin}), + .max = DynamicsProcessing::make( + {DynamicsProcessingSw::kEqBandConfigMax})}; +const Range::DynamicsProcessingRange DynamicsProcessingSw::kPostEqBandRange = { + .min = DynamicsProcessing::make( + {DynamicsProcessingSw::kEqBandConfigMin}), + .max = DynamicsProcessing::make( + {DynamicsProcessingSw::kEqBandConfigMax})}; + +const std::vector DynamicsProcessingSw::kRanges = { + DynamicsProcessingSw::kPreEqBandRange, DynamicsProcessingSw::kPostEqBandRange}; +const Capability DynamicsProcessingSw::kCapability = {.range = DynamicsProcessingSw::kRanges}; + +const Descriptor DynamicsProcessingSw::kDescriptor = { + .common = {.id = {.type = getEffectTypeUuidDynamicsProcessing(), + .uuid = getEffectImplUuidDynamicsProcessingSw(), + .proxy = std::nullopt}, + .flags = {.type = Flags::Type::POST_PROC, + .insert = Flags::Insert::FIRST, + .volume = Flags::Volume::CTRL}, + .name = DynamicsProcessingSw::kEffectName, + .implementor = "The Android Open Source Project"}, + .capability = DynamicsProcessingSw::kCapability}; + +ndk::ScopedAStatus DynamicsProcessingSw::getDescriptor(Descriptor* _aidl_return) { + LOG(DEBUG) << __func__ << kDescriptor.toString(); + *_aidl_return = kDescriptor; + return ndk::ScopedAStatus::ok(); +} + 
+ndk::ScopedAStatus DynamicsProcessingSw::setParameterSpecific(const Parameter::Specific& specific) { + RETURN_IF(Parameter::Specific::dynamicsProcessing != specific.getTag(), EX_ILLEGAL_ARGUMENT, + "EffectNotSupported"); + + RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext"); + + LOG(INFO) << __func__ << specific.toString(); + auto& dpParam = specific.get(); + auto tag = dpParam.getTag(); + switch (tag) { + case DynamicsProcessing::engineArchitecture: { + RETURN_IF(mContext->setEngineArchitecture( + dpParam.get()) != + RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "setEngineArchitectureFailed"); + return ndk::ScopedAStatus::ok(); + } + case DynamicsProcessing::preEq: { + RETURN_IF(mContext->setPreEqChannelCfgs(dpParam.get()) != + RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "setPreEqChannelCfgsFailed"); + return ndk::ScopedAStatus::ok(); + } + case DynamicsProcessing::postEq: { + RETURN_IF(mContext->setPostEqChannelCfgs(dpParam.get()) != + RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "setPostEqChannelCfgsFailed"); + return ndk::ScopedAStatus::ok(); + } + case DynamicsProcessing::mbc: { + RETURN_IF(mContext->setMbcChannelCfgs(dpParam.get()) != + RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "setMbcChannelCfgsFailed"); + return ndk::ScopedAStatus::ok(); + } + case DynamicsProcessing::preEqBand: { + RETURN_IF(mContext->setPreEqBandCfgs(dpParam.get()) != + RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "setPreEqBandCfgsFailed"); + return ndk::ScopedAStatus::ok(); + } + case DynamicsProcessing::postEqBand: { + RETURN_IF(mContext->setPostEqBandCfgs(dpParam.get()) != + RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "setPostEqBandCfgsFailed"); + return ndk::ScopedAStatus::ok(); + } + case DynamicsProcessing::mbcBand: { + RETURN_IF(mContext->setMbcBandCfgs(dpParam.get()) != + RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "setMbcBandCfgsFailed"); + return ndk::ScopedAStatus::ok(); + } + case DynamicsProcessing::limiter: { + RETURN_IF(mContext->setLimiterCfgs(dpParam.get()) != + RetCode::SUCCESS, + 
EX_ILLEGAL_ARGUMENT, "limiterCfgsFailed"); + return ndk::ScopedAStatus::ok(); + } + case DynamicsProcessing::inputGain: { + RETURN_IF(mContext->setInputGainCfgs(dpParam.get()) != + RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "inputGainCfgFailed"); + return ndk::ScopedAStatus::ok(); + } + case DynamicsProcessing::vendor: { + LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage( + EX_ILLEGAL_ARGUMENT, "DynamicsProcessingTagNotSupported"); + } + } +} + +ndk::ScopedAStatus DynamicsProcessingSw::getParameterSpecific(const Parameter::Id& id, + Parameter::Specific* specific) { + auto tag = id.getTag(); + RETURN_IF(Parameter::Id::dynamicsProcessingTag != tag, EX_ILLEGAL_ARGUMENT, "wrongIdTag"); + auto dpId = id.get(); + auto dpIdTag = dpId.getTag(); + switch (dpIdTag) { + case DynamicsProcessing::Id::commonTag: + return getParameterDynamicsProcessing(dpId.get(), + specific); + case DynamicsProcessing::Id::vendorExtensionTag: + LOG(ERROR) << __func__ << " unsupported tag: " << toString(dpIdTag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage( + EX_ILLEGAL_ARGUMENT, "DynamicsProcessingTagNotSupported"); + } +} + +ndk::ScopedAStatus DynamicsProcessingSw::getParameterDynamicsProcessing( + const DynamicsProcessing::Tag& tag, Parameter::Specific* specific) { + RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext"); + + DynamicsProcessing dpParam; + switch (tag) { + case DynamicsProcessing::Tag::engineArchitecture: { + dpParam.set(mContext->getEngineArchitecture()); + break; + } + case DynamicsProcessing::Tag::preEq: { + dpParam.set(mContext->getPreEqChannelCfgs()); + break; + } + case DynamicsProcessing::Tag::postEq: { + dpParam.set(mContext->getPostEqChannelCfgs()); + break; + } + case DynamicsProcessing::Tag::mbc: { + dpParam.set(mContext->getMbcChannelCfgs()); + break; + } + case DynamicsProcessing::Tag::preEqBand: { + dpParam.set(mContext->getPreEqBandCfgs()); + break; + } + case 
DynamicsProcessing::Tag::postEqBand: { + dpParam.set(mContext->getPostEqBandCfgs()); + break; + } + case DynamicsProcessing::Tag::mbcBand: { + dpParam.set(mContext->getMbcBandCfgs()); + break; + } + case DynamicsProcessing::Tag::limiter: { + dpParam.set(mContext->getLimiterCfgs()); + break; + } + case DynamicsProcessing::Tag::inputGain: { + dpParam.set(mContext->getInputGainCfgs()); + break; + } + case DynamicsProcessing::vendor: { + LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage( + EX_ILLEGAL_ARGUMENT, "DynamicsProcessingTagNotSupported"); + } + } + + specific->set(dpParam); + LOG(INFO) << __func__ << specific->toString(); + return ndk::ScopedAStatus::ok(); +} + +std::shared_ptr DynamicsProcessingSw::createContext( + const Parameter::Common& common) { + if (mContext) { + LOG(DEBUG) << __func__ << " context already exist"; + } else { + mContext = std::make_shared(1 /* statusFmqDepth */, common); + } + return mContext; +} + +RetCode DynamicsProcessingSw::releaseContext() { + if (mContext) { + mContext.reset(); + } + return RetCode::SUCCESS; +} + +// Processing method running in EffectWorker thread. +IEffect::Status DynamicsProcessingSw::effectProcessImpl(float* in, float* out, int samples) { + // TODO: get data buffer and process. 
+ LOG(DEBUG) << __func__ << " in " << in << " out " << out << " samples " << samples; + for (int i = 0; i < samples; i++) { + *out++ = *in++; + } + return {STATUS_OK, samples, samples}; +} + +RetCode DynamicsProcessingSwContext::setCommon(const Parameter::Common& common) { + if (auto ret = updateIOFrameSize(common); ret != RetCode::SUCCESS) { + return ret; + } + mCommon = common; + mChannelCount = ::aidl::android::hardware::audio::common::getChannelCount( + common.input.base.channelMask); + resizeChannels(); + resizeBands(); + LOG(INFO) << __func__ << mCommon.toString(); + return RetCode::SUCCESS; +} + +RetCode DynamicsProcessingSwContext::setEngineArchitecture( + const DynamicsProcessing::EngineArchitecture& cfg) { + RETURN_VALUE_IF(!validateEngineConfig(cfg), RetCode::ERROR_ILLEGAL_PARAMETER, + "illegalEngineConfig"); + + if (mEngineSettings == cfg) { + LOG(INFO) << __func__ << " not change in engine, do nothing"; + return RetCode::SUCCESS; + } + mEngineSettings = cfg; + resizeBands(); + return RetCode::SUCCESS; +} + +RetCode DynamicsProcessingSwContext::setChannelCfgs( + const std::vector& cfgs, + std::vector& targetCfgs, + const DynamicsProcessing::StageEnablement& stage) { + RETURN_VALUE_IF(!stage.inUse, RetCode::ERROR_ILLEGAL_PARAMETER, "stageNotInUse"); + + RetCode ret = RetCode::SUCCESS; + std::unordered_set channelSet; + for (auto& cfg : cfgs) { + if (cfg.channel < 0 || (size_t)cfg.channel >= mChannelCount) { + LOG(ERROR) << __func__ << " skip illegal channel config " << cfg.toString(); + ret = RetCode::ERROR_ILLEGAL_PARAMETER; + continue; + } + if (0 != channelSet.count(cfg.channel)) { + LOG(WARNING) << __func__ << " duplicated channel " << cfg.channel; + ret = RetCode::ERROR_ILLEGAL_PARAMETER; + } else { + channelSet.insert(cfg.channel); + } + targetCfgs[cfg.channel] = cfg; + } + return ret; +} + +RetCode DynamicsProcessingSwContext::setPreEqChannelCfgs( + const std::vector& cfgs) { + return setChannelCfgs(cfgs, mPreEqChCfgs, mEngineSettings.preEqStage); 
+} + +RetCode DynamicsProcessingSwContext::setPostEqChannelCfgs( + const std::vector& cfgs) { + return setChannelCfgs(cfgs, mPostEqChCfgs, mEngineSettings.postEqStage); +} + +RetCode DynamicsProcessingSwContext::setMbcChannelCfgs( + const std::vector& cfgs) { + return setChannelCfgs(cfgs, mMbcChCfgs, mEngineSettings.mbcStage); +} + +RetCode DynamicsProcessingSwContext::setEqBandCfgs( + const std::vector& cfgs, + std::vector& targetCfgs, + const DynamicsProcessing::StageEnablement& stage, + const std::vector& channelConfig) { + RETURN_VALUE_IF(!stage.inUse, RetCode::ERROR_ILLEGAL_PARAMETER, "eqStageNotInUse"); + + RetCode ret = RetCode::SUCCESS; + std::set> bandSet; + + for (auto& cfg : cfgs) { + if (0 != bandSet.count({cfg.channel, cfg.band})) { + LOG(WARNING) << __func__ << " duplicated band " << cfg.toString(); + ret = RetCode::ERROR_ILLEGAL_PARAMETER; + } else { + bandSet.insert({cfg.channel, cfg.band}); + } + if (!validateEqBandConfig(cfg, mChannelCount, stage.bandCount, channelConfig)) { + LOG(WARNING) << __func__ << " skip invalid band " << cfg.toString(); + ret = RetCode::ERROR_ILLEGAL_PARAMETER; + continue; + } + targetCfgs[cfg.channel * stage.bandCount + cfg.band] = cfg; + } + return ret; +} + +RetCode DynamicsProcessingSwContext::setPreEqBandCfgs( + const std::vector& cfgs) { + return setEqBandCfgs(cfgs, mPreEqChBands, mEngineSettings.preEqStage, mPreEqChCfgs); +} + +RetCode DynamicsProcessingSwContext::setPostEqBandCfgs( + const std::vector& cfgs) { + return setEqBandCfgs(cfgs, mPostEqChBands, mEngineSettings.postEqStage, mPostEqChCfgs); +} + +RetCode DynamicsProcessingSwContext::setMbcBandCfgs( + const std::vector& cfgs) { + RETURN_VALUE_IF(!mEngineSettings.mbcStage.inUse, RetCode::ERROR_ILLEGAL_PARAMETER, + "mbcNotInUse"); + + RetCode ret = RetCode::SUCCESS; + std::set> bandSet; + + int bandCount = mEngineSettings.mbcStage.bandCount; + std::vector filled(mChannelCount * bandCount, false); + for (auto& it : cfgs) { + if (0 != bandSet.count({it.channel, 
it.band})) { + LOG(WARNING) << __func__ << " duplicated band " << it.toString(); + ret = RetCode::ERROR_ILLEGAL_PARAMETER; + } else { + bandSet.insert({it.channel, it.band}); + } + if (!validateMbcBandConfig(it, mChannelCount, mEngineSettings.mbcStage.bandCount, + mMbcChCfgs)) { + LOG(WARNING) << __func__ << " skip invalid band " << it.toString(); + ret = RetCode::ERROR_ILLEGAL_PARAMETER; + continue; + } + mMbcChBands[it.channel * bandCount + it.band] = it; + } + return ret; +} + +RetCode DynamicsProcessingSwContext::setLimiterCfgs( + const std::vector& cfgs) { + RETURN_VALUE_IF(!mEngineSettings.limiterInUse, RetCode::ERROR_ILLEGAL_PARAMETER, + "limiterNotInUse"); + + RetCode ret = RetCode::SUCCESS; + std::unordered_set channelSet; + + for (auto& it : cfgs) { + if (0 != channelSet.count(it.channel)) { + LOG(WARNING) << __func__ << " duplicated channel " << it.channel; + ret = RetCode::ERROR_ILLEGAL_PARAMETER; + } else { + channelSet.insert(it.channel); + } + if (!validateLimiterConfig(it, mChannelCount)) { + LOG(WARNING) << __func__ << " skip invalid limiter " << it.toString(); + ret = RetCode::ERROR_ILLEGAL_PARAMETER; + continue; + } + mLimiterCfgs[it.channel] = it; + } + return ret; +} + +void DynamicsProcessingSwContext::resizeChannels() { + if (mPreEqChCfgs.size() != mChannelCount) { + mPreEqChCfgs.resize(mChannelCount, {.channel = kInvalidChannelId}); + } + if (mPostEqChCfgs.size() != mChannelCount) { + mPostEqChCfgs.resize(mChannelCount, {.channel = kInvalidChannelId}); + } + if (mMbcChCfgs.size() != mChannelCount) { + mMbcChCfgs.resize(mChannelCount, {.channel = kInvalidChannelId}); + } + if (mLimiterCfgs.size() != mChannelCount) { + mLimiterCfgs.resize(mChannelCount, {.channel = kInvalidChannelId}); + } + if (mInputGainCfgs.size() != mChannelCount) { + mInputGainCfgs.resize(mChannelCount, {.channel = kInvalidChannelId}); + } +} + +void DynamicsProcessingSwContext::resizeBands() { + if (mPreEqChBands.size() != (size_t)(mChannelCount * 
mEngineSettings.preEqStage.bandCount)) { + mPreEqChBands.resize(mChannelCount * mEngineSettings.preEqStage.bandCount, + {.channel = kInvalidChannelId}); + } + if (mPostEqChBands.size() != (size_t)(mChannelCount * mEngineSettings.postEqStage.bandCount)) { + mPostEqChBands.resize(mChannelCount * mEngineSettings.postEqStage.bandCount, + {.channel = kInvalidChannelId}); + } + if (mMbcChBands.size() != (size_t)(mChannelCount * mEngineSettings.mbcStage.bandCount)) { + mMbcChBands.resize(mChannelCount * mEngineSettings.mbcStage.bandCount, + {.channel = kInvalidChannelId}); + } +} + +RetCode DynamicsProcessingSwContext::setInputGainCfgs( + const std::vector& cfgs) { + for (const auto& cfg : cfgs) { + RETURN_VALUE_IF(cfg.channel < 0 || (size_t)cfg.channel >= mChannelCount, + RetCode::ERROR_ILLEGAL_PARAMETER, "invalidChannel"); + mInputGainCfgs[cfg.channel] = cfg; + } + return RetCode::SUCCESS; +} + +std::vector DynamicsProcessingSwContext::getInputGainCfgs() { + std::vector ret; + std::copy_if(mInputGainCfgs.begin(), mInputGainCfgs.end(), std::back_inserter(ret), + [&](const auto& gain) { return gain.channel != kInvalidChannelId; }); + return ret; +} + +bool DynamicsProcessingSwContext::validateStageEnablement( + const DynamicsProcessing::StageEnablement& enablement) { + return !enablement.inUse || (enablement.inUse && enablement.bandCount > 0); +} + +bool DynamicsProcessingSwContext::validateEngineConfig( + const DynamicsProcessing::EngineArchitecture& engine) { + return engine.preferredProcessingDurationMs >= 0 && + validateStageEnablement(engine.preEqStage) && + validateStageEnablement(engine.postEqStage) && validateStageEnablement(engine.mbcStage); +} + +bool DynamicsProcessingSwContext::validateEqBandConfig( + const DynamicsProcessing::EqBandConfig& band, int maxChannel, int maxBand, + const std::vector& channelConfig) { + return band.channel >= 0 && band.channel < maxChannel && + (size_t)band.channel < channelConfig.size() && channelConfig[band.channel].enable && + 
band.band >= 0 && band.band < maxBand; +} + +bool DynamicsProcessingSwContext::validateMbcBandConfig( + const DynamicsProcessing::MbcBandConfig& band, int maxChannel, int maxBand, + const std::vector& channelConfig) { + return band.channel >= 0 && band.channel < maxChannel && + (size_t)band.channel < channelConfig.size() && channelConfig[band.channel].enable && + band.band >= 0 && band.band < maxBand && band.attackTimeMs >= 0 && + band.releaseTimeMs >= 0 && band.ratio >= 0 && band.thresholdDb <= 0 && + band.kneeWidthDb <= 0 && band.noiseGateThresholdDb <= 0 && band.expanderRatio >= 0; +} + +bool DynamicsProcessingSwContext::validateLimiterConfig( + const DynamicsProcessing::LimiterConfig& limiter, int maxChannel) { + return limiter.channel >= 0 && limiter.channel < maxChannel && limiter.attackTimeMs >= 0 && + limiter.releaseTimeMs >= 0 && limiter.ratio >= 0 && limiter.thresholdDb <= 0; +} + +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/dynamicProcessing/DynamicsProcessingSw.h b/audio/dynamicProcessing/DynamicsProcessingSw.h new file mode 100644 index 0000000..98edca0 --- /dev/null +++ b/audio/dynamicProcessing/DynamicsProcessingSw.h @@ -0,0 +1,142 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +#include +#include +#include + +#include +#include +#include + +#include "effect-impl/EffectImpl.h" + +namespace aidl::android::hardware::audio::effect { + +class DynamicsProcessingSwContext final : public EffectContext { + public: + DynamicsProcessingSwContext(int statusDepth, const Parameter::Common& common) + : EffectContext(statusDepth, common), + mChannelCount(::aidl::android::hardware::audio::common::getChannelCount( + common.input.base.channelMask)), + mPreEqChCfgs(mChannelCount, {.channel = kInvalidChannelId}), + mPostEqChCfgs(mChannelCount, {.channel = kInvalidChannelId}), + mMbcChCfgs(mChannelCount, {.channel = kInvalidChannelId}), + mLimiterCfgs(mChannelCount, {.channel = kInvalidChannelId}) { + LOG(DEBUG) << __func__; + } + + // utils + RetCode setChannelCfgs(const std::vector& cfgs, + std::vector& targetCfgs, + const DynamicsProcessing::StageEnablement& engineSetting); + + RetCode setEqBandCfgs(const std::vector& cfgs, + std::vector& targetCfgs, + const DynamicsProcessing::StageEnablement& stage, + const std::vector& channelConfig); + + // set params + RetCode setCommon(const Parameter::Common& common) override; + RetCode setEngineArchitecture(const DynamicsProcessing::EngineArchitecture& cfg); + RetCode setPreEqChannelCfgs(const std::vector& cfgs); + RetCode setPostEqChannelCfgs(const std::vector& cfgs); + RetCode setMbcChannelCfgs(const std::vector& cfgs); + RetCode setPreEqBandCfgs(const std::vector& cfgs); + RetCode setPostEqBandCfgs(const std::vector& cfgs); + RetCode setMbcBandCfgs(const std::vector& cfgs); + RetCode setLimiterCfgs(const std::vector& cfgs); + RetCode setInputGainCfgs(const std::vector& cfgs); + + // get params + DynamicsProcessing::EngineArchitecture getEngineArchitecture() { return mEngineSettings; } + std::vector getPreEqChannelCfgs() { return mPreEqChCfgs; } + std::vector getPostEqChannelCfgs() { return mPostEqChCfgs; } + std::vector getMbcChannelCfgs() { return mMbcChCfgs; } + std::vector 
getPreEqBandCfgs() { return mPreEqChBands; } + std::vector getPostEqBandCfgs() { return mPostEqChBands; } + std::vector getMbcBandCfgs() { return mMbcChBands; } + std::vector getLimiterCfgs() { return mLimiterCfgs; } + std::vector getInputGainCfgs(); + + private: + static constexpr int32_t kInvalidChannelId = -1; + size_t mChannelCount = 0; + DynamicsProcessing::EngineArchitecture mEngineSettings; + // Channel config vector with size of mChannelCount + std::vector mPreEqChCfgs; + std::vector mPostEqChCfgs; + std::vector mMbcChCfgs; + std::vector mLimiterCfgs; + std::vector mInputGainCfgs; + // Band config vector with size of mChannelCount * bandCount + std::vector mPreEqChBands; + std::vector mPostEqChBands; + std::vector mMbcChBands; + bool validateStageEnablement(const DynamicsProcessing::StageEnablement& enablement); + bool validateEngineConfig(const DynamicsProcessing::EngineArchitecture& engine); + bool validateEqBandConfig(const DynamicsProcessing::EqBandConfig& band, int maxChannel, + int maxBand, + const std::vector& channelConfig); + bool validateMbcBandConfig(const DynamicsProcessing::MbcBandConfig& band, int maxChannel, + int maxBand, + const std::vector& channelConfig); + bool validateLimiterConfig(const DynamicsProcessing::LimiterConfig& limiter, int maxChannel); + void resizeChannels(); + void resizeBands(); +}; // DynamicsProcessingSwContext + +class DynamicsProcessingSw final : public EffectImpl { + public: + static const std::string kEffectName; + static const Capability kCapability; + static const Descriptor kDescriptor; + DynamicsProcessingSw() { LOG(DEBUG) << __func__; } + ~DynamicsProcessingSw() { + cleanUp(); + LOG(DEBUG) << __func__; + } + + ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override; + ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) + REQUIRES(mImplMutex) override; + ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific) + REQUIRES(mImplMutex) 
override; + + std::shared_ptr createContext(const Parameter::Common& common) + REQUIRES(mImplMutex) override; + RetCode releaseContext() REQUIRES(mImplMutex) override; + + IEffect::Status effectProcessImpl(float* in, float* out, int samples) + REQUIRES(mImplMutex) override; + std::string getEffectName() override { return kEffectName; }; + + private: + static const DynamicsProcessing::EqBandConfig kEqBandConfigMin; + static const DynamicsProcessing::EqBandConfig kEqBandConfigMax; + static const Range::DynamicsProcessingRange kPreEqBandRange; + static const Range::DynamicsProcessingRange kPostEqBandRange; + static const std::vector kRanges; + std::shared_ptr mContext GUARDED_BY(mImplMutex); + ndk::ScopedAStatus getParameterDynamicsProcessing(const DynamicsProcessing::Tag& tag, + Parameter::Specific* specific) + REQUIRES(mImplMutex); + +}; // DynamicsProcessingSw + +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/envReverb/Android.bp b/audio/envReverb/Android.bp new file mode 100644 index 0000000..70da2bd --- /dev/null +++ b/audio/envReverb/Android.bp @@ -0,0 +1,40 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package { + default_team: "trendy_team_android_media_audio_framework", + // See: http://go/android-license-faq + // A large-scale-change added 'default_applicable_licenses' to import + // all of the 'license_kinds' from "hardware_interfaces_license" + // to get the below license kinds: + // SPDX-license-identifier-Apache-2.0 + default_applicable_licenses: ["hardware_interfaces_license"], +} + +cc_library_shared { + name: "libenvreverbsw", + defaults: [ + "aidlaudioeffectservice_defaults", + ], + srcs: [ + "EnvReverbSw.cpp", + ":effectCommonFile", + ], + relative_install_path: "soundfx", + visibility: [ + "//hardware/interfaces/audio/aidl/default:__subpackages__", + ], +} diff --git a/audio/envReverb/EnvReverbSw.cpp b/audio/envReverb/EnvReverbSw.cpp new file mode 100644 index 0000000..7937a6a --- /dev/null +++ b/audio/envReverb/EnvReverbSw.cpp @@ -0,0 +1,327 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include + +#define LOG_TAG "AHAL_EnvReverbSw" +#include +#include +#include + +#include "EnvReverbSw.h" + +using aidl::android::hardware::audio::effect::Descriptor; +using aidl::android::hardware::audio::effect::EnvReverbSw; +using aidl::android::hardware::audio::effect::getEffectImplUuidEnvReverbSw; +using aidl::android::hardware::audio::effect::getEffectTypeUuidEnvReverb; +using aidl::android::hardware::audio::effect::IEffect; +using aidl::android::hardware::audio::effect::State; +using aidl::android::media::audio::common::AudioUuid; + +extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid, + std::shared_ptr* instanceSpp) { + if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidEnvReverbSw()) { + LOG(ERROR) << __func__ << "uuid not supported"; + return EX_ILLEGAL_ARGUMENT; + } + if (instanceSpp) { + *instanceSpp = ndk::SharedRefBase::make(); + LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created"; + return EX_NONE; + } else { + LOG(ERROR) << __func__ << " invalid input parameter!"; + return EX_ILLEGAL_ARGUMENT; + } +} + +extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) { + if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidEnvReverbSw()) { + LOG(ERROR) << __func__ << "uuid not supported"; + return EX_ILLEGAL_ARGUMENT; + } + *_aidl_return = EnvReverbSw::kDescriptor; + return EX_NONE; +} + +namespace aidl::android::hardware::audio::effect { + +const std::string EnvReverbSw::kEffectName = "EnvReverbSw"; + +const std::vector EnvReverbSw::kRanges = { + MAKE_RANGE(EnvironmentalReverb, roomLevelMb, -6000, 0), + MAKE_RANGE(EnvironmentalReverb, roomHfLevelMb, -4000, 0), + MAKE_RANGE(EnvironmentalReverb, decayTimeMs, 0, 7000), + MAKE_RANGE(EnvironmentalReverb, decayHfRatioPm, 100, 2000), + MAKE_RANGE(EnvironmentalReverb, reflectionsLevelMb, -6000, 0), + MAKE_RANGE(EnvironmentalReverb, reflectionsDelayMs, 0, 65), + MAKE_RANGE(EnvironmentalReverb, 
levelMb, -6000, 0), + MAKE_RANGE(EnvironmentalReverb, delayMs, 0, 65), + MAKE_RANGE(EnvironmentalReverb, diffusionPm, 0, 1000), + MAKE_RANGE(EnvironmentalReverb, densityPm, 0, 1000)}; + +const Capability EnvReverbSw::kCapability = { + .range = Range::make(EnvReverbSw::kRanges)}; + +const Descriptor EnvReverbSw::kDescriptor = { + .common = {.id = {.type = getEffectTypeUuidEnvReverb(), + .uuid = getEffectImplUuidEnvReverbSw(), + .proxy = std::nullopt}, + .flags = {.type = Flags::Type::INSERT, + .insert = Flags::Insert::FIRST, + .volume = Flags::Volume::CTRL}, + .name = EnvReverbSw::kEffectName, + .implementor = "The Android Open Source Project"}, + .capability = EnvReverbSw::kCapability}; + +ndk::ScopedAStatus EnvReverbSw::getDescriptor(Descriptor* _aidl_return) { + LOG(DEBUG) << __func__ << kDescriptor.toString(); + *_aidl_return = kDescriptor; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus EnvReverbSw::setParameterSpecific(const Parameter::Specific& specific) { + RETURN_IF(Parameter::Specific::environmentalReverb != specific.getTag(), EX_ILLEGAL_ARGUMENT, + "EffectNotSupported"); + + auto& erParam = specific.get(); + RETURN_IF(!inRange(erParam, kRanges), EX_ILLEGAL_ARGUMENT, "outOfRange"); + auto tag = erParam.getTag(); + switch (tag) { + case EnvironmentalReverb::roomLevelMb: { + RETURN_IF(mContext->setErRoomLevel(erParam.get()) != + RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "setRoomLevelFailed"); + return ndk::ScopedAStatus::ok(); + } + case EnvironmentalReverb::roomHfLevelMb: { + RETURN_IF( + mContext->setErRoomHfLevel(erParam.get()) != + RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "setRoomHfLevelFailed"); + return ndk::ScopedAStatus::ok(); + } + case EnvironmentalReverb::decayTimeMs: { + RETURN_IF(mContext->setErDecayTime(erParam.get()) != + RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "setDecayTimeFailed"); + return ndk::ScopedAStatus::ok(); + } + case EnvironmentalReverb::decayHfRatioPm: { + RETURN_IF( + mContext->setErDecayHfRatio( + erParam.get()) != 
RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "setDecayHfRatioFailed"); + return ndk::ScopedAStatus::ok(); + } + case EnvironmentalReverb::reflectionsLevelMb: { + RETURN_IF(mContext->setErReflectionsLevel( + erParam.get()) != + RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "setReflectionsLevelFailed"); + return ndk::ScopedAStatus::ok(); + } + case EnvironmentalReverb::reflectionsDelayMs: { + RETURN_IF(mContext->setErReflectionsDelay( + erParam.get()) != + RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "setReflectionsDelayFailed"); + return ndk::ScopedAStatus::ok(); + } + case EnvironmentalReverb::levelMb: { + RETURN_IF(mContext->setErLevel(erParam.get()) != + RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "setLevelFailed"); + return ndk::ScopedAStatus::ok(); + } + case EnvironmentalReverb::delayMs: { + RETURN_IF(mContext->setErDelay(erParam.get()) != + RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "setDelayFailed"); + return ndk::ScopedAStatus::ok(); + } + case EnvironmentalReverb::diffusionPm: { + RETURN_IF(mContext->setErDiffusion(erParam.get()) != + RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "setDiffusionFailed"); + return ndk::ScopedAStatus::ok(); + } + case EnvironmentalReverb::densityPm: { + RETURN_IF(mContext->setErDensity(erParam.get()) != + RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "setDensityFailed"); + return ndk::ScopedAStatus::ok(); + } + case EnvironmentalReverb::bypass: { + RETURN_IF(mContext->setErBypass(erParam.get()) != + RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "setBypassFailed"); + return ndk::ScopedAStatus::ok(); + } + default: { + LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage( + EX_ILLEGAL_ARGUMENT, "EnvironmentalReverbTagNotSupported"); + } + } +} + +ndk::ScopedAStatus EnvReverbSw::getParameterSpecific(const Parameter::Id& id, + Parameter::Specific* specific) { + auto tag = id.getTag(); + RETURN_IF(Parameter::Id::environmentalReverbTag != tag, EX_ILLEGAL_ARGUMENT, "wrongIdTag"); + auto erId = 
id.get(); + auto erIdTag = erId.getTag(); + switch (erIdTag) { + case EnvironmentalReverb::Id::commonTag: + return getParameterEnvironmentalReverb(erId.get(), + specific); + default: + LOG(ERROR) << __func__ << " unsupported tag: " << toString(erIdTag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage( + EX_ILLEGAL_ARGUMENT, "EnvironmentalReverbTagNotSupported"); + } +} + +ndk::ScopedAStatus EnvReverbSw::getParameterEnvironmentalReverb(const EnvironmentalReverb::Tag& tag, + Parameter::Specific* specific) { + RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext"); + EnvironmentalReverb erParam; + switch (tag) { + case EnvironmentalReverb::roomLevelMb: { + erParam.set(mContext->getErRoomLevel()); + break; + } + case EnvironmentalReverb::roomHfLevelMb: { + erParam.set(mContext->getErRoomHfLevel()); + break; + } + case EnvironmentalReverb::decayTimeMs: { + erParam.set(mContext->getErDecayTime()); + break; + } + case EnvironmentalReverb::decayHfRatioPm: { + erParam.set(mContext->getErDecayHfRatio()); + break; + } + case EnvironmentalReverb::reflectionsLevelMb: { + erParam.set(mContext->getErReflectionsLevel()); + break; + } + case EnvironmentalReverb::reflectionsDelayMs: { + erParam.set(mContext->getErReflectionsDelay()); + break; + } + case EnvironmentalReverb::levelMb: { + erParam.set(mContext->getErLevel()); + break; + } + case EnvironmentalReverb::delayMs: { + erParam.set(mContext->getErDelay()); + break; + } + case EnvironmentalReverb::diffusionPm: { + erParam.set(mContext->getErDiffusion()); + break; + } + case EnvironmentalReverb::densityPm: { + erParam.set(mContext->getErDensity()); + break; + } + case EnvironmentalReverb::bypass: { + erParam.set(mContext->getErBypass()); + break; + } + default: { + LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage( + EX_ILLEGAL_ARGUMENT, "EnvironmentalReverbTagNotSupported"); + } + } + + specific->set(erParam); + return ndk::ScopedAStatus::ok(); +} 
+ +std::shared_ptr EnvReverbSw::createContext(const Parameter::Common& common) { + if (mContext) { + LOG(DEBUG) << __func__ << " context already exist"; + } else { + mContext = std::make_shared(1 /* statusFmqDepth */, common); + } + + return mContext; +} + +RetCode EnvReverbSw::releaseContext() { + if (mContext) { + mContext.reset(); + } + return RetCode::SUCCESS; +} + +// Processing method running in EffectWorker thread. +IEffect::Status EnvReverbSw::effectProcessImpl(float* in, float* out, int samples) { + // TODO: get data buffer and process. + LOG(DEBUG) << __func__ << " in " << in << " out " << out << " samples " << samples; + for (int i = 0; i < samples; i++) { + *out++ = *in++; + } + return {STATUS_OK, samples, samples}; +} + +RetCode EnvReverbSwContext::setErRoomLevel(int roomLevel) { + mRoomLevel = roomLevel; + return RetCode::SUCCESS; +} + +RetCode EnvReverbSwContext::setErRoomHfLevel(int roomHfLevel) { + mRoomHfLevel = roomHfLevel; + return RetCode::SUCCESS; +} + +RetCode EnvReverbSwContext::setErDecayTime(int decayTime) { + mDecayTime = decayTime; + return RetCode::SUCCESS; +} + +RetCode EnvReverbSwContext::setErDecayHfRatio(int decayHfRatio) { + mDecayHfRatio = decayHfRatio; + return RetCode::SUCCESS; +} + +RetCode EnvReverbSwContext::setErLevel(int level) { + mLevel = level; + return RetCode::SUCCESS; +} + +RetCode EnvReverbSwContext::setErDelay(int delay) { + mDelay = delay; + return RetCode::SUCCESS; +} + +RetCode EnvReverbSwContext::setErDiffusion(int diffusion) { + mDiffusion = diffusion; + return RetCode::SUCCESS; +} + +RetCode EnvReverbSwContext::setErDensity(int density) { + mDensity = density; + return RetCode::SUCCESS; +} + +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/envReverb/EnvReverbSw.h b/audio/envReverb/EnvReverbSw.h new file mode 100644 index 0000000..367462b --- /dev/null +++ b/audio/envReverb/EnvReverbSw.h @@ -0,0 +1,122 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under 
the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include +#include +#include +#include + +#include "effect-impl/EffectImpl.h" + +namespace aidl::android::hardware::audio::effect { + +class EnvReverbSwContext final : public EffectContext { + public: + EnvReverbSwContext(int statusDepth, const Parameter::Common& common) + : EffectContext(statusDepth, common) { + LOG(DEBUG) << __func__; + } + + RetCode setErRoomLevel(int roomLevel); + int getErRoomLevel() const { return mRoomLevel; } + + RetCode setErRoomHfLevel(int roomHfLevel); + int getErRoomHfLevel() const { return mRoomHfLevel; } + + RetCode setErDecayTime(int decayTime); + int getErDecayTime() const { return mDecayTime; } + + RetCode setErDecayHfRatio(int decayHfRatio); + int getErDecayHfRatio() const { return mDecayHfRatio; } + + RetCode setErLevel(int level); + int getErLevel() const { return mLevel; } + + RetCode setErDelay(int delay); + int getErDelay() const { return mDelay; } + + RetCode setErDiffusion(int diffusion); + int getErDiffusion() const { return mDiffusion; } + + RetCode setErDensity(int density); + int getErDensity() const { return mDensity; } + + RetCode setErBypass(bool bypass) { + mBypass = bypass; + return RetCode::SUCCESS; + } + bool getErBypass() const { return mBypass; } + + RetCode setErReflectionsDelay(int delay) { + mReflectionsDelayMs = delay; + return RetCode::SUCCESS; + } + bool getErReflectionsDelay() const { return mReflectionsDelayMs; } + + RetCode 
setErReflectionsLevel(int level) { + mReflectionsLevelMb = level; + return RetCode::SUCCESS; + } + bool getErReflectionsLevel() const { return mReflectionsLevelMb; } + + private: + int mRoomLevel = -6000; // Default room level + int mRoomHfLevel = 0; // Default room hf level + int mDecayTime = 1000; // Default decay time + int mDecayHfRatio = 500; // Default decay hf ratio + int mLevel = -6000; // Default level + int mDelay = 40; // Default delay + int mReflectionsLevelMb = 0; + int mReflectionsDelayMs = 0; + int mDiffusion = 1000; // Default diffusion + int mDensity = 1000; // Default density + bool mBypass = false; // Default bypass +}; + +class EnvReverbSw final : public EffectImpl { + public: + static const std::string kEffectName; + static const Capability kCapability; + static const Descriptor kDescriptor; + EnvReverbSw() { LOG(DEBUG) << __func__; } + ~EnvReverbSw() { + cleanUp(); + LOG(DEBUG) << __func__; + } + + ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override; + ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) + REQUIRES(mImplMutex) override; + ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific) + REQUIRES(mImplMutex) override; + + std::shared_ptr createContext(const Parameter::Common& common) + REQUIRES(mImplMutex) override; + RetCode releaseContext() REQUIRES(mImplMutex) override; + + IEffect::Status effectProcessImpl(float* in, float* out, int samples) override; + std::string getEffectName() override { return kEffectName; } + + private: + static const std::vector kRanges; + std::shared_ptr mContext GUARDED_BY(mImplMutex); + ndk::ScopedAStatus getParameterEnvironmentalReverb(const EnvironmentalReverb::Tag& tag, + Parameter::Specific* specific) + REQUIRES(mImplMutex); +}; +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/equalizer/Android.bp b/audio/equalizer/Android.bp new file mode 100644 index 0000000..da2663c --- /dev/null +++ 
b/audio/equalizer/Android.bp @@ -0,0 +1,40 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package { + default_team: "trendy_team_android_media_audio_framework", + // See: http://go/android-license-faq + // A large-scale-change added 'default_applicable_licenses' to import + // all of the 'license_kinds' from "hardware_interfaces_license" + // to get the below license kinds: + // SPDX-license-identifier-Apache-2.0 + default_applicable_licenses: ["hardware_interfaces_license"], +} + +cc_library_shared { + name: "libequalizersw", + defaults: [ + "aidlaudioeffectservice_defaults", + ], + srcs: [ + "EqualizerSw.cpp", + ":effectCommonFile", + ], + relative_install_path: "soundfx", + visibility: [ + "//hardware/interfaces/audio/aidl/default:__subpackages__", + ], +} diff --git a/audio/equalizer/EqualizerSw.cpp b/audio/equalizer/EqualizerSw.cpp new file mode 100644 index 0000000..640b3ba --- /dev/null +++ b/audio/equalizer/EqualizerSw.cpp @@ -0,0 +1,218 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include + +#define LOG_TAG "AHAL_EqualizerSw" +#include +#include +#include + +#include "EqualizerSw.h" + +using aidl::android::hardware::audio::effect::Descriptor; +using aidl::android::hardware::audio::effect::EqualizerSw; +using aidl::android::hardware::audio::effect::getEffectImplUuidEqualizerSw; +using aidl::android::hardware::audio::effect::getEffectTypeUuidEqualizer; +using aidl::android::hardware::audio::effect::IEffect; +using aidl::android::hardware::audio::effect::State; +using aidl::android::media::audio::common::AudioUuid; + +extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid, + std::shared_ptr* instanceSpp) { + if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidEqualizerSw()) { + LOG(ERROR) << __func__ << "uuid not supported"; + return EX_ILLEGAL_ARGUMENT; + } + if (instanceSpp) { + *instanceSpp = ndk::SharedRefBase::make(); + LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created"; + return EX_NONE; + } else { + LOG(ERROR) << __func__ << " invalid input parameter!"; + return EX_ILLEGAL_ARGUMENT; + } +} + +extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) { + if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidEqualizerSw()) { + LOG(ERROR) << __func__ << "uuid not supported"; + return EX_ILLEGAL_ARGUMENT; + } + *_aidl_return = EqualizerSw::kDesc; + return EX_NONE; +} + +namespace aidl::android::hardware::audio::effect { + +const std::string EqualizerSw::kEffectName = "EqualizerSw"; + +const std::vector 
EqualizerSw::kBandFrequency = {{0, 30000, 120000}, + {1, 120001, 460000}, + {2, 460001, 1800000}, + {3, 1800001, 7000000}, + {4, 7000001, 20000000}}; +const std::vector EqualizerSw::kPresets = { + {0, "Normal"}, {1, "Classical"}, {2, "Dance"}, {3, "Flat"}, {4, "Folk"}, + {5, "Heavy Metal"}, {6, "Hip Hop"}, {7, "Jazz"}, {8, "Pop"}, {9, "Rock"}}; + +/** + * Use the same min and max to build a capability represented by Range. + */ +const std::vector EqualizerSw::kRanges = { + MAKE_RANGE(Equalizer, preset, 0, EqualizerSw::kPresets.size() - 1), + MAKE_RANGE(Equalizer, bandLevels, + std::vector{ + Equalizer::BandLevel({.index = 0, .levelMb = -15})}, + std::vector{Equalizer::BandLevel( + {.index = EqualizerSwContext::kMaxBandNumber - 1, .levelMb = 15})}), + /* capability definition */ + MAKE_RANGE(Equalizer, bandFrequencies, EqualizerSw::kBandFrequency, + EqualizerSw::kBandFrequency), + MAKE_RANGE(Equalizer, presets, EqualizerSw::kPresets, EqualizerSw::kPresets), + /* centerFreqMh is get only, set invalid range min > max */ + MAKE_RANGE(Equalizer, centerFreqMh, std::vector({1}), std::vector({0}))}; + +const Capability EqualizerSw::kEqCap = {.range = EqualizerSw::kRanges}; +const Descriptor EqualizerSw::kDesc = {.common = {.id = {.type = getEffectTypeUuidEqualizer(), + .uuid = getEffectImplUuidEqualizerSw()}, + .flags = {.type = Flags::Type::INSERT, + .insert = Flags::Insert::FIRST, + .volume = Flags::Volume::CTRL}, + .name = EqualizerSw::kEffectName, + .implementor = "The Android Open Source Project"}, + .capability = EqualizerSw::kEqCap}; + +ndk::ScopedAStatus EqualizerSw::getDescriptor(Descriptor* _aidl_return) { + LOG(DEBUG) << __func__ << kDesc.toString(); + *_aidl_return = kDesc; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus EqualizerSw::setParameterSpecific(const Parameter::Specific& specific) { + RETURN_IF(Parameter::Specific::equalizer != specific.getTag(), EX_ILLEGAL_ARGUMENT, + "EffectNotSupported"); + RETURN_IF(!mContext, EX_NULL_POINTER, 
"nullContext"); + + auto& eqParam = specific.get(); + RETURN_IF(!inRange(eqParam, kRanges), EX_ILLEGAL_ARGUMENT, "outOfRange"); + auto tag = eqParam.getTag(); + switch (tag) { + case Equalizer::preset: { + RETURN_IF(mContext->setEqPreset(eqParam.get()) != RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "setBandLevelsFailed"); + return ndk::ScopedAStatus::ok(); + } + case Equalizer::bandLevels: { + RETURN_IF(mContext->setEqBandLevels(eqParam.get()) != + RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "setBandLevelsFailed"); + return ndk::ScopedAStatus::ok(); + } + default: { + LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT, + "EqTagNotSupported"); + } + } + + LOG(ERROR) << __func__ << " unsupported eq param tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT, + "ParamNotSupported"); +} + +ndk::ScopedAStatus EqualizerSw::getParameterSpecific(const Parameter::Id& id, + Parameter::Specific* specific) { + auto tag = id.getTag(); + RETURN_IF(Parameter::Id::equalizerTag != tag, EX_ILLEGAL_ARGUMENT, "wrongIdTag"); + auto eqId = id.get(); + auto eqIdTag = eqId.getTag(); + switch (eqIdTag) { + case Equalizer::Id::commonTag: + return getParameterEqualizer(eqId.get(), specific); + default: + LOG(ERROR) << __func__ << " tag " << toString(eqIdTag) << " not supported"; + return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT, + "EqualizerTagNotSupported"); + } +} + +ndk::ScopedAStatus EqualizerSw::getParameterEqualizer(const Equalizer::Tag& tag, + Parameter::Specific* specific) { + RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext"); + + Equalizer eqParam; + switch (tag) { + case Equalizer::bandLevels: { + eqParam.set(mContext->getEqBandLevels()); + break; + } + case Equalizer::preset: { + eqParam.set(mContext->getEqPreset()); + break; + } + case Equalizer::centerFreqMh: { + eqParam.set(mContext->getCenterFreqs()); + break; + } 
+ case Equalizer::bandFrequencies: { + eqParam.set(kBandFrequency); + break; + } + case Equalizer::presets: { + eqParam.set(kPresets); + break; + } + default: { + LOG(ERROR) << __func__ << " not handled tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT, + "unsupportedTag"); + } + } + + specific->set(eqParam); + return ndk::ScopedAStatus::ok(); +} + +std::shared_ptr EqualizerSw::createContext(const Parameter::Common& common) { + if (mContext) { + LOG(DEBUG) << __func__ << " context already exist"; + } else { + mContext = std::make_shared(1 /* statusFmqDepth */, common); + } + + return mContext; +} + +RetCode EqualizerSw::releaseContext() { + if (mContext) { + mContext.reset(); + } + return RetCode::SUCCESS; +} + +// Processing method running in EffectWorker thread. +IEffect::Status EqualizerSw::effectProcessImpl(float* in, float* out, int samples) { + // TODO: get data buffer and process. + LOG(DEBUG) << __func__ << " in " << in << " out " << out << " samples " << samples; + for (int i = 0; i < samples; i++) { + *out++ = *in++; + } + return {STATUS_OK, samples, samples}; +} + +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/equalizer/EqualizerSw.h b/audio/equalizer/EqualizerSw.h new file mode 100644 index 0000000..caaa129 --- /dev/null +++ b/audio/equalizer/EqualizerSw.h @@ -0,0 +1,122 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include +#include +#include +#include + +#include "effect-impl/EffectImpl.h" + +namespace aidl::android::hardware::audio::effect { + +class EqualizerSwContext final : public EffectContext { + public: + EqualizerSwContext(int statusDepth, const Parameter::Common& common) + : EffectContext(statusDepth, common) { + LOG(DEBUG) << __func__; + } + + RetCode setEqPreset(const int& presetIdx) { + if (presetIdx < 0 || presetIdx >= kMaxPresetNumber) { + return RetCode::ERROR_ILLEGAL_PARAMETER; + } + mPreset = presetIdx; + return RetCode::SUCCESS; + } + int getEqPreset() { return mPreset; } + + RetCode setEqBandLevels(const std::vector& bandLevels) { + if (bandLevels.size() > kMaxBandNumber) { + LOG(ERROR) << __func__ << " return because size exceed " << kMaxBandNumber; + return RetCode::ERROR_ILLEGAL_PARAMETER; + } + RetCode ret = RetCode::SUCCESS; + for (auto& it : bandLevels) { + if (it.index >= kMaxBandNumber || it.index < 0) { + LOG(ERROR) << __func__ << " index illegal, skip: " << it.index << " - " + << it.levelMb; + ret = RetCode::ERROR_ILLEGAL_PARAMETER; + } else { + mBandLevels[it.index] = it.levelMb; + } + } + return ret; + } + + std::vector getEqBandLevels() { + std::vector bandLevels; + for (int i = 0; i < kMaxBandNumber; i++) { + bandLevels.push_back({i, mBandLevels[i]}); + } + return bandLevels; + } + + std::vector getCenterFreqs() { + return {std::begin(kPresetsFrequencies), std::end(kPresetsFrequencies)}; + } + static const int kMaxBandNumber = 5; + static const int kMaxPresetNumber = 10; + static const int kCustomPreset = -1; + + private: + static constexpr std::array kPresetsFrequencies = {60, 230, 910, 3600, + 14000}; + // preset band level + int mPreset = kCustomPreset; + int32_t mBandLevels[kMaxBandNumber] = {3, 0, 0, 0, 3}; + + // Add equalizer specific context for processing here +}; + +class EqualizerSw final : public 
EffectImpl { + public: + static const std::string kEffectName; + static const Capability kEqCap; + static const Descriptor kDesc; + + EqualizerSw() { LOG(DEBUG) << __func__; } + ~EqualizerSw() { + cleanUp(); + LOG(DEBUG) << __func__; + } + + ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override; + ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) + REQUIRES(mImplMutex) override; + ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific) + REQUIRES(mImplMutex) override; + + std::shared_ptr createContext(const Parameter::Common& common) + REQUIRES(mImplMutex) override; + RetCode releaseContext() REQUIRES(mImplMutex) override; + + IEffect::Status effectProcessImpl(float* in, float* out, int samples) + REQUIRES(mImplMutex) override; + std::string getEffectName() override { return kEffectName; } + + private: + static const std::vector kBandFrequency; + static const std::vector kPresets; + static const std::vector kRanges; + ndk::ScopedAStatus getParameterEqualizer(const Equalizer::Tag& tag, + Parameter::Specific* specific) REQUIRES(mImplMutex); + std::shared_ptr mContext; +}; + +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/eraser/Android.bp b/audio/eraser/Android.bp new file mode 100644 index 0000000..c495d8e --- /dev/null +++ b/audio/eraser/Android.bp @@ -0,0 +1,40 @@ +/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package { + default_team: "trendy_team_android_media_audio_framework", + // See: http://go/android-license-faq + // A large-scale-change added 'default_applicable_licenses' to import + // all of the 'license_kinds' from "hardware_interfaces_license" + // to get the below license kinds: + // SPDX-license-identifier-Apache-2.0 + default_applicable_licenses: ["hardware_interfaces_license"], +} + +cc_library_shared { + name: "liberasersw", + defaults: [ + "aidlaudioeffectservice_defaults", + ], + srcs: [ + "Eraser.cpp", + ":effectCommonFile", + ], + relative_install_path: "soundfx", + visibility: [ + "//hardware/interfaces/audio/aidl/default:__subpackages__", + ], +} diff --git a/audio/eraser/Eraser.cpp b/audio/eraser/Eraser.cpp new file mode 100644 index 0000000..59cc9a2 --- /dev/null +++ b/audio/eraser/Eraser.cpp @@ -0,0 +1,270 @@ +/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "AHAL_Eraser" + +#include "Eraser.h" + +#include +#include + +#include + +using aidl::android::hardware::audio::common::getChannelCount; +using aidl::android::hardware::audio::effect::Descriptor; +using aidl::android::hardware::audio::effect::EraserSw; +using aidl::android::hardware::audio::effect::getEffectImplUuidEraserSw; +using aidl::android::hardware::audio::effect::getEffectTypeUuidEraser; +using aidl::android::hardware::audio::effect::IEffect; +using aidl::android::hardware::audio::effect::State; +using aidl::android::media::audio::common::AudioChannelLayout; +using aidl::android::media::audio::common::AudioUuid; + +extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid, + std::shared_ptr* instanceSpp) { + if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidEraserSw()) { + LOG(ERROR) << __func__ << "uuid not supported"; + return EX_ILLEGAL_ARGUMENT; + } + if (!instanceSpp) { + LOG(ERROR) << __func__ << " invalid input parameter!"; + return EX_ILLEGAL_ARGUMENT; + } + + *instanceSpp = ndk::SharedRefBase::make(); + LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created"; + return EX_NONE; +} + +extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) { + if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidEraserSw()) { + LOG(ERROR) << __func__ << "uuid not supported"; + return EX_ILLEGAL_ARGUMENT; + } + *_aidl_return = EraserSw::kDescriptor; + return EX_NONE; +} + +namespace aidl::android::hardware::audio::effect { + +const std::string EraserSw::kEffectName = "EraserSw"; +const Descriptor EraserSw::kDescriptor = { + .common = {.id = {.type = getEffectTypeUuidEraser(), .uuid = getEffectImplUuidEraserSw()}, + .flags = {.type = Flags::Type::INSERT, + .insert = Flags::Insert::FIRST, + .hwAcceleratorMode = Flags::HardwareAccelerator::NONE}, + .name = EraserSw::kEffectName, + .implementor = "The Android Open Source Project"}}; + +ndk::ScopedAStatus 
EraserSw::getDescriptor(Descriptor* _aidl_return) { + LOG(DEBUG) << __func__ << kDescriptor.toString(); + *_aidl_return = kDescriptor; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus EraserSw::setParameterSpecific(const Parameter::Specific& specific) { + RETURN_IF(Parameter::Specific::eraser != specific.getTag(), EX_ILLEGAL_ARGUMENT, + "EffectNotSupported"); + RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext"); + + auto& param = specific.get(); + return mContext->setParam(param.getTag(), param); +} + +ndk::ScopedAStatus EraserSw::getParameterSpecific(const Parameter::Id& id, + Parameter::Specific* specific) { + RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext"); + + auto tag = id.getTag(); + RETURN_IF(Parameter::Id::eraserTag != tag, EX_ILLEGAL_ARGUMENT, "wrongIdTag"); + auto eraserId = id.get(); + auto eraserTag = eraserId.getTag(); + switch (eraserTag) { + case Eraser::Id::commonTag: { + auto specificTag = eraserId.get(); + std::optional param = mContext->getParam(specificTag); + if (!param.has_value()) { + return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT, + "EraserTagNotSupported"); + } + specific->set(param.value()); + break; + } + default: { + LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT, + "EraserTagNotSupported"); + } + } + return ndk::ScopedAStatus::ok(); +} + +std::shared_ptr EraserSw::createContext(const Parameter::Common& common) { + if (mContext) { + LOG(DEBUG) << __func__ << " context already exist"; + } else { + mContext = std::make_shared(1 /* statusFmqDepth */, common); + } + return mContext; +} + +RetCode EraserSw::releaseContext() { + if (mContext) { + mContext.reset(); + } + return RetCode::SUCCESS; +} + +EraserSw::~EraserSw() { + cleanUp(); + LOG(DEBUG) << __func__; +} + +ndk::ScopedAStatus EraserSw::command(CommandId command) { + std::lock_guard lg(mImplMutex); + RETURN_IF(mState == State::INIT, 
EX_ILLEGAL_STATE, "instanceNotOpen"); + + switch (command) { + case CommandId::START: + RETURN_OK_IF(mState == State::PROCESSING); + mState = State::PROCESSING; + mContext->enable(); + startThread(); + RETURN_IF(notifyEventFlag(mDataMqNotEmptyEf) != RetCode::SUCCESS, EX_ILLEGAL_STATE, + "notifyEventFlagNotEmptyFailed"); + break; + case CommandId::STOP: + RETURN_OK_IF(mState == State::IDLE || mState == State::DRAINING); + if (mVersion < kDrainSupportedVersion) { + mState = State::IDLE; + stopThread(); + mContext->disable(); + } else { + mState = State::DRAINING; + startDraining(); + mContext->startDraining(); + } + RETURN_IF(notifyEventFlag(mDataMqNotEmptyEf) != RetCode::SUCCESS, EX_ILLEGAL_STATE, + "notifyEventFlagNotEmptyFailed"); + break; + case CommandId::RESET: + mState = State::IDLE; + RETURN_IF(notifyEventFlag(mDataMqNotEmptyEf) != RetCode::SUCCESS, EX_ILLEGAL_STATE, + "notifyEventFlagNotEmptyFailed"); + stopThread(); + mImplContext->disable(); + mImplContext->reset(); + mImplContext->resetBuffer(); + break; + default: + LOG(ERROR) << getEffectNameWithVersion() << __func__ << " instance still processing"; + return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT, + "CommandIdNotSupported"); + } + LOG(VERBOSE) << getEffectNameWithVersion() << __func__ + << " transfer to state: " << toString(mState); + return ndk::ScopedAStatus::ok(); +} + +// Processing method running in EffectWorker thread. 
+IEffect::Status EraserSw::effectProcessImpl(float* in, float* out, int samples) { + RETURN_VALUE_IF(!mContext, (IEffect::Status{EX_NULL_POINTER, 0, 0}), "nullContext"); + IEffect::Status procStatus{STATUS_NOT_ENOUGH_DATA, 0, 0}; + procStatus = mContext->process(in, out, samples); + if (mState == State::DRAINING && procStatus.status == STATUS_NOT_ENOUGH_DATA) { + drainingComplete_l(); + } + + return procStatus; +} + +void EraserSw::drainingComplete_l() { + if (mState != State::DRAINING) return; + + LOG(DEBUG) << getEffectNameWithVersion() << __func__; + finishDraining(); + mState = State::IDLE; +} + +EraserSwContext::EraserSwContext(int statusDepth, const Parameter::Common& common) + : EffectContext(statusDepth, common) { + LOG(DEBUG) << __func__; +} + +EraserSwContext::~EraserSwContext() { + LOG(DEBUG) << __func__; +} + +template +std::optional EraserSwContext::getParam(TAG tag) { + if (mParamsMap.find(tag) != mParamsMap.end()) { + return mParamsMap.at(tag); + } + return std::nullopt; +} + +template +ndk::ScopedAStatus EraserSwContext::setParam(TAG tag, Eraser eraser) { + mParamsMap[tag] = eraser; + return ndk::ScopedAStatus::ok(); +} + +IEffect::Status EraserSwContext::process(float* in, float* out, int samples) { + LOG(DEBUG) << __func__ << " in " << in << " out " << out << " samples " << samples; + IEffect::Status procStatus = {EX_ILLEGAL_ARGUMENT, 0, 0}; + const auto inputChannelCount = getChannelCount(mCommon.input.base.channelMask); + const auto outputChannelCount = getChannelCount(mCommon.output.base.channelMask); + if (inputChannelCount < outputChannelCount) { + LOG(ERROR) << __func__ << " invalid channel count, in: " << inputChannelCount + << " out: " << outputChannelCount; + return procStatus; + } + + if (samples <= 0 || 0 != samples % inputChannelCount) { + LOG(ERROR) << __func__ << " invalid samples: " << samples; + return procStatus; + } + + const int iFrames = samples / inputChannelCount; + const float gainPerSample = 1.f / iFrames; + for (int i = 0; 
i < iFrames; i++) { + if (isDraining()) { + const float gain = (iFrames - i - 1) * gainPerSample; + for (size_t c = 0; c < outputChannelCount; c++) { + out[c] = in[c] * gain; + } + } else { + std::memcpy(out, in, outputChannelCount * sizeof(float)); + } + + in += inputChannelCount; + out += outputChannelCount; + } + + // drain for one cycle + if (isDraining()) { + procStatus.status = STATUS_NOT_ENOUGH_DATA; + finishDraining(); + } else { + procStatus.status = STATUS_OK; + } + procStatus.fmqConsumed = static_cast(iFrames * inputChannelCount); + procStatus.fmqProduced = static_cast(iFrames * outputChannelCount); + + return procStatus; +} + +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/eraser/Eraser.h b/audio/eraser/Eraser.h new file mode 100644 index 0000000..7bf2f57 --- /dev/null +++ b/audio/eraser/Eraser.h @@ -0,0 +1,73 @@ +/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +#include "effect-impl/EffectContext.h" +#include "effect-impl/EffectImpl.h" + +#include + +#include +#include + +namespace aidl::android::hardware::audio::effect { + +class EraserSwContext final : public EffectContext { + public: + EraserSwContext(int statusDepth, const Parameter::Common& common); + ~EraserSwContext() final; + + template + std::optional getParam(TAG tag); + template + ndk::ScopedAStatus setParam(TAG tag, Eraser eraser); + + IEffect::Status process(float* in, float* out, int samples); + + private: + std::unordered_map mParamsMap; +}; + +class EraserSw final : public EffectImpl { + public: + static const std::string kEffectName; + static const Capability kCapability; + static const Descriptor kDescriptor; + ~EraserSw() final; + + ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) final; + ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) + REQUIRES(mImplMutex) final; + ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific) + REQUIRES(mImplMutex) final; + + std::shared_ptr createContext(const Parameter::Common& common) + REQUIRES(mImplMutex) final; + RetCode releaseContext() REQUIRES(mImplMutex) final; + + std::string getEffectName() final { return kEffectName; }; + IEffect::Status effectProcessImpl(float* in, float* out, int samples) + REQUIRES(mImplMutex) final; + + ndk::ScopedAStatus command(CommandId command) final; + void drainingComplete_l() REQUIRES(mImplMutex); + + private: + static const std::vector kRanges; + std::shared_ptr mContext GUARDED_BY(mImplMutex); +}; +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/extension/Android.bp b/audio/extension/Android.bp new file mode 100644 index 0000000..79fd857 --- /dev/null +++ b/audio/extension/Android.bp @@ -0,0 +1,40 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this 
file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package { + default_team: "trendy_team_android_media_audio_framework", + // See: http://go/android-license-faq + // A large-scale-change added 'default_applicable_licenses' to import + // all of the 'license_kinds' from "hardware_interfaces_license" + // to get the below license kinds: + // SPDX-license-identifier-Apache-2.0 + default_applicable_licenses: ["hardware_interfaces_license"], +} + +cc_library_shared { + name: "libextensioneffect", + defaults: [ + "aidlaudioeffectservice_defaults", + ], + srcs: [ + "ExtensionEffect.cpp", + ":effectCommonFile", + ], + relative_install_path: "soundfx", + visibility: [ + "//hardware/interfaces/audio/aidl/default:__subpackages__", + ], +} diff --git a/audio/extension/ExtensionEffect.cpp b/audio/extension/ExtensionEffect.cpp new file mode 100644 index 0000000..11916c8 --- /dev/null +++ b/audio/extension/ExtensionEffect.cpp @@ -0,0 +1,143 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include +#include + +#include +#define LOG_TAG "AHAL_ExtensionEffect" +#include +#include +#include + +#include "ExtensionEffect.h" + +using aidl::android::hardware::audio::effect::DefaultExtension; +using aidl::android::hardware::audio::effect::Descriptor; +using aidl::android::hardware::audio::effect::ExtensionEffect; +using aidl::android::hardware::audio::effect::getEffectImplUuidExtension; +using aidl::android::hardware::audio::effect::getEffectTypeUuidExtension; +using aidl::android::hardware::audio::effect::IEffect; +using aidl::android::hardware::audio::effect::Range; +using aidl::android::hardware::audio::effect::VendorExtension; +using aidl::android::media::audio::common::AudioUuid; + +extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid, + std::shared_ptr* instanceSpp) { + if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidExtension()) { + LOG(ERROR) << __func__ << "uuid not supported"; + return EX_ILLEGAL_ARGUMENT; + } + if (instanceSpp) { + *instanceSpp = ndk::SharedRefBase::make(); + LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created"; + return EX_NONE; + } else { + LOG(ERROR) << __func__ << " invalid input parameter!"; + return EX_ILLEGAL_ARGUMENT; + } +} + +extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) { + if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidExtension()) { + LOG(ERROR) << __func__ << "uuid not supported"; + return EX_ILLEGAL_ARGUMENT; + } + *_aidl_return = ExtensionEffect::kDescriptor; + return EX_NONE; +} + +namespace aidl::android::hardware::audio::effect { + +const std::string ExtensionEffect::kEffectName = "ExtensionEffectExample"; + +const Descriptor ExtensionEffect::kDescriptor = { + .common = {.id = {.type = getEffectTypeUuidExtension(), + .uuid = getEffectImplUuidExtension(), + .proxy = std::nullopt}, + .name = ExtensionEffect::kEffectName, + .implementor = "The Android Open Source Project"}}; + 
+ndk::ScopedAStatus ExtensionEffect::getDescriptor(Descriptor* _aidl_return) { + LOG(DEBUG) << __func__ << kDescriptor.toString(); + *_aidl_return = kDescriptor; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus ExtensionEffect::setParameterSpecific(const Parameter::Specific& specific) { + RETURN_IF(Parameter::Specific::vendorEffect != specific.getTag(), EX_ILLEGAL_ARGUMENT, + "EffectNotSupported"); + RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext"); + + auto& vendorEffect = specific.get(); + std::optional defaultExt; + RETURN_IF(STATUS_OK != vendorEffect.extension.getParcelable(&defaultExt), EX_ILLEGAL_ARGUMENT, + "getParcelableFailed"); + RETURN_IF(!defaultExt.has_value(), EX_ILLEGAL_ARGUMENT, "parcelableNull"); + RETURN_IF(mContext->setParams(defaultExt->bytes) != RetCode::SUCCESS, EX_ILLEGAL_ARGUMENT, + "paramNotSupported"); + + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus ExtensionEffect::getParameterSpecific(const Parameter::Id& id, + Parameter::Specific* specific) { + auto tag = id.getTag(); + RETURN_IF(Parameter::Id::vendorEffectTag != tag, EX_ILLEGAL_ARGUMENT, "wrongIdTag"); + auto extensionId = id.get(); + std::optional defaultIdExt; + RETURN_IF(STATUS_OK != extensionId.extension.getParcelable(&defaultIdExt), EX_ILLEGAL_ARGUMENT, + "getIdParcelableFailed"); + RETURN_IF(!defaultIdExt.has_value(), EX_ILLEGAL_ARGUMENT, "parcelableIdNull"); + + VendorExtension extension; + DefaultExtension defaultExt; + defaultExt.bytes = mContext->getParams(defaultIdExt->bytes); + RETURN_IF(STATUS_OK != extension.extension.setParcelable(defaultExt), EX_ILLEGAL_ARGUMENT, + "setParcelableFailed"); + specific->set(extension); + return ndk::ScopedAStatus::ok(); +} + +std::shared_ptr ExtensionEffect::createContext(const Parameter::Common& common) { + if (mContext) { + LOG(DEBUG) << __func__ << " context already exist"; + } else { + mContext = std::make_shared(1 /* statusFmqDepth */, common); + } + return mContext; +} + +RetCode 
ExtensionEffect::releaseContext() { + if (mContext) { + mContext.reset(); + } + return RetCode::SUCCESS; +} + +// Processing method running in EffectWorker thread. +IEffect::Status ExtensionEffect::effectProcessImpl(float* in, float* out, int samples) { + // TODO: get data buffer and process. + LOG(DEBUG) << __func__ << " in " << in << " out " << out << " samples " << samples; + for (int i = 0; i < samples; i++) { + *out++ = *in++; + } + return {STATUS_OK, samples, samples}; +} + +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/extension/ExtensionEffect.h b/audio/extension/ExtensionEffect.h new file mode 100644 index 0000000..b560860 --- /dev/null +++ b/audio/extension/ExtensionEffect.h @@ -0,0 +1,73 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +#include +#include +#include +#include + +#include "effect-impl/EffectImpl.h" + +namespace aidl::android::hardware::audio::effect { + +class ExtensionEffectContext final : public EffectContext { + public: + ExtensionEffectContext(int statusDepth, const Parameter::Common& common) + : EffectContext(statusDepth, common) { + LOG(DEBUG) << __func__; + } + + RetCode setParams(const std::vector& params) { + mParams = params; + return RetCode::SUCCESS; + } + std::vector getParams(std::vector id __unused) const { return mParams; } + + private: + std::vector mParams; +}; + +class ExtensionEffect final : public EffectImpl { + public: + static const std::string kEffectName; + static const Capability kCapability; + static const Descriptor kDescriptor; + ExtensionEffect() { LOG(DEBUG) << __func__; } + ~ExtensionEffect() { + cleanUp(); + LOG(DEBUG) << __func__; + } + + ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override; + ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) + REQUIRES(mImplMutex) override; + ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific) + REQUIRES(mImplMutex) override; + + std::shared_ptr createContext(const Parameter::Common& common) + REQUIRES(mImplMutex) override; + RetCode releaseContext() REQUIRES(mImplMutex) override; + + std::string getEffectName() override { return kEffectName; }; + IEffect::Status effectProcessImpl(float* in, float* out, int samples) + REQUIRES(mImplMutex) override; + + private: + std::shared_ptr mContext GUARDED_BY(mImplMutex); +}; +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/hapticGenerator/Android.bp b/audio/hapticGenerator/Android.bp new file mode 100644 index 0000000..fdd4fc7 --- /dev/null +++ b/audio/hapticGenerator/Android.bp @@ -0,0 +1,40 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not 
use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package { + default_team: "trendy_team_android_media_audio_framework", + // See: http://go/android-license-faq + // A large-scale-change added 'default_applicable_licenses' to import + // all of the 'license_kinds' from "hardware_interfaces_license" + // to get the below license kinds: + // SPDX-license-identifier-Apache-2.0 + default_applicable_licenses: ["hardware_interfaces_license"], +} + +cc_library_shared { + name: "libhapticgeneratorsw", + defaults: [ + "aidlaudioeffectservice_defaults", + ], + srcs: [ + "HapticGeneratorSw.cpp", + ":effectCommonFile", + ], + relative_install_path: "soundfx", + visibility: [ + "//hardware/interfaces/audio/aidl/default", + ], +} diff --git a/audio/hapticGenerator/HapticGeneratorSw.cpp b/audio/hapticGenerator/HapticGeneratorSw.cpp new file mode 100644 index 0000000..7469ab9 --- /dev/null +++ b/audio/hapticGenerator/HapticGeneratorSw.cpp @@ -0,0 +1,194 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include + +#define LOG_TAG "AHAL_HapticGeneratorSw" +#include +#include +#include + +#include "HapticGeneratorSw.h" + +using aidl::android::hardware::audio::effect::Descriptor; +using aidl::android::hardware::audio::effect::getEffectImplUuidHapticGeneratorSw; +using aidl::android::hardware::audio::effect::getEffectTypeUuidHapticGenerator; +using aidl::android::hardware::audio::effect::HapticGeneratorSw; +using aidl::android::hardware::audio::effect::IEffect; +using aidl::android::hardware::audio::effect::State; +using aidl::android::media::audio::common::AudioUuid; + +extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid, + std::shared_ptr* instanceSpp) { + if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidHapticGeneratorSw()) { + LOG(ERROR) << __func__ << "uuid not supported"; + return EX_ILLEGAL_ARGUMENT; + } + if (instanceSpp) { + *instanceSpp = ndk::SharedRefBase::make(); + LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created"; + return EX_NONE; + } else { + LOG(ERROR) << __func__ << " invalid input parameter!"; + return EX_ILLEGAL_ARGUMENT; + } +} + +extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) { + if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidHapticGeneratorSw()) { + LOG(ERROR) << __func__ << "uuid not supported"; + return EX_ILLEGAL_ARGUMENT; + } + *_aidl_return = HapticGeneratorSw::kDescriptor; + return EX_NONE; +} + +namespace aidl::android::hardware::audio::effect { + +const std::string HapticGeneratorSw::kEffectName = "HapticGeneratorSw"; +/* Effect descriptor */ +const Descriptor HapticGeneratorSw::kDescriptor = { + .common = {.id = {.type = getEffectTypeUuidHapticGenerator(), + .uuid = getEffectImplUuidHapticGeneratorSw(), + .proxy = std::nullopt}, + .flags = {.type = Flags::Type::INSERT, + .insert = Flags::Insert::FIRST, 
+ .volume = Flags::Volume::CTRL}, + .name = HapticGeneratorSw::kEffectName, + .implementor = "The Android Open Source Project"}}; + +ndk::ScopedAStatus HapticGeneratorSw::getDescriptor(Descriptor* _aidl_return) { + LOG(DEBUG) << __func__ << kDescriptor.toString(); + *_aidl_return = kDescriptor; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus HapticGeneratorSw::setParameterSpecific(const Parameter::Specific& specific) { + RETURN_IF(Parameter::Specific::hapticGenerator != specific.getTag(), EX_ILLEGAL_ARGUMENT, + "EffectNotSupported"); + RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext"); + + auto& hgParam = specific.get(); + auto tag = hgParam.getTag(); + + switch (tag) { + case HapticGenerator::hapticScales: { + RETURN_IF(mContext->setHgHapticScales(hgParam.get()) != + RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "HapticScaleNotSupported"); + return ndk::ScopedAStatus::ok(); + } + case HapticGenerator::vibratorInfo: { + RETURN_IF(mContext->setHgVibratorInformation( + hgParam.get()) != RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "VibratorInfoNotSupported"); + return ndk::ScopedAStatus::ok(); + } + default: { + LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage( + EX_ILLEGAL_ARGUMENT, "HapticGeneratorTagNotSupported"); + } + } +} + +ndk::ScopedAStatus HapticGeneratorSw::getParameterSpecific(const Parameter::Id& id, + Parameter::Specific* specific) { + auto tag = id.getTag(); + RETURN_IF(Parameter::Id::hapticGeneratorTag != tag, EX_ILLEGAL_ARGUMENT, "wrongIdTag"); + auto hgId = id.get(); + auto hgIdTag = hgId.getTag(); + switch (hgIdTag) { + case HapticGenerator::Id::commonTag: + return getParameterHapticGenerator(hgId.get(), + specific); + default: + LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage( + EX_ILLEGAL_ARGUMENT, "HapticGeneratorTagNotSupported"); + } +} + +ndk::ScopedAStatus 
HapticGeneratorSw::getParameterHapticGenerator(const HapticGenerator::Tag& tag, + Parameter::Specific* specific) { + RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext"); + + HapticGenerator hgParam; + switch (tag) { + case HapticGenerator::hapticScales: { + hgParam.set(mContext->getHgHapticScales()); + break; + } + case HapticGenerator::vibratorInfo: { + hgParam.set(mContext->getHgVibratorInformation()); + break; + } + default: { + LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage( + EX_ILLEGAL_ARGUMENT, "HapticGeneratorTagNotSupported"); + } + } + + specific->set(hgParam); + return ndk::ScopedAStatus::ok(); +} + +std::shared_ptr HapticGeneratorSw::createContext(const Parameter::Common& common) { + if (mContext) { + LOG(DEBUG) << __func__ << " context already exist"; + } else { + mContext = std::make_shared(1 /* statusFmqDepth */, common); + } + + return mContext; +} + +RetCode HapticGeneratorSw::releaseContext() { + if (mContext) { + mContext.reset(); + } + return RetCode::SUCCESS; +} + +// Processing method running in EffectWorker thread. +IEffect::Status HapticGeneratorSw::effectProcessImpl(float* in, float* out, int samples) { + // TODO: get data buffer and process. 
+ LOG(DEBUG) << __func__ << " in " << in << " out " << out << " samples " << samples; + for (int i = 0; i < samples; i++) { + *out++ = *in++; + } + return {STATUS_OK, samples, samples}; +} + +RetCode HapticGeneratorSwContext::setHgHapticScales( + const std::vector& hapticScales) { + // Assume any audio track ID is valid + for (auto& it : hapticScales) { + mHapticScales[it.id] = it; + } + return RetCode::SUCCESS; +} + +std::vector HapticGeneratorSwContext::getHgHapticScales() const { + std::vector result; + std::transform(mHapticScales.begin(), mHapticScales.end(), std::back_inserter(result), + [](auto& scaleIt) { return scaleIt.second; }); + return result; +} + +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/hapticGenerator/HapticGeneratorSw.h b/audio/hapticGenerator/HapticGeneratorSw.h new file mode 100644 index 0000000..47f3848 --- /dev/null +++ b/audio/hapticGenerator/HapticGeneratorSw.h @@ -0,0 +1,90 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +#include +#include +#include + +#include +#include + +#include "effect-impl/EffectImpl.h" + +namespace aidl::android::hardware::audio::effect { + +class HapticGeneratorSwContext final : public EffectContext { + public: + HapticGeneratorSwContext(int statusDepth, const Parameter::Common& common) + : EffectContext(statusDepth, common) { + LOG(DEBUG) << __func__; + } + + RetCode setHgHapticScales(const std::vector& hapticScales); + std::vector getHgHapticScales() const; + + RetCode setHgVibratorInformation(const HapticGenerator::VibratorInformation& vibratorInfo) { + // All float values are valid for resonantFrequencyHz, qFactor, maxAmplitude + mVibratorInformation = vibratorInfo; + return RetCode::SUCCESS; + } + + HapticGenerator::VibratorInformation getHgVibratorInformation() const { + return mVibratorInformation; + } + + private: + static constexpr float DEFAULT_RESONANT_FREQUENCY = 150.0f; + static constexpr float DEFAULT_Q_FACTOR = 1.0f; + static constexpr float DEFAULT_MAX_AMPLITUDE = 0.0f; + std::map mHapticScales; + HapticGenerator::VibratorInformation mVibratorInformation = { + DEFAULT_RESONANT_FREQUENCY, DEFAULT_Q_FACTOR, DEFAULT_MAX_AMPLITUDE}; +}; + +class HapticGeneratorSw final : public EffectImpl { + public: + static const std::string kEffectName; + static const Descriptor kDescriptor; + HapticGeneratorSw() { LOG(DEBUG) << __func__; } + ~HapticGeneratorSw() { + cleanUp(); + LOG(DEBUG) << __func__; + } + + ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override; + ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) + REQUIRES(mImplMutex) override; + ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific) + REQUIRES(mImplMutex) override; + + std::shared_ptr createContext(const Parameter::Common& common) + REQUIRES(mImplMutex) override; + RetCode releaseContext() REQUIRES(mImplMutex) override; + + IEffect::Status effectProcessImpl(float* in, float* out, int 
samples) + REQUIRES(mImplMutex) override; + std::string getEffectName() override { return kEffectName; } + + private: + std::shared_ptr mContext GUARDED_BY(mImplMutex); + + ndk::ScopedAStatus getParameterHapticGenerator(const HapticGenerator::Tag& tag, + Parameter::Specific* specific) + REQUIRES(mImplMutex); +}; +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/include/core-impl/AidlConversionXsdc.h b/audio/include/core-impl/AidlConversionXsdc.h new file mode 100644 index 0000000..c9aefc7 --- /dev/null +++ b/audio/include/core-impl/AidlConversionXsdc.h @@ -0,0 +1,32 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +#include +#include +#include +#include + +namespace aidl::android::hardware::audio::core::internal { + +ConversionResult<::aidl::android::media::audio::common::AudioFormatDescription> +xsdc2aidl_AudioFormatDescription(const std::string& xsdc); + +ConversionResult xsdc2aidl_SurroundSoundConfig( + const ::android::audio::policy::configuration::SurroundSound& xsdc); + +} // namespace aidl::android::hardware::audio::core::internal diff --git a/audio/include/core-impl/AudioPolicyConfigXmlConverter.h b/audio/include/core-impl/AudioPolicyConfigXmlConverter.h new file mode 100644 index 0000000..bff4b4a --- /dev/null +++ b/audio/include/core-impl/AudioPolicyConfigXmlConverter.h @@ -0,0 +1,84 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include + +#include "core-impl/Module.h" +#include "core-impl/XmlConverter.h" + +namespace aidl::android::hardware::audio::core::internal { + +class AudioPolicyConfigXmlConverter { + public: + using ModuleConfiguration = std::pair>; + using ModuleConfigs = std::vector; + + explicit AudioPolicyConfigXmlConverter(const std::string& configFilePath) + : mConverter(configFilePath, &::android::audio::policy::configuration::read) { + if (mConverter.getXsdcConfig()) { + init(); + } + } + + std::string getError() const { return mConverter.getError(); } + ::android::status_t getStatus() const { return mConverter.getStatus(); } + + const ::aidl::android::media::audio::common::AudioHalEngineConfig& getAidlEngineConfig(); + const SurroundSoundConfig& getSurroundSoundConfig(); + std::unique_ptr releaseModuleConfigs(); + + // Public for testing purposes. + static const SurroundSoundConfig& getDefaultSurroundSoundConfig(); + + private: + const std::optional<::android::audio::policy::configuration::AudioPolicyConfiguration>& + getXsdcConfig() const { + return mConverter.getXsdcConfig(); + } + void addVolumeGroupstoEngineConfig(); + void init(); + void mapStreamToVolumeCurve( + const ::android::audio::policy::configuration::Volume& xsdcVolumeCurve); + void mapStreamsToVolumeCurves(); + void parseVolumes(); + ConversionResult<::aidl::android::media::audio::common::AudioHalVolumeCurve> + convertVolumeCurveToAidl( + const ::android::audio::policy::configuration::Volume& xsdcVolumeCurve); + + ::aidl::android::media::audio::common::AudioHalEngineConfig mAidlEngineConfig; + XmlConverter<::android::audio::policy::configuration::AudioPolicyConfiguration> mConverter; + std::unordered_map + mVolumesReferenceMap; + std::unordered_map<::android::audio::policy::configuration::AudioStreamType, + std::vector<::aidl::android::media::audio::common::AudioHalVolumeCurve>> + 
mStreamToVolumeCurvesMap; + std::unique_ptr mModuleConfigurations = std::make_unique(); +}; + +} // namespace aidl::android::hardware::audio::core::internal diff --git a/audio/include/core-impl/Bluetooth.h b/audio/include/core-impl/Bluetooth.h new file mode 100644 index 0000000..002cb19 --- /dev/null +++ b/audio/include/core-impl/Bluetooth.h @@ -0,0 +1,76 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include +#include +#include + +namespace aidl::android::hardware::audio::core { + +class ParamChangeHandler { + public: + ParamChangeHandler() = default; + void registerHandler(std::function handler) { mHandler = handler; } + + protected: + std::function mHandler = nullptr; +}; + +class Bluetooth : public BnBluetooth { + public: + Bluetooth(); + + private: + ndk::ScopedAStatus setScoConfig(const ScoConfig& in_config, ScoConfig* _aidl_return) override; + ndk::ScopedAStatus setHfpConfig(const HfpConfig& in_config, HfpConfig* _aidl_return) override; + + ScoConfig mScoConfig; + HfpConfig mHfpConfig; +}; + +class BluetoothA2dp : public BnBluetoothA2dp, public ParamChangeHandler { + public: + BluetoothA2dp() = default; + ndk::ScopedAStatus isEnabled(bool* _aidl_return) override; + + private: + ndk::ScopedAStatus setEnabled(bool in_enabled) override; + ndk::ScopedAStatus supportsOffloadReconfiguration(bool* _aidl_return) override; + ndk::ScopedAStatus reconfigureOffload( + const 
std::vector<::aidl::android::hardware::audio::core::VendorParameter>& + in_parameters) override; + + bool mEnabled = false; +}; + +class BluetoothLe : public BnBluetoothLe, public ParamChangeHandler { + public: + BluetoothLe() = default; + ndk::ScopedAStatus isEnabled(bool* _aidl_return) override; + + private: + ndk::ScopedAStatus setEnabled(bool in_enabled) override; + ndk::ScopedAStatus supportsOffloadReconfiguration(bool* _aidl_return) override; + ndk::ScopedAStatus reconfigureOffload( + const std::vector<::aidl::android::hardware::audio::core::VendorParameter>& + in_parameters) override; + + bool mEnabled = false; +}; + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/include/core-impl/CapEngineConfigXmlConverter.h b/audio/include/core-impl/CapEngineConfigXmlConverter.h new file mode 100644 index 0000000..e5da4f4 --- /dev/null +++ b/audio/include/core-impl/CapEngineConfigXmlConverter.h @@ -0,0 +1,76 @@ +/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +#include +#include +#include + +#include +#include + +#include "EngineConfigXmlConverter.h" + +namespace aidl::android::hardware::audio::core::internal { + +namespace capconfiguration = ::android::audio::policy::capengine::configuration; +namespace aidlcommon = ::aidl::android::media::audio::common; + +class CapEngineConfigXmlConverter { + public: + explicit CapEngineConfigXmlConverter(const std::string& configFilePath) + : mConverter(configFilePath, &capconfiguration::readConfigurableDomains) { + if (mConverter.getXsdcConfig()) { + init(); + } + } + std::string getError() const { return mConverter.getError(); } + ::android::status_t getStatus() const { return mConverter.getStatus(); } + + std::optional< + std::vector>>& + getAidlCapEngineConfig(); + + private: + ConversionResult> convertSettingToAidl( + const capconfiguration::SettingsType::Configuration& xsdcSetting); + + ConversionResult> convertConfigurationsToAidl( + const std::vector& xsdcConfigurationsVec, + const std::vector& xsdcSettingsVec); + + ConversionResult convertConfigurationToAidl( + const capconfiguration::ConfigurationsType::Configuration& xsdcConfiguration, + const capconfiguration::SettingsType::Configuration& xsdcSettingConfiguration); + + ConversionResult convertParamToAidl( + const capconfiguration::ConfigurableElementSettingsType& element); + + ConversionResult convertConfigurationToAidl( + const capconfiguration::ConfigurationsType::Configuration& xsdcConfiguration); + ConversionResult convertConfigurableDomainToAidl( + const capconfiguration::ConfigurableDomainType& xsdcConfigurableDomain); + + const std::optional& getXsdcConfig() { + return mConverter.getXsdcConfig(); + } + void init(); + + std::optional>> mAidlCapDomains; + XmlConverter mConverter; +}; +} // namespace aidl::android::hardware::audio::core::internal diff --git a/audio/include/core-impl/ChildInterface.h b/audio/include/core-impl/ChildInterface.h new file mode 100644 index 0000000..161f6db --- 
/dev/null +++ b/audio/include/core-impl/ChildInterface.h @@ -0,0 +1,59 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include +#include + +#include +#include +#include + +namespace aidl::android::hardware::audio::core { + +// Helper used for interfaces that require a persistent instance. We hold them via a strong +// pointer. The binder token is retained for a call to 'setMinSchedulerPolicy'. +template +struct ChildInterface : private std::pair, ndk::SpAIBinder> { + ChildInterface() = default; + ChildInterface& operator=(const std::shared_ptr& c) { + return operator=(std::shared_ptr(c)); + } + ChildInterface& operator=(std::shared_ptr&& c) { + this->first = std::move(c); + return *this; + } + explicit operator bool() const { return !!this->first; } + C& operator*() const { return *(this->first); } + C* operator->() const { return this->first; } + std::shared_ptr getPtr() { return this->first; } + // Use 'getInstance' when returning the interface instance. 
+ std::shared_ptr getInstance() { + (void)getBinder(); + return this->first; + } + AIBinder* getBinder() { + if (this->second.get() == nullptr) { + const auto binder = this->second = this->first->asBinder(); + AIBinder_setMinSchedulerPolicy(binder.get(), SCHED_NORMAL, ANDROID_PRIORITY_AUDIO); + AIBinder_setInheritRt(binder.get(), true); + } + return this->second.get(); + } +}; + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/include/core-impl/Config.h b/audio/include/core-impl/Config.h new file mode 100644 index 0000000..63d4b3d --- /dev/null +++ b/audio/include/core-impl/Config.h @@ -0,0 +1,43 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +#include +#include + +#include "AudioPolicyConfigXmlConverter.h" +#include "EngineConfigXmlConverter.h" + +namespace aidl::android::hardware::audio::core { +static const std::string kEngineConfigFileName = "audio_policy_engine_configuration.xml"; + +class Config : public BnConfig { + public: + explicit Config(internal::AudioPolicyConfigXmlConverter& apConverter) + : mAudioPolicyConverter(apConverter) {} + + private: + ndk::ScopedAStatus getSurroundSoundConfig(SurroundSoundConfig* _aidl_return) override; + ndk::ScopedAStatus getEngineConfig( + aidl::android::media::audio::common::AudioHalEngineConfig* _aidl_return) override; + + internal::AudioPolicyConfigXmlConverter& mAudioPolicyConverter; + internal::EngineConfigXmlConverter mEngConfigConverter{ + ::android::audio_find_readable_configuration_file(kEngineConfigFileName.c_str())}; +}; + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/include/core-impl/Configuration.h b/audio/include/core-impl/Configuration.h new file mode 100644 index 0000000..a56c8c9 --- /dev/null +++ b/audio/include/core-impl/Configuration.h @@ -0,0 +1,29 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +#include + +#include "Module.h" + +namespace aidl::android::hardware::audio::core::internal { + +std::unique_ptr getConfiguration(Module::Type moduleType); +std::vector +getStandard16And24BitPcmAudioProfiles(); + +} // namespace aidl::android::hardware::audio::core::internal diff --git a/audio/include/core-impl/DevicePortProxy.h b/audio/include/core-impl/DevicePortProxy.h new file mode 100644 index 0000000..ccb23bb --- /dev/null +++ b/audio/include/core-impl/DevicePortProxy.h @@ -0,0 +1,237 @@ +/* + * Copyright 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include +#include + +#include + +#include +#include +#include +#include +#include +#include +#include + +namespace android::bluetooth::audio::aidl { + +enum class BluetoothStreamState : uint8_t { + DISABLED = 0, // This stream is closing or Bluetooth profiles (A2DP/LE) is disabled + STANDBY, + STARTING, + STARTED, + SUSPENDING, + UNKNOWN, +}; + +std::ostream& operator<<(std::ostream& os, const BluetoothStreamState& state); + +/** + * Proxy for Bluetooth Audio HW Module to communicate with Bluetooth Audio + * Session Control. All methods are not thread safe, so users must acquire a + * lock. Note: currently, getState() of DevicePortProxy is only used for + * verbose logging, it is not locked, so the state may not be synchronized. 
+ */ +class BluetoothAudioPort { + public: + BluetoothAudioPort() = default; + virtual ~BluetoothAudioPort() = default; + + /** + * Fetch output control / data path of BluetoothAudioPort and setup + * callbacks into BluetoothAudioProvider. If registerPort() returns false, the audio + * HAL must delete this BluetoothAudioPort and return EINVAL to caller + */ + virtual bool registerPort( + const ::aidl::android::media::audio::common::AudioDeviceDescription&) = 0; + + /** + * Unregister this BluetoothAudioPort from BluetoothAudioSessionControl. + * Audio HAL must delete this BluetoothAudioPort after calling this. + */ + virtual void unregisterPort() = 0; + + /** + * When the Audio framework / HAL tries to query audio config about format, + * channel mask and sample rate, it uses this function to fetch from the + * Bluetooth stack + */ + virtual bool loadAudioConfig( + ::aidl::android::hardware::bluetooth::audio::PcmConfiguration&) = 0; + + /** + * When the Audio framework / HAL wants to change the stream state, it invokes + * these 4 functions to control the Bluetooth stack (Audio Control Path). + * Note: standby(), start() and suspend() will return true when there are no errors. + + * Called by Audio framework / HAL to change the state to stand by. When A2DP/LE profile is + * disabled, the port is first set to STANDBY by calling suspend and then mState is set to + * DISABLED. To reset the state back to STANDBY this method is called. 
+ */ + virtual bool standby() = 0; + + /** + * Called by Audio framework / HAL to start the stream + */ + virtual bool start() = 0; + + /** + * Called by Audio framework / HAL to suspend the stream + */ + virtual bool suspend() = 0; + + /** + * Called by Audio framework / HAL to stop the stream + */ + virtual void stop() = 0; + + /** + * Called by the Audio framework / HAL to fetch information about audio frames + * presented to an external sink, or frames presented fror an internal sink + */ + virtual bool getPresentationPosition( + ::aidl::android::hardware::bluetooth::audio::PresentationPosition&) const = 0; + + /** + * Called by the Audio framework / HAL when the metadata of the stream's + * source has been changed. + */ + virtual bool updateSourceMetadata( + const ::aidl::android::hardware::audio::common::SourceMetadata&) const { + return false; + } + + /** + * Called by the Audio framework / HAL when the metadata of the stream's + * sink has been changed. + */ + virtual bool updateSinkMetadata( + const ::aidl::android::hardware::audio::common::SinkMetadata&) const { + return false; + } + + /** + * Return the current BluetoothStreamState + */ + virtual BluetoothStreamState getState() const = 0; + + /** + * Set the current BluetoothStreamState + */ + virtual bool setState(BluetoothStreamState) = 0; + + virtual bool isA2dp() const = 0; + + virtual bool isLeAudio() const = 0; + + virtual bool getPreferredDataIntervalUs(size_t&) const = 0; + + virtual size_t writeData(const void*, size_t) const { return 0; } + + virtual size_t readData(void*, size_t) const { return 0; } +}; + +class BluetoothAudioPortAidl : public BluetoothAudioPort { + public: + BluetoothAudioPortAidl(); + virtual ~BluetoothAudioPortAidl(); + + bool registerPort(const ::aidl::android::media::audio::common::AudioDeviceDescription& + description) override; + + void unregisterPort() override; + + bool loadAudioConfig( + ::aidl::android::hardware::bluetooth::audio::PcmConfiguration& audio_cfg) 
override; + + bool standby() override; + bool start() override; + bool suspend() override; + void stop() override; + + bool getPresentationPosition(::aidl::android::hardware::bluetooth::audio::PresentationPosition& + presentation_position) const override; + + bool updateSourceMetadata(const ::aidl::android::hardware::audio::common::SourceMetadata& + sourceMetadata) const override; + + bool updateSinkMetadata(const ::aidl::android::hardware::audio::common::SinkMetadata& + sinkMetadata) const override; + + /** + * Return the current BluetoothStreamState + * Note: This method is used for logging, does not lock, so value returned may not be latest + */ + BluetoothStreamState getState() const override NO_THREAD_SAFETY_ANALYSIS; + + bool setState(BluetoothStreamState state) override; + + bool isA2dp() const override; + + bool isLeAudio() const override; + + bool getPreferredDataIntervalUs(size_t& interval_us) const override; + + protected: + uint16_t mCookie; + BluetoothStreamState mState GUARDED_BY(mCvMutex); + ::aidl::android::hardware::bluetooth::audio::SessionType mSessionType; + // WR to support Mono: True if fetching Stereo and mixing into Mono + bool mIsStereoToMono = false; + + bool inUse() const; + + std::string debugMessage() const; + + private: + // start()/suspend() report state change status via callback. Wait until kMaxWaitingTimeMs or a + // state change after a call to start()/suspend() and analyse the returned status. Below mutex, + // conditional variable serves this purpose. + mutable std::mutex mCvMutex; + std::condition_variable mInternalCv GUARDED_BY(mCvMutex); + + // Check and initialize session type for |devices| If failed, this + // BluetoothAudioPortAidl is not initialized and must be deleted. 
+ bool initSessionType( + const ::aidl::android::media::audio::common::AudioDeviceDescription& description); + + bool condWaitState(BluetoothStreamState state); + + void controlResultHandler( + uint16_t cookie, + const ::aidl::android::hardware::bluetooth::audio::BluetoothAudioStatus& status); + void sessionChangedHandler(uint16_t cookie); +}; + +class BluetoothAudioPortAidlOut : public BluetoothAudioPortAidl { + public: + bool loadAudioConfig( + ::aidl::android::hardware::bluetooth::audio::PcmConfiguration& audio_cfg) override; + + // The audio data path to the Bluetooth stack (Software encoding) + size_t writeData(const void* buffer, size_t bytes) const override; +}; + +class BluetoothAudioPortAidlIn : public BluetoothAudioPortAidl { + public: + // The audio data path from the Bluetooth stack (Software decoded) + size_t readData(void* buffer, size_t bytes) const override; +}; + +} // namespace android::bluetooth::audio::aidl diff --git a/audio/include/core-impl/DriverStubImpl.h b/audio/include/core-impl/DriverStubImpl.h new file mode 100644 index 0000000..84f869a --- /dev/null +++ b/audio/include/core-impl/DriverStubImpl.h @@ -0,0 +1,53 @@ +/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +#include "core-impl/Stream.h" + +namespace aidl::android::hardware::audio::core { + +class DriverStubImpl : virtual public DriverInterface { + public: + explicit DriverStubImpl(const StreamContext& context) + : DriverStubImpl(context, 500 /*asyncSleepTimeUs*/) {} + DriverStubImpl(const StreamContext& context, int asyncSleepTimeUs); + + ::android::status_t init(DriverCallbackInterface* callback) override; + ::android::status_t drain(StreamDescriptor::DrainMode) override; + ::android::status_t flush() override; + ::android::status_t pause() override; + ::android::status_t standby() override; + ::android::status_t start() override; + ::android::status_t transfer(void* buffer, size_t frameCount, size_t* actualFrameCount, + int32_t* latencyMs) override; + void shutdown() override; + + protected: + const size_t mBufferSizeFrames; + const size_t mFrameSizeBytes; + const int mSampleRate; + const bool mIsAsynchronous; + const bool mIsInput; + const int32_t mMixPortHandle; + const int mAsyncSleepTimeUs; + bool mIsInitialized = false; // Used for validating the state machine logic. + bool mIsStandby = true; // Used for validating the state machine logic. + int64_t mStartTimeNs = 0; + long mFramesSinceStart = 0; +}; + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/include/core-impl/EngineConfigXmlConverter.h b/audio/include/core-impl/EngineConfigXmlConverter.h new file mode 100644 index 0000000..211c16f --- /dev/null +++ b/audio/include/core-impl/EngineConfigXmlConverter.h @@ -0,0 +1,86 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include +#include + +#include +#include +#include + +#include "core-impl/XmlConverter.h" + +namespace aidl::android::hardware::audio::core::internal { + +class EngineConfigXmlConverter { + public: + explicit EngineConfigXmlConverter(const std::string& configFilePath) + : mConverter(configFilePath, &::android::audio::policy::engine::configuration::read) { + if (mConverter.getXsdcConfig()) { + init(); + } + } + + std::string getError() const { return mConverter.getError(); } + ::android::status_t getStatus() const { return mConverter.getStatus(); } + + ::aidl::android::media::audio::common::AudioHalEngineConfig& getAidlEngineConfig(); + + private: + const std::optional<::android::audio::policy::engine::configuration::Configuration>& + getXsdcConfig() { + return mConverter.getXsdcConfig(); + } + void init(); + void initProductStrategyMap(); + ConversionResult<::aidl::android::media::audio::common::AudioAttributes> + convertAudioAttributesToAidl( + const ::android::audio::policy::engine::configuration::AttributesType& + xsdcAudioAttributes); + ConversionResult<::aidl::android::media::audio::common::AudioHalAttributesGroup> + convertAttributesGroupToAidl( + const ::android::audio::policy::engine::configuration::AttributesGroup& + xsdcAttributesGroup); + ConversionResult<::aidl::android::media::audio::common::AudioHalProductStrategy> + convertProductStrategyToAidl(const ::android::audio::policy::engine::configuration:: + ProductStrategies::ProductStrategy& xsdcProductStrategy); + ConversionResult 
convertProductStrategyIdToAidl(int xsdcId); + ConversionResult convertProductStrategyNameToAidl( + const std::string& xsdcProductStrategyName); + ConversionResult<::aidl::android::media::audio::common::AudioHalVolumeCurve> + convertVolumeCurveToAidl( + const ::android::audio::policy::engine::configuration::Volume& xsdcVolumeCurve); + ConversionResult<::aidl::android::media::audio::common::AudioHalVolumeGroup> + convertVolumeGroupToAidl( + const ::android::audio::policy::engine::configuration::VolumeGroupsType::VolumeGroup& + xsdcVolumeGroup); + + ::aidl::android::media::audio::common::AudioHalEngineConfig mAidlEngineConfig; + XmlConverter<::android::audio::policy::engine::configuration::Configuration> mConverter; + std::unordered_map + mAttributesReferenceMap; + std::unordered_map + mVolumesReferenceMap; + std::unordered_map mProductStrategyMap; + int mNextVendorStrategy = ::aidl::android::media::audio::common::AudioHalProductStrategy:: + VENDOR_STRATEGY_ID_START; + std::optional mDefaultProductStrategyId; +}; + +} // namespace aidl::android::hardware::audio::core::internal diff --git a/audio/include/core-impl/Module.h b/audio/include/core-impl/Module.h new file mode 100644 index 0000000..379264d --- /dev/null +++ b/audio/include/core-impl/Module.h @@ -0,0 +1,281 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +#include +#include +#include +#include +#include +#include + +#include +#include + +#include "core-impl/ChildInterface.h" +#include "core-impl/Stream.h" + +namespace aidl::android::hardware::audio::core { + +class Module : public BnModule { + public: + struct Configuration { + std::vector<::aidl::android::media::audio::common::AudioPort> ports; + std::vector<::aidl::android::media::audio::common::AudioPortConfig> portConfigs; + std::vector<::aidl::android::media::audio::common::AudioPortConfig> initialConfigs; + // Port id -> List of profiles to use when the device port state is set to 'connected' + // in connection simulation mode. + std::map> + connectedProfiles; + std::vector routes; + std::vector patches; + int32_t nextPortId = 1; + int32_t nextPatchId = 1; + }; + enum Type : int { DEFAULT, R_SUBMIX, STUB, USB, BLUETOOTH }; + + static std::shared_ptr createInstance(Type type) { + return createInstance(type, std::make_unique()); + } + static std::shared_ptr createInstance(Type type, + std::unique_ptr&& config); + static std::optional typeFromString(const std::string& type); + + Module(Type type, std::unique_ptr&& config); + + protected: + // The vendor extension done via inheritance can override interface methods and augment + // a call to the base implementation. 
+ + binder_status_t dump(int fd, const char** args, uint32_t numArgs) override; + + ndk::ScopedAStatus setModuleDebug( + const ::aidl::android::hardware::audio::core::ModuleDebug& in_debug) override; + ndk::ScopedAStatus getTelephony(std::shared_ptr* _aidl_return) override; + ndk::ScopedAStatus getBluetooth(std::shared_ptr* _aidl_return) override; + ndk::ScopedAStatus getBluetoothA2dp(std::shared_ptr* _aidl_return) override; + ndk::ScopedAStatus getBluetoothLe(std::shared_ptr* _aidl_return) override; + ndk::ScopedAStatus connectExternalDevice( + const ::aidl::android::media::audio::common::AudioPort& in_templateIdAndAdditionalData, + ::aidl::android::media::audio::common::AudioPort* _aidl_return) override; + ndk::ScopedAStatus disconnectExternalDevice(int32_t in_portId) override; + ndk::ScopedAStatus prepareToDisconnectExternalDevice(int32_t in_portId) override; + ndk::ScopedAStatus getAudioPatches(std::vector* _aidl_return) override; + ndk::ScopedAStatus getAudioPort( + int32_t in_portId, + ::aidl::android::media::audio::common::AudioPort* _aidl_return) override; + ndk::ScopedAStatus getAudioPortConfigs( + std::vector<::aidl::android::media::audio::common::AudioPortConfig>* _aidl_return) + override; + ndk::ScopedAStatus getAudioPorts( + std::vector<::aidl::android::media::audio::common::AudioPort>* _aidl_return) override; + ndk::ScopedAStatus getAudioRoutes(std::vector* _aidl_return) override; + ndk::ScopedAStatus getAudioRoutesForAudioPort( + int32_t in_portId, + std::vector<::aidl::android::hardware::audio::core::AudioRoute>* _aidl_return) override; + ndk::ScopedAStatus openInputStream( + const ::aidl::android::hardware::audio::core::IModule::OpenInputStreamArguments& + in_args, + ::aidl::android::hardware::audio::core::IModule::OpenInputStreamReturn* _aidl_return) + override; + ndk::ScopedAStatus openOutputStream( + const ::aidl::android::hardware::audio::core::IModule::OpenOutputStreamArguments& + in_args, + 
::aidl::android::hardware::audio::core::IModule::OpenOutputStreamReturn* _aidl_return) + override; + ndk::ScopedAStatus getSupportedPlaybackRateFactors( + SupportedPlaybackRateFactors* _aidl_return) override; + ndk::ScopedAStatus setAudioPatch(const AudioPatch& in_requested, + AudioPatch* _aidl_return) override; + ndk::ScopedAStatus setAudioPortConfig( + const ::aidl::android::media::audio::common::AudioPortConfig& in_requested, + ::aidl::android::media::audio::common::AudioPortConfig* out_suggested, + bool* _aidl_return) override; + ndk::ScopedAStatus resetAudioPatch(int32_t in_patchId) override; + ndk::ScopedAStatus resetAudioPortConfig(int32_t in_portConfigId) override; + ndk::ScopedAStatus getMasterMute(bool* _aidl_return) override; + ndk::ScopedAStatus setMasterMute(bool in_mute) override; + ndk::ScopedAStatus getMasterVolume(float* _aidl_return) override; + ndk::ScopedAStatus setMasterVolume(float in_volume) override; + ndk::ScopedAStatus getMicMute(bool* _aidl_return) override; + ndk::ScopedAStatus setMicMute(bool in_mute) override; + ndk::ScopedAStatus getMicrophones( + std::vector<::aidl::android::media::audio::common::MicrophoneInfo>* _aidl_return) + override; + ndk::ScopedAStatus updateAudioMode( + ::aidl::android::media::audio::common::AudioMode in_mode) override; + ndk::ScopedAStatus updateScreenRotation( + ::aidl::android::hardware::audio::core::IModule::ScreenRotation in_rotation) override; + ndk::ScopedAStatus updateScreenState(bool in_isTurnedOn) override; + ndk::ScopedAStatus getSoundDose(std::shared_ptr* _aidl_return) override; + ndk::ScopedAStatus generateHwAvSyncId(int32_t* _aidl_return) override; + ndk::ScopedAStatus getVendorParameters(const std::vector& in_ids, + std::vector* _aidl_return) override; + ndk::ScopedAStatus setVendorParameters(const std::vector& in_parameters, + bool in_async) override; + ndk::ScopedAStatus addDeviceEffect( + int32_t in_portConfigId, + const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>& 
in_effect) + override; + ndk::ScopedAStatus removeDeviceEffect( + int32_t in_portConfigId, + const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>& in_effect) + override; + ndk::ScopedAStatus getMmapPolicyInfos( + ::aidl::android::media::audio::common::AudioMMapPolicyType mmapPolicyType, + std::vector<::aidl::android::media::audio::common::AudioMMapPolicyInfo>* _aidl_return) + override; + ndk::ScopedAStatus supportsVariableLatency(bool* _aidl_return) override; + ndk::ScopedAStatus getAAudioMixerBurstCount(int32_t* _aidl_return) override; + ndk::ScopedAStatus getAAudioHardwareBurstMinUsec(int32_t* _aidl_return) override; + + // The maximum stream buffer size is 1 GiB = 2 ** 30 bytes; + static constexpr int32_t kMaximumStreamBufferSizeBytes = 1 << 30; + + private: + struct VendorDebug { + static const std::string kForceTransientBurstName; + static const std::string kForceSynchronousDrainName; + bool forceTransientBurst = false; + bool forceSynchronousDrain = false; + }; + // ids of device ports created at runtime via 'connectExternalDevice'. + // Also stores a list of ids of mix ports with dynamic profiles that were populated from + // the connected port. This list can be empty, thus an int->int multimap can't be used. + using ConnectedDevicePorts = std::map>; + // Maps port ids and port config ids to patch ids. + // Multimap because both ports and configs can be used by multiple patches. + using Patches = std::multimap; + + static const std::string kClipTransitionSupportName; + const Type mType; + std::unique_ptr mConfig; + ModuleDebug mDebug; + VendorDebug mVendorDebug; + ConnectedDevicePorts mConnectedDevicePorts; + Streams mStreams; + Patches mPatches; + bool mMicMute = false; + bool mMasterMute = false; + float mMasterVolume = 1.0f; + ChildInterface mSoundDose; + std::optional mIsMmapSupported; + + protected: + // The following virtual functions are intended for vendor extension via inheritance. 
+ + virtual ndk::ScopedAStatus createInputStream( + StreamContext&& context, + const ::aidl::android::hardware::audio::common::SinkMetadata& sinkMetadata, + const std::vector<::aidl::android::media::audio::common::MicrophoneInfo>& microphones, + std::shared_ptr* result) = 0; + virtual ndk::ScopedAStatus createOutputStream( + StreamContext&& context, + const ::aidl::android::hardware::audio::common::SourceMetadata& sourceMetadata, + const std::optional<::aidl::android::media::audio::common::AudioOffloadInfo>& + offloadInfo, + std::shared_ptr* result) = 0; + // If the module is unable to populate the connected device port correctly, the returned error + // code must correspond to the errors of `IModule.connectedExternalDevice` method. + virtual ndk::ScopedAStatus populateConnectedDevicePort( + ::aidl::android::media::audio::common::AudioPort* audioPort, int32_t nextPortId); + // If the module finds that the patch endpoints configurations are not matched, the returned + // error code must correspond to the errors of `IModule.setAudioPatch` method. 
+ virtual ndk::ScopedAStatus checkAudioPatchEndpointsMatch( + const std::vector<::aidl::android::media::audio::common::AudioPortConfig*>& sources, + const std::vector<::aidl::android::media::audio::common::AudioPortConfig*>& sinks); + virtual void onExternalDeviceConnectionChanged( + const ::aidl::android::media::audio::common::AudioPort& audioPort, bool connected); + virtual void onPrepareToDisconnectExternalDevice( + const ::aidl::android::media::audio::common::AudioPort& audioPort); + virtual ndk::ScopedAStatus onMasterMuteChanged(bool mute); + virtual ndk::ScopedAStatus onMasterVolumeChanged(float volume); + virtual std::vector<::aidl::android::media::audio::common::MicrophoneInfo> getMicrophoneInfos(); + virtual std::unique_ptr initializeConfig(); + virtual int32_t getNominalLatencyMs( + const ::aidl::android::media::audio::common::AudioPortConfig& portConfig); + virtual ndk::ScopedAStatus calculateBufferSizeFrames( + const ::aidl::android::media::audio::common::AudioFormatDescription &format, + int32_t latencyMs, int32_t sampleRateHz, int32_t *bufferSizeFrames); + virtual ndk::ScopedAStatus createMmapBuffer( + const ::aidl::android::media::audio::common::AudioPortConfig& portConfig, + int32_t bufferSizeFrames, int32_t frameSizeBytes, MmapBufferDescriptor* desc); + + // Utility and helper functions accessible to subclasses. + static int32_t calculateBufferSizeFramesForPcm(int32_t latencyMs, int32_t sampleRateHz) { + const int32_t rawSizeFrames = + aidl::android::hardware::audio::common::frameCountFromDurationMs(latencyMs, + sampleRateHz); + // Round up to nearest 16 frames since in the framework this is the size of a mixer burst. + const int32_t multipleOf16 = (rawSizeFrames + 15) & ~15; + if (sampleRateHz < 44100 || multipleOf16 <= 512) return multipleOf16; + // Larger buffers should use powers of 2. 
+ int32_t powerOf2 = 1; + while (powerOf2 < multipleOf16) powerOf2 <<= 1; + return powerOf2; + } + + ndk::ScopedAStatus bluetoothParametersUpdated(); + void cleanUpPatch(int32_t patchId); + ndk::ScopedAStatus createStreamContext( + int32_t in_portConfigId, int64_t in_bufferSizeFrames, + std::shared_ptr asyncCallback, + std::shared_ptr outEventCallback, + ::aidl::android::hardware::audio::core::StreamContext* out_context); + std::vector<::aidl::android::media::audio::common::AudioDevice> findConnectedDevices( + int32_t portConfigId); + std::set findConnectedPortConfigIds(int32_t portConfigId); + ndk::ScopedAStatus findPortIdForNewStream( + int32_t in_portConfigId, ::aidl::android::media::audio::common::AudioPort** port); + // Note: does not assign an ID to the config. + bool generateDefaultPortConfig(const ::aidl::android::media::audio::common::AudioPort& port, + ::aidl::android::media::audio::common::AudioPortConfig* config); + std::vector getAudioRoutesForAudioPortImpl(int32_t portId); + Configuration& getConfig(); + const ConnectedDevicePorts& getConnectedDevicePorts() const { return mConnectedDevicePorts; } + std::vector<::aidl::android::media::audio::common::AudioDevice> + getDevicesFromDevicePortConfigIds(const std::set& devicePortConfigIds); + bool getMasterMute() const { return mMasterMute; } + bool getMasterVolume() const { return mMasterVolume; } + bool getMicMute() const { return mMicMute; } + const ModuleDebug& getModuleDebug() const { return mDebug; } + const Patches& getPatches() const { return mPatches; } + std::set getRoutableAudioPortIds(int32_t portId, + std::vector* routes = nullptr); + const Streams& getStreams() const { return mStreams; } + Type getType() const { return mType; } + bool isMmapSupported(); + void populateConnectedProfiles(); + template + std::set portIdsFromPortConfigIds(C portConfigIds); + void registerPatch(const AudioPatch& patch); + ndk::ScopedAStatus setAudioPortConfigImpl( + const 
::aidl::android::media::audio::common::AudioPortConfig& in_requested, + const std::function& fillPortConfig, + ::aidl::android::media::audio::common::AudioPortConfig* out_suggested, bool* applied); + ndk::ScopedAStatus updateStreamsConnectedState(const AudioPatch& oldPatch, + const AudioPatch& newPatch); + bool setAudioPortConfigGain( + const ::aidl::android::media::audio::common::AudioPort& port, + const ::aidl::android::media::audio::common::AudioGainConfig& gainRequested); +}; + +std::ostream& operator<<(std::ostream& os, Module::Type t); + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/include/core-impl/ModuleAlsa.h b/audio/include/core-impl/ModuleAlsa.h new file mode 100644 index 0000000..3392b41 --- /dev/null +++ b/audio/include/core-impl/ModuleAlsa.h @@ -0,0 +1,40 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include "core-impl/Module.h" + +namespace aidl::android::hardware::audio::core { + +// This class is intended to be used as a base class for implementations +// that use TinyAlsa. This can be either a primary module or a USB Audio +// module. This class does not define a complete module implementation, +// and should never be used on its own. Derived classes are expected to +// provide necessary overrides for all interface methods omitted here. 
+class ModuleAlsa : public Module { + public: + ModuleAlsa(Type type, std::unique_ptr&& config) + : Module(type, std::move(config)) {} + + protected: + // Extension methods of 'Module'. + ndk::ScopedAStatus populateConnectedDevicePort( + ::aidl::android::media::audio::common::AudioPort* audioPort, + int32_t nextPortId) override; +}; + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/include/core-impl/ModuleBluetooth.h b/audio/include/core-impl/ModuleBluetooth.h new file mode 100644 index 0000000..4e68d72 --- /dev/null +++ b/audio/include/core-impl/ModuleBluetooth.h @@ -0,0 +1,96 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +#include + +#include "core-impl/Bluetooth.h" +#include "core-impl/DevicePortProxy.h" +#include "core-impl/Module.h" + +namespace aidl::android::hardware::audio::core { + +class ModuleBluetooth final : public Module { + public: + enum BtInterface : int { BTSCO, BTA2DP, BTLE }; + typedef std::tuple, std::weak_ptr, + std::weak_ptr> + BtProfileHandles; + + ModuleBluetooth(std::unique_ptr&& config); + + private: + struct CachedProxy { + std::shared_ptr<::android::bluetooth::audio::aidl::BluetoothAudioPortAidl> ptr; + ::aidl::android::hardware::bluetooth::audio::PcmConfiguration pcmConfig; + }; + + ChildInterface& getBtA2dp(); + ChildInterface& getBtLe(); + BtProfileHandles getBtProfileManagerHandles(); + + ndk::ScopedAStatus getBluetoothA2dp(std::shared_ptr* _aidl_return) override; + ndk::ScopedAStatus getBluetoothLe(std::shared_ptr* _aidl_return) override; + ndk::ScopedAStatus getMicMute(bool* _aidl_return) override; + ndk::ScopedAStatus setMicMute(bool in_mute) override; + + ndk::ScopedAStatus setAudioPortConfig( + const ::aidl::android::media::audio::common::AudioPortConfig& in_requested, + ::aidl::android::media::audio::common::AudioPortConfig* out_suggested, + bool* _aidl_return) override; + + ndk::ScopedAStatus checkAudioPatchEndpointsMatch( + const std::vector<::aidl::android::media::audio::common::AudioPortConfig*>& sources, + const std::vector<::aidl::android::media::audio::common::AudioPortConfig*>& sinks) + override; + void onExternalDeviceConnectionChanged( + const ::aidl::android::media::audio::common::AudioPort& audioPort, bool connected); + ndk::ScopedAStatus createInputStream( + StreamContext&& context, + const ::aidl::android::hardware::audio::common::SinkMetadata& sinkMetadata, + const std::vector<::aidl::android::media::audio::common::MicrophoneInfo>& microphones, + std::shared_ptr* result) override; + ndk::ScopedAStatus createOutputStream( + StreamContext&& context, + const 
::aidl::android::hardware::audio::common::SourceMetadata& sourceMetadata, + const std::optional<::aidl::android::media::audio::common::AudioOffloadInfo>& + offloadInfo, + std::shared_ptr* result) override; + ndk::ScopedAStatus populateConnectedDevicePort( + ::aidl::android::media::audio::common::AudioPort* audioPort, + int32_t nextPortId) override; + ndk::ScopedAStatus onMasterMuteChanged(bool mute) override; + ndk::ScopedAStatus onMasterVolumeChanged(float volume) override; + int32_t getNominalLatencyMs( + const ::aidl::android::media::audio::common::AudioPortConfig& portConfig) override; + + ndk::ScopedAStatus createProxy( + const ::aidl::android::media::audio::common::AudioPort& audioPort, + int32_t instancePortId, CachedProxy& proxy); + ndk::ScopedAStatus fetchAndCheckProxy(const StreamContext& context, CachedProxy& proxy); + ndk::ScopedAStatus findOrCreateProxy( + const ::aidl::android::media::audio::common::AudioPort& audioPort, CachedProxy& proxy); + + static constexpr int kCreateProxyRetries = 5; + static constexpr int kCreateProxyRetrySleepMs = 75; + ChildInterface mBluetoothA2dp; + ChildInterface mBluetoothLe; + std::map mProxies; + std::map mConnections; +}; + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/include/core-impl/ModulePrimary.h b/audio/include/core-impl/ModulePrimary.h new file mode 100644 index 0000000..c93deed --- /dev/null +++ b/audio/include/core-impl/ModulePrimary.h @@ -0,0 +1,55 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include "core-impl/Module.h" + +namespace aidl::android::hardware::audio::core { + +class ModulePrimary final : public Module { + public: + ModulePrimary(std::unique_ptr&& config) + : Module(Type::DEFAULT, std::move(config)) {} + + protected: + ndk::ScopedAStatus getTelephony(std::shared_ptr* _aidl_return) override; + + ndk::ScopedAStatus calculateBufferSizeFrames( + const ::aidl::android::media::audio::common::AudioFormatDescription& format, + int32_t latencyMs, int32_t sampleRateHz, int32_t* bufferSizeFrames) override; + ndk::ScopedAStatus createInputStream( + StreamContext&& context, + const ::aidl::android::hardware::audio::common::SinkMetadata& sinkMetadata, + const std::vector<::aidl::android::media::audio::common::MicrophoneInfo>& microphones, + std::shared_ptr* result) override; + ndk::ScopedAStatus createOutputStream( + StreamContext&& context, + const ::aidl::android::hardware::audio::common::SourceMetadata& sourceMetadata, + const std::optional<::aidl::android::media::audio::common::AudioOffloadInfo>& + offloadInfo, + std::shared_ptr* result) override; + ndk::ScopedAStatus createMmapBuffer( + const ::aidl::android::media::audio::common::AudioPortConfig& portConfig, + int32_t bufferSizeFrames, int32_t frameSizeBytes, MmapBufferDescriptor* desc) override; + int32_t getNominalLatencyMs( + const ::aidl::android::media::audio::common::AudioPortConfig& portConfig) override; + + private: + ChildInterface mTelephony; +}; + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/include/core-impl/ModuleRemoteSubmix.h b/audio/include/core-impl/ModuleRemoteSubmix.h new file mode 100644 index 0000000..9d8c027 --- /dev/null +++ b/audio/include/core-impl/ModuleRemoteSubmix.h @@ -0,0 +1,63 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + 
* you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include "core-impl/Module.h" + +namespace aidl::android::hardware::audio::core { + +class ModuleRemoteSubmix : public Module { + public: + ModuleRemoteSubmix(std::unique_ptr&& config) + : Module(Type::R_SUBMIX, std::move(config)) {} + + private: + // IModule interfaces + ndk::ScopedAStatus getMicMute(bool* _aidl_return) override; + ndk::ScopedAStatus setMicMute(bool in_mute) override; + ndk::ScopedAStatus setAudioPortConfig( + const ::aidl::android::media::audio::common::AudioPortConfig& in_requested, + ::aidl::android::media::audio::common::AudioPortConfig* out_suggested, + bool* _aidl_return) override; + + // Module interfaces + ndk::ScopedAStatus createInputStream( + StreamContext&& context, + const ::aidl::android::hardware::audio::common::SinkMetadata& sinkMetadata, + const std::vector<::aidl::android::media::audio::common::MicrophoneInfo>& microphones, + std::shared_ptr* result) override; + ndk::ScopedAStatus createOutputStream( + StreamContext&& context, + const ::aidl::android::hardware::audio::common::SourceMetadata& sourceMetadata, + const std::optional<::aidl::android::media::audio::common::AudioOffloadInfo>& + offloadInfo, + std::shared_ptr* result) override; + ndk::ScopedAStatus populateConnectedDevicePort( + ::aidl::android::media::audio::common::AudioPort* audioPort, + int32_t nextPortId) override; + ndk::ScopedAStatus checkAudioPatchEndpointsMatch( + const std::vector<::aidl::android::media::audio::common::AudioPortConfig*>& sources, + const 
std::vector<::aidl::android::media::audio::common::AudioPortConfig*>& sinks) + override; + ndk::ScopedAStatus onMasterMuteChanged(bool mute) override; + ndk::ScopedAStatus onMasterVolumeChanged(float volume) override; + int32_t getNominalLatencyMs( + const ::aidl::android::media::audio::common::AudioPortConfig& portConfig) override; + binder_status_t dump(int fd, const char** args, uint32_t numArgs) override; +}; + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/include/core-impl/ModuleStub.h b/audio/include/core-impl/ModuleStub.h new file mode 100644 index 0000000..e9b7db4 --- /dev/null +++ b/audio/include/core-impl/ModuleStub.h @@ -0,0 +1,50 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +#include "core-impl/Module.h" + +namespace aidl::android::hardware::audio::core { + +class ModuleStub final : public Module { + public: + ModuleStub(std::unique_ptr&& config) : Module(Type::STUB, std::move(config)) {} + + protected: + ndk::ScopedAStatus getBluetooth(std::shared_ptr* _aidl_return) override; + ndk::ScopedAStatus getBluetoothA2dp(std::shared_ptr* _aidl_return) override; + ndk::ScopedAStatus getBluetoothLe(std::shared_ptr* _aidl_return) override; + + ndk::ScopedAStatus createInputStream( + StreamContext&& context, + const ::aidl::android::hardware::audio::common::SinkMetadata& sinkMetadata, + const std::vector<::aidl::android::media::audio::common::MicrophoneInfo>& microphones, + std::shared_ptr* result) override; + ndk::ScopedAStatus createOutputStream( + StreamContext&& context, + const ::aidl::android::hardware::audio::common::SourceMetadata& sourceMetadata, + const std::optional<::aidl::android::media::audio::common::AudioOffloadInfo>& + offloadInfo, + std::shared_ptr* result) override; + + private: + ChildInterface mBluetooth; + ChildInterface mBluetoothA2dp; + ChildInterface mBluetoothLe; +}; + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/include/core-impl/ModuleUsb.h b/audio/include/core-impl/ModuleUsb.h new file mode 100644 index 0000000..d9ac4f0 --- /dev/null +++ b/audio/include/core-impl/ModuleUsb.h @@ -0,0 +1,60 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include "core-impl/ModuleAlsa.h" + +namespace aidl::android::hardware::audio::core { + +class ModuleUsb final : public ModuleAlsa { + public: + ModuleUsb(std::unique_ptr&& config) : ModuleAlsa(Type::USB, std::move(config)) {} + + private: + // IModule interfaces + ndk::ScopedAStatus getTelephony(std::shared_ptr* _aidl_return) override; + ndk::ScopedAStatus getBluetooth(std::shared_ptr* _aidl_return) override; + ndk::ScopedAStatus getMicMute(bool* _aidl_return) override; + ndk::ScopedAStatus setMicMute(bool in_mute) override; + + // Module interfaces + ndk::ScopedAStatus createInputStream( + StreamContext&& context, + const ::aidl::android::hardware::audio::common::SinkMetadata& sinkMetadata, + const std::vector<::aidl::android::media::audio::common::MicrophoneInfo>& microphones, + std::shared_ptr* result) override; + ndk::ScopedAStatus createOutputStream( + StreamContext&& context, + const ::aidl::android::hardware::audio::common::SourceMetadata& sourceMetadata, + const std::optional<::aidl::android::media::audio::common::AudioOffloadInfo>& + offloadInfo, + std::shared_ptr* result) override; + ndk::ScopedAStatus populateConnectedDevicePort( + ::aidl::android::media::audio::common::AudioPort* audioPort, + int32_t nextPortId) override; + ndk::ScopedAStatus checkAudioPatchEndpointsMatch( + const std::vector<::aidl::android::media::audio::common::AudioPortConfig*>& sources, + const std::vector<::aidl::android::media::audio::common::AudioPortConfig*>& sinks) + override; + void onExternalDeviceConnectionChanged( + const ::aidl::android::media::audio::common::AudioPort& audioPort, + bool connected) override; + ndk::ScopedAStatus onMasterMuteChanged(bool mute) override; + ndk::ScopedAStatus onMasterVolumeChanged(float volume) override; +}; + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/include/core-impl/SoundDose.h 
b/audio/include/core-impl/SoundDose.h new file mode 100644 index 0000000..f58e541 --- /dev/null +++ b/audio/include/core-impl/SoundDose.h @@ -0,0 +1,87 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include + +#include +#include +#include +#include +#include + +namespace aidl::android::hardware::audio::core::sounddose { + +// Interface used for processing the data received by a stream. +class StreamDataProcessorInterface { + public: + virtual ~StreamDataProcessorInterface() = default; + + virtual void startDataProcessor( + uint32_t samplerate, uint32_t channelCount, + const ::aidl::android::media::audio::common::AudioFormatDescription& format) = 0; + virtual void setAudioDevice( + const ::aidl::android::media::audio::common::AudioDevice& audioDevice) = 0; + virtual void process(const void* buffer, size_t size) = 0; +}; + +class SoundDose final : public BnSoundDose, public StreamDataProcessorInterface { + public: + SoundDose() : mMelCallback(::android::sp::make(this)){}; + + // -------------------------------------- BnSoundDose ------------------------------------------ + ndk::ScopedAStatus setOutputRs2UpperBound(float in_rs2ValueDbA) override; + ndk::ScopedAStatus getOutputRs2UpperBound(float* _aidl_return) override; + ndk::ScopedAStatus registerSoundDoseCallback( + const std::shared_ptr& in_callback) override; + + // ----------------------------- 
StreamDataProcessorInterface ---------------------------------- + void setAudioDevice( + const ::aidl::android::media::audio::common::AudioDevice& audioDevice) override; + void startDataProcessor( + uint32_t samplerate, uint32_t channelCount, + const ::aidl::android::media::audio::common::AudioFormatDescription& format) override; + void process(const void* buffer, size_t size) override; + + private: + class MelCallback : public ::android::audio_utils::MelProcessor::MelCallback { + public: + explicit MelCallback(SoundDose* soundDose) : mSoundDose(*soundDose) {} + + // ------------------------------------ MelCallback ---------------------------------------- + void onNewMelValues(const std::vector& mels, size_t offset, size_t length, + audio_port_handle_t deviceId, bool attenuated) const override; + void onMomentaryExposure(float currentMel, audio_port_handle_t deviceId) const override; + + SoundDose& mSoundDose; // must outlive MelCallback, not owning + }; + + void onNewMelValues(const std::vector& mels, size_t offset, size_t length, + audio_port_handle_t deviceId) const; + void onMomentaryExposure(float currentMel, audio_port_handle_t deviceId) const; + + mutable ::android::audio_utils::mutex mCbMutex; + std::shared_ptr mCallback GUARDED_BY(mCbMutex); + std::optional<::aidl::android::media::audio::common::AudioDevice> mAudioDevice + GUARDED_BY(mCbMutex); + mutable ::android::audio_utils::mutex mMutex; + float mRs2Value GUARDED_BY(mMutex) = DEFAULT_MAX_RS2; + ::android::sp<::android::audio_utils::MelProcessor> mMelProcessor GUARDED_BY(mMutex); + ::android::sp mMelCallback GUARDED_BY(mMutex); +}; + +} // namespace aidl::android::hardware::audio::core::sounddose diff --git a/audio/include/core-impl/Stream.h b/audio/include/core-impl/Stream.h new file mode 100644 index 0000000..bb790e9 --- /dev/null +++ b/audio/include/core-impl/Stream.h @@ -0,0 +1,740 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 
(the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "core-impl/ChildInterface.h" +#include "core-impl/SoundDose.h" +#include "core-impl/utils.h" + +namespace aidl::android::hardware::audio::core { + +// This class is similar to StreamDescriptor, but unlike +// the descriptor, it actually owns the objects implementing +// data exchange: FMQs etc, whereas StreamDescriptor only +// contains their descriptors. +class StreamContext { + public: + typedef ::android::AidlMessageQueue< + StreamDescriptor::Command, + ::aidl::android::hardware::common::fmq::SynchronizedReadWrite> + CommandMQ; + typedef ::android::AidlMessageQueue< + StreamDescriptor::Reply, ::aidl::android::hardware::common::fmq::SynchronizedReadWrite> + ReplyMQ; + typedef ::android::AidlMessageQueue< + int8_t, ::aidl::android::hardware::common::fmq::SynchronizedReadWrite> + DataMQ; + + // Ensure that this value is not used by any of StreamDescriptor.State enums + static constexpr StreamDescriptor::State STATE_CLOSED = + static_cast(-1); + + struct DebugParameters { + // An extra delay for transient states, in ms. + int transientStateDelayMs = 0; + // Force the "burst" command to move the SM to the TRANSFERRING state. 
+ bool forceTransientBurst = false; + // Force the "drain" command to be synchronous, going directly to the IDLE state. + bool forceSynchronousDrain = false; + }; + + StreamContext() = default; + StreamContext(std::unique_ptr commandMQ, std::unique_ptr replyMQ, + const ::aidl::android::media::audio::common::AudioFormatDescription& format, + const ::aidl::android::media::audio::common::AudioChannelLayout& channelLayout, + int sampleRate, const ::aidl::android::media::audio::common::AudioIoFlags& flags, + int32_t nominalLatencyMs, int32_t mixPortHandle, std::unique_ptr dataMQ, + std::shared_ptr asyncCallback, + std::shared_ptr outEventCallback, + std::weak_ptr streamDataProcessor, + DebugParameters debugParameters) + : mCommandMQ(std::move(commandMQ)), + mInternalCommandCookie(std::rand() | 1 /* make sure it's not 0 */), + mReplyMQ(std::move(replyMQ)), + mFormat(format), + mChannelLayout(channelLayout), + mSampleRate(sampleRate), + mFlags(flags), + mNominalLatencyMs(nominalLatencyMs), + mMixPortHandle(mixPortHandle), + mDataMQ(std::move(dataMQ)), + mAsyncCallback(asyncCallback), + mOutEventCallback(outEventCallback), + mStreamDataProcessor(streamDataProcessor), + mDebugParameters(debugParameters) {} + StreamContext(std::unique_ptr commandMQ, std::unique_ptr replyMQ, + const ::aidl::android::media::audio::common::AudioFormatDescription& format, + const ::aidl::android::media::audio::common::AudioChannelLayout& channelLayout, + int sampleRate, const ::aidl::android::media::audio::common::AudioIoFlags& flags, + int32_t nominalLatencyMs, int32_t mixPortHandle, MmapBufferDescriptor&& mmapDesc, + std::shared_ptr outEventCallback, + std::weak_ptr streamDataProcessor, + DebugParameters debugParameters) + : mCommandMQ(std::move(commandMQ)), + mInternalCommandCookie(std::rand() | 1 /* make sure it's not 0 */), + mReplyMQ(std::move(replyMQ)), + mFormat(format), + mChannelLayout(channelLayout), + mSampleRate(sampleRate), + mFlags(flags), + mNominalLatencyMs(nominalLatencyMs), + 
mMixPortHandle(mixPortHandle), + mMmapBufferDesc(std::move(mmapDesc)), + mOutEventCallback(outEventCallback), + mStreamDataProcessor(streamDataProcessor), + mDebugParameters(debugParameters) {} + + void fillDescriptor(StreamDescriptor* desc); + std::shared_ptr getAsyncCallback() const { return mAsyncCallback; } + size_t getBufferSizeInFrames() const; + ::aidl::android::media::audio::common::AudioChannelLayout getChannelLayout() const { + return mChannelLayout; + } + CommandMQ* getCommandMQ() const { return mCommandMQ.get(); } + DataMQ* getDataMQ() const { return mDataMQ.get(); } + ::aidl::android::media::audio::common::AudioFormatDescription getFormat() const { + return mFormat; + } + ::aidl::android::media::audio::common::AudioIoFlags getFlags() const { return mFlags; } + bool getForceTransientBurst() const { return mDebugParameters.forceTransientBurst; } + bool getForceSynchronousDrain() const { return mDebugParameters.forceSynchronousDrain; } + size_t getFrameSize() const; + int getInternalCommandCookie() const { return mInternalCommandCookie; } + int32_t getMixPortHandle() const { return mMixPortHandle; } + int32_t getNominalLatencyMs() const { return mNominalLatencyMs; } + std::shared_ptr getOutEventCallback() const { + return mOutEventCallback; + } + std::weak_ptr getStreamDataProcessor() const { + return mStreamDataProcessor; + } + void startStreamDataProcessor(); + ReplyMQ* getReplyMQ() const { return mReplyMQ.get(); } + int getTransientStateDelayMs() const { return mDebugParameters.transientStateDelayMs; } + int getSampleRate() const { return mSampleRate; } + bool isInput() const { + return mFlags.getTag() == ::aidl::android::media::audio::common::AudioIoFlags::input; + } + bool isMmap() const { return ::aidl::android::hardware::audio::common::hasMmapFlag(mFlags); } + bool isValid() const; + // 'reset' is called on a Binder thread when closing the stream. Does not use + // locking because it only cleans MQ pointers which were also set on the Binder thread. 
+ void reset(); + // 'advanceFrameCount' and 'getFrameCount' are only called on the worker thread. + int64_t advanceFrameCount(size_t increase) { return mFrameCount += increase; } + int64_t getFrameCount() const { return mFrameCount; } + + private: + // Fields are non const to allow move assignment. + std::unique_ptr mCommandMQ; + int mInternalCommandCookie; // The value used to confirm that the command was posted internally + std::unique_ptr mReplyMQ; + ::aidl::android::media::audio::common::AudioFormatDescription mFormat; + ::aidl::android::media::audio::common::AudioChannelLayout mChannelLayout; + int mSampleRate; + ::aidl::android::media::audio::common::AudioIoFlags mFlags; + int32_t mNominalLatencyMs; + int32_t mMixPortHandle; + // Only one of `mDataMQ` or `mMapBufferDesc` can be active, depending on `isMmap` + std::unique_ptr mDataMQ; + MmapBufferDescriptor mMmapBufferDesc; + std::shared_ptr mAsyncCallback; + std::shared_ptr mOutEventCallback; // Only used by output streams + std::weak_ptr mStreamDataProcessor; + DebugParameters mDebugParameters; + int64_t mFrameCount = 0; +}; + +// Driver callbacks are executed on a dedicated thread, not on the worker thread. +struct DriverCallbackInterface { + virtual ~DriverCallbackInterface() = default; + // Both callbacks are used to notify the worker about the progress of the playback + // offloaded to the DSP. + + // 'bufferFramesLeft' is how many *encoded* frames are left in the buffer until + // it depletes. + virtual void onBufferStateChange(size_t bufferFramesLeft) = 0; + // 'clipFramesLeft' is how many *decoded* frames are left until the end of the currently + // playing clip. '0' frames left means that the clip has ended (by itself or due + // to draining). + // 'hasNextClip' indicates whether the DSP has audio data for the next clip. + virtual void onClipStateChange(size_t clipFramesLeft, bool hasNextClip) = 0; +}; + +// This interface provides operations of the stream which are executed on the worker thread. 
+struct DriverInterface { + virtual ~DriverInterface() = default; + // All the methods below are called on the worker thread. + virtual ::android::status_t init(DriverCallbackInterface* callback) = 0; // Called once. + virtual ::android::status_t drain(StreamDescriptor::DrainMode mode) = 0; + virtual ::android::status_t flush() = 0; + virtual ::android::status_t pause() = 0; + virtual ::android::status_t standby() = 0; + virtual ::android::status_t start() = 0; + virtual ::android::status_t transfer(void* buffer, size_t frameCount, size_t* actualFrameCount, + int32_t* latencyMs) = 0; + // No need to implement 'refinePosition' unless the driver can provide more precise + // data than just total frame count. For example, the driver may correctly account + // for any intermediate buffers. + virtual ::android::status_t refinePosition(StreamDescriptor::Position* /*position*/) { + return ::android::OK; + } + // Implement 'getMmapPositionAndLatency' is necessary if driver can support mmap stream. + virtual ::android::status_t getMmapPositionAndLatency(StreamDescriptor::Position* /*position*/, + int32_t* /*latency*/) { + return ::android::OK; + } + virtual void shutdown() = 0; // This function is only called once. 
+}; + +class StreamWorkerCommonLogic : public ::android::hardware::audio::common::StreamLogic, + public DriverCallbackInterface { + public: + bool isClosed() const { return mState == StreamContext::STATE_CLOSED; } + StreamDescriptor::State setClosed() { + auto prevState = mState.exchange(StreamContext::STATE_CLOSED); + if (prevState != StreamContext::STATE_CLOSED) { + mStatePriorToClosing = prevState; + } + return mStatePriorToClosing; + } + void setIsConnected(bool connected) { mIsConnected = connected; } + + protected: + using DataBufferElement = int8_t; + + StreamWorkerCommonLogic(StreamContext* context, DriverInterface* driver) + : mContext(context), + mDriver(driver), + mTransientStateDelayMs(context->getTransientStateDelayMs()) {} + pid_t getTid() const; + + // ::android::hardware::audio::common::StreamLogic + std::string init() override; + // DriverCallbackInterface + void onBufferStateChange(size_t bufferFramesLeft) override; + void onClipStateChange(size_t clipFramesLeft, bool hasNextClip) override; + + void populateReply(StreamDescriptor::Reply* reply, bool isConnected) const; + void populateReplyWrongState(StreamDescriptor::Reply* reply, + const StreamDescriptor::Command& command) const; + void switchToTransientState(StreamDescriptor::State state) { + mState = state; + mTransientStateStart = std::chrono::steady_clock::now(); + } + + // The context is only used for reading, except for updating the frame count, + // which happens on the worker thread only. + StreamContext* const mContext; + DriverInterface* const mDriver; + // This is the state the stream was in before being closed. It is retrieved by the main + // thread after joining the worker thread. + StreamDescriptor::State mStatePriorToClosing = StreamDescriptor::State::STANDBY; + // Atomic fields are used both by the main and worker threads. 
+ std::atomic mIsConnected = false; + static_assert(std::atomic::is_always_lock_free); + std::atomic mState = StreamDescriptor::State::STANDBY; + // All fields below are used on the worker thread only. + const std::chrono::duration mTransientStateDelayMs; + std::chrono::time_point mTransientStateStart; + // We use an array and the "size" field instead of a vector to be able to detect + // memory allocation issues. + std::unique_ptr mDataBuffer; + size_t mDataBufferSize; +}; + +// This interface is used to decouple stream implementations from a concrete StreamWorker +// implementation. +struct StreamWorkerInterface { + using CreateInstance = + std::function; + virtual ~StreamWorkerInterface() = default; + virtual bool isClosed() const = 0; + virtual void setIsConnected(bool isConnected) = 0; + virtual StreamDescriptor::State setClosed() = 0; + virtual bool start() = 0; + virtual pid_t getTid() = 0; + virtual void join() = 0; + virtual std::string getError() = 0; +}; + +template +class StreamWorkerImpl : public StreamWorkerInterface, + public ::android::hardware::audio::common::StreamWorker { + using WorkerImpl = ::android::hardware::audio::common::StreamWorker; + + public: + StreamWorkerImpl(StreamContext* context, DriverInterface* driver) + : WorkerImpl(context, driver) {} + bool isClosed() const override { return WorkerImpl::isClosed(); } + void setIsConnected(bool isConnected) override { WorkerImpl::setIsConnected(isConnected); } + StreamDescriptor::State setClosed() override { return WorkerImpl::setClosed(); } + bool start() override { + // This is an "audio service thread," must have elevated priority. 
+ return WorkerImpl::start(WorkerImpl::kThreadName, ANDROID_PRIORITY_URGENT_AUDIO); + } + pid_t getTid() override { return WorkerImpl::getTid(); } + void join() override { return WorkerImpl::join(); } + std::string getError() override { return WorkerImpl::getError(); } +}; + +class StreamInWorkerLogic : public StreamWorkerCommonLogic { + public: + static const std::string kThreadName; + StreamInWorkerLogic(StreamContext* context, DriverInterface* driver) + : StreamWorkerCommonLogic(context, driver) {} + + protected: + Status cycle() override; + + private: + bool read(size_t clientSize, StreamDescriptor::Reply* reply); + bool readMmap(StreamDescriptor::Reply* reply); +}; +using StreamInWorker = StreamWorkerImpl; + +class StreamOutWorkerLogic : public StreamWorkerCommonLogic { + public: + static const std::string kThreadName; + StreamOutWorkerLogic(StreamContext* context, DriverInterface* driver) + : StreamWorkerCommonLogic(context, driver), + mEventCallback(context->getOutEventCallback()) {} + + protected: + Status cycle() override; + // DriverCallbackInterface + void onBufferStateChange(size_t bufferFramesLeft) override; + void onClipStateChange(size_t clipFramesLeft, bool hasNextClip) override; + + private: + bool write(size_t clientSize, StreamDescriptor::Reply* reply); + bool writeMmap(StreamDescriptor::Reply* reply); + + std::shared_ptr mEventCallback; + + enum DrainState : int32_t { NONE, ALL, EN /*early notify*/, EN_SENT }; + std::atomic mDrainState = DrainState::NONE; +}; +using StreamOutWorker = StreamWorkerImpl; + +// This interface provides operations of the stream which are executed on a Binder pool thread. +// These methods originate both from the AIDL interface and its implementation. 
+struct StreamCommonInterface { + using ConnectedDevices = std::vector<::aidl::android::media::audio::common::AudioDevice>; + using Metadata = + std::variant<::aidl::android::hardware::audio::common::SinkMetadata /*IStreamIn*/, + ::aidl::android::hardware::audio::common::SourceMetadata /*IStreamOut*/>; + + static constexpr bool isInput(const Metadata& metadata) { return metadata.index() == 0; } + + virtual ~StreamCommonInterface() = default; + // Methods below originate from the 'IStreamCommon' interface. + // This is semantically equivalent to inheriting from 'IStreamCommon' with a benefit + // that concrete stream implementations can inherit both from this interface and IStreamIn/Out. + virtual ndk::ScopedAStatus close() = 0; + virtual ndk::ScopedAStatus prepareToClose() = 0; + virtual ndk::ScopedAStatus updateHwAvSyncId(int32_t in_hwAvSyncId) = 0; + virtual ndk::ScopedAStatus getVendorParameters(const std::vector& in_ids, + std::vector* _aidl_return) = 0; + virtual ndk::ScopedAStatus setVendorParameters( + const std::vector& in_parameters, bool in_async) = 0; + virtual ndk::ScopedAStatus addEffect( + const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>& + in_effect) = 0; + virtual ndk::ScopedAStatus removeEffect( + const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>& + in_effect) = 0; + // Methods below are common for both 'IStreamIn' and 'IStreamOut'. Note that + // 'updateMetadata' in them uses an individual structure which is wrapped here. + // The 'Common' suffix is added to distinguish them from the methods from 'IStreamIn/Out'. + virtual ndk::ScopedAStatus getStreamCommonCommon( + std::shared_ptr* _aidl_return) = 0; + virtual ndk::ScopedAStatus updateMetadataCommon(const Metadata& metadata) = 0; + // Methods below are called by implementation of 'IModule', 'IStreamIn' and 'IStreamOut'. 
+ virtual ndk::ScopedAStatus initInstance( + const std::shared_ptr& delegate) = 0; + virtual const StreamContext& getContext() const = 0; + virtual bool isClosed() const = 0; + virtual const ConnectedDevices& getConnectedDevices() const = 0; + virtual ndk::ScopedAStatus setConnectedDevices( + const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices) = 0; + virtual ndk::ScopedAStatus bluetoothParametersUpdated() = 0; + virtual ndk::ScopedAStatus setGain(float gain) = 0; +}; + +// This is equivalent to automatically generated 'IStreamCommonDelegator' but uses +// a weak pointer to avoid creating a reference loop. The loop will occur because +// 'IStreamIn/Out.getStreamCommon' must return the same instance every time, thus +// the stream implementation must hold a strong pointer to an instance of 'IStreamCommon'. +// Also, we use 'StreamCommonInterface' here instead of 'IStreamCommon'. +class StreamCommonDelegator : public BnStreamCommon { + public: + explicit StreamCommonDelegator(const std::shared_ptr& delegate) + : mDelegate(delegate) {} + + private: + ndk::ScopedAStatus close() override { + auto delegate = mDelegate.lock(); + return delegate != nullptr ? delegate->close() + : ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + ndk::ScopedAStatus prepareToClose() override { + auto delegate = mDelegate.lock(); + return delegate != nullptr ? delegate->prepareToClose() + : ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + ndk::ScopedAStatus updateHwAvSyncId(int32_t in_hwAvSyncId) override { + auto delegate = mDelegate.lock(); + return delegate != nullptr ? delegate->updateHwAvSyncId(in_hwAvSyncId) + : ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + ndk::ScopedAStatus getVendorParameters(const std::vector& in_ids, + std::vector* _aidl_return) override { + auto delegate = mDelegate.lock(); + return delegate != nullptr ? 
delegate->getVendorParameters(in_ids, _aidl_return) + : ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + ndk::ScopedAStatus setVendorParameters(const std::vector& in_parameters, + bool in_async) override { + auto delegate = mDelegate.lock(); + return delegate != nullptr ? delegate->setVendorParameters(in_parameters, in_async) + : ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + ndk::ScopedAStatus addEffect( + const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>& in_effect) + override { + auto delegate = mDelegate.lock(); + return delegate != nullptr ? delegate->addEffect(in_effect) + : ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + ndk::ScopedAStatus removeEffect( + const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>& in_effect) + override { + auto delegate = mDelegate.lock(); + return delegate != nullptr ? delegate->removeEffect(in_effect) + : ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + // It is possible that on the client side the proxy for IStreamCommon will outlive + // the IStream* instance, and the server side IStream* instance will get destroyed + // while this IStreamCommon instance is still alive. + std::weak_ptr mDelegate; +}; + +// The implementation of DriverInterface must be provided by each concrete stream implementation. +// Note that StreamCommonImpl does not own the context. This is to support swapping on the fly +// implementations of the stream while keeping the same IStreamIn/Out instance. It's that instance +// who must be owner of the context. 
+class StreamCommonImpl : virtual public StreamCommonInterface, virtual public DriverInterface { + public: + StreamCommonImpl(StreamContext* context, const Metadata& metadata, + const StreamWorkerInterface::CreateInstance& createWorker) + : mContext(*context), mMetadata(metadata), mWorker(createWorker(context, this)) {} + StreamCommonImpl(StreamContext* context, const Metadata& metadata) + : StreamCommonImpl( + context, metadata, + isInput(metadata) ? getDefaultInWorkerCreator() : getDefaultOutWorkerCreator()) {} + ~StreamCommonImpl(); + + ndk::ScopedAStatus close() override; + ndk::ScopedAStatus prepareToClose() override; + ndk::ScopedAStatus updateHwAvSyncId(int32_t in_hwAvSyncId) override; + ndk::ScopedAStatus getVendorParameters(const std::vector& in_ids, + std::vector* _aidl_return) override; + ndk::ScopedAStatus setVendorParameters(const std::vector& in_parameters, + bool in_async) override; + ndk::ScopedAStatus addEffect( + const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>& in_effect) + override; + ndk::ScopedAStatus removeEffect( + const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>& in_effect) + override; + + ndk::ScopedAStatus getStreamCommonCommon(std::shared_ptr* _aidl_return) override; + ndk::ScopedAStatus updateMetadataCommon(const Metadata& metadata) override; + + ndk::ScopedAStatus initInstance( + const std::shared_ptr& delegate) override; + const StreamContext& getContext() const override { return mContext; } + bool isClosed() const override { return mWorker->isClosed(); } + const ConnectedDevices& getConnectedDevices() const override { return mConnectedDevices; } + ndk::ScopedAStatus setConnectedDevices( + const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices) + override; + ndk::ScopedAStatus bluetoothParametersUpdated() override; + ndk::ScopedAStatus setGain(float gain) override; + + protected: + static StreamWorkerInterface::CreateInstance getDefaultInWorkerCreator() { + return 
[](StreamContext* ctx, DriverInterface* driver) -> StreamWorkerInterface* { + return new StreamInWorker(ctx, driver); + }; + } + static StreamWorkerInterface::CreateInstance getDefaultOutWorkerCreator() { + return [](StreamContext* ctx, DriverInterface* driver) -> StreamWorkerInterface* { + return new StreamOutWorker(ctx, driver); + }; + } + + virtual void onClose(StreamDescriptor::State statePriorToClosing) = 0; + // Any stream class implementing 'DriverInterface::shutdown' must call 'cleanupWorker' in + // the destructor in order to stop and join the worker thread in the case when the client + // has not called 'IStreamCommon::close' method. + void cleanupWorker(); + void setWorkerThreadPriority(pid_t workerTid); + void stopAndJoinWorker(); + void stopWorker(); + + const StreamContext& mContext; + Metadata mMetadata; + std::unique_ptr mWorker; + ChildInterface mCommon; + ConnectedDevices mConnectedDevices; + + private: + std::atomic mWorkerStopIssued = false; +}; + +// Note: 'StreamIn/Out' can not be used on their own. Instead, they must be used for defining +// concrete input/output stream implementations. 
+class StreamIn : virtual public StreamCommonInterface, public BnStreamIn { + protected: + void defaultOnClose(); + + ndk::ScopedAStatus getStreamCommon(std::shared_ptr* _aidl_return) override { + return getStreamCommonCommon(_aidl_return); + } + ndk::ScopedAStatus updateMetadata(const ::aidl::android::hardware::audio::common::SinkMetadata& + in_sinkMetadata) override { + return updateMetadataCommon(in_sinkMetadata); + } + ndk::ScopedAStatus getActiveMicrophones( + std::vector<::aidl::android::media::audio::common::MicrophoneDynamicInfo>* _aidl_return) + override; + ndk::ScopedAStatus getMicrophoneDirection(MicrophoneDirection* _aidl_return) override; + ndk::ScopedAStatus setMicrophoneDirection(MicrophoneDirection in_direction) override; + ndk::ScopedAStatus getMicrophoneFieldDimension(float* _aidl_return) override; + ndk::ScopedAStatus setMicrophoneFieldDimension(float in_zoom) override; + ndk::ScopedAStatus getHwGain(std::vector* _aidl_return) override; + ndk::ScopedAStatus setHwGain(const std::vector& in_channelGains) override; + + friend class ndk::SharedRefBase; + + StreamIn(StreamContext&& context, + const std::vector<::aidl::android::media::audio::common::MicrophoneInfo>& microphones); + + StreamContext mContextInstance; + const std::map<::aidl::android::media::audio::common::AudioDevice, std::string> mMicrophones; +}; + +class StreamInHwGainHelper { + protected: + explicit StreamInHwGainHelper(const StreamContext* context); + + ndk::ScopedAStatus getHwGainImpl(std::vector* _aidl_return); + ndk::ScopedAStatus setHwGainImpl(const std::vector& in_channelGains); + + const size_t mChannelCount; + std::vector mHwGains; +}; + +class StreamOut : virtual public StreamCommonInterface, public BnStreamOut { + protected: + void defaultOnClose(); + + ndk::ScopedAStatus getStreamCommon(std::shared_ptr* _aidl_return) override { + return getStreamCommonCommon(_aidl_return); + } + ndk::ScopedAStatus updateMetadata( + const 
::aidl::android::hardware::audio::common::SourceMetadata& in_sourceMetadata) + override { + return updateMetadataCommon(in_sourceMetadata); + } + ndk::ScopedAStatus updateOffloadMetadata( + const ::aidl::android::hardware::audio::common::AudioOffloadMetadata& + in_offloadMetadata) override; + ndk::ScopedAStatus getHwVolume(std::vector* _aidl_return) override; + ndk::ScopedAStatus setHwVolume(const std::vector& in_channelVolumes) override; + ndk::ScopedAStatus getAudioDescriptionMixLevel(float* _aidl_return) override; + ndk::ScopedAStatus setAudioDescriptionMixLevel(float in_leveldB) override; + ndk::ScopedAStatus getDualMonoMode( + ::aidl::android::media::audio::common::AudioDualMonoMode* _aidl_return) override; + ndk::ScopedAStatus setDualMonoMode( + ::aidl::android::media::audio::common::AudioDualMonoMode in_mode) override; + ndk::ScopedAStatus getRecommendedLatencyModes( + std::vector<::aidl::android::media::audio::common::AudioLatencyMode>* _aidl_return) + override; + ndk::ScopedAStatus setLatencyMode( + ::aidl::android::media::audio::common::AudioLatencyMode in_mode) override; + ndk::ScopedAStatus getPlaybackRateParameters( + ::aidl::android::media::audio::common::AudioPlaybackRate* _aidl_return) override; + ndk::ScopedAStatus setPlaybackRateParameters( + const ::aidl::android::media::audio::common::AudioPlaybackRate& in_playbackRate) + override; + ndk::ScopedAStatus selectPresentation(int32_t in_presentationId, int32_t in_programId) override; + + friend class ndk::SharedRefBase; + + StreamOut(StreamContext&& context, + const std::optional<::aidl::android::media::audio::common::AudioOffloadInfo>& + offloadInfo); + + StreamContext mContextInstance; + const std::optional<::aidl::android::media::audio::common::AudioOffloadInfo> mOffloadInfo; + std::optional<::aidl::android::hardware::audio::common::AudioOffloadMetadata> mOffloadMetadata; +}; + +class StreamOutHwVolumeHelper { + protected: + explicit StreamOutHwVolumeHelper(const StreamContext* context); + + 
ndk::ScopedAStatus getHwVolumeImpl(std::vector* _aidl_return); + ndk::ScopedAStatus setHwVolumeImpl(const std::vector& in_channelVolumes); + + const size_t mChannelCount; + std::vector mHwVolumes; +}; + +// The recommended way to create a stream instance. +// 'StreamImpl' is the concrete stream implementation, 'StreamInOrOut' is either 'StreamIn' or +// 'StreamOut', the rest are the arguments forwarded to the constructor of 'StreamImpl'. +template +ndk::ScopedAStatus createStreamInstance(std::shared_ptr* result, Args&&... args) { + std::shared_ptr stream = + ::ndk::SharedRefBase::make(std::forward(args)...); + RETURN_STATUS_IF_ERROR(stream->initInstance(stream)); + *result = std::move(stream); + return ndk::ScopedAStatus::ok(); +} + +class StreamWrapper { + public: + explicit StreamWrapper(const std::shared_ptr& streamIn) + : mStream(streamIn), mStreamBinder(streamIn->asBinder()) {} + explicit StreamWrapper(const std::shared_ptr& streamOut) + : mStream(streamOut), mStreamBinder(streamOut->asBinder()) {} + ndk::SpAIBinder getBinder() const { return mStreamBinder; } + bool isStreamOpen() const { + auto s = mStream.lock(); + return s && !s->isClosed(); + } + ndk::ScopedAStatus setConnectedDevices( + const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices) { + auto s = mStream.lock(); + if (s) return s->setConnectedDevices(devices); + return ndk::ScopedAStatus::ok(); + } + ndk::ScopedAStatus bluetoothParametersUpdated() { + auto s = mStream.lock(); + if (s) return s->bluetoothParametersUpdated(); + return ndk::ScopedAStatus::ok(); + } + + ndk::ScopedAStatus setGain(float gain) { + auto s = mStream.lock(); + if (s) return s->setGain(gain); + return ndk::ScopedAStatus::ok(); + } + + void dump(int fd, const char** args, uint32_t numArgs) const { + auto s = ::ndk::ICInterface::asInterface(mStreamBinder.get()); + if (s) s->dump(fd, args, numArgs); + return; + } + + private: + std::weak_ptr mStream; + ndk::SpAIBinder mStreamBinder; +}; + +class Streams 
{ + public: + Streams() = default; + Streams(const Streams&) = delete; + Streams& operator=(const Streams&) = delete; + size_t count(int32_t id) { + // Streams do not remove themselves from the collection on close. + erase_if(mStreams, [](const auto& pair) { return !pair.second.isStreamOpen(); }); + return mStreams.count(id); + } + void insert(int32_t portId, int32_t portConfigId, StreamWrapper sw) { + mStreams.insert(std::pair{portConfigId, sw}); + mStreams.insert(std::pair{portId, std::move(sw)}); + } + ndk::ScopedAStatus setStreamConnectedDevices( + int32_t portConfigId, + const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices) { + if (auto it = mStreams.find(portConfigId); it != mStreams.end()) { + return it->second.setConnectedDevices(devices); + } + return ndk::ScopedAStatus::ok(); + } + ndk::ScopedAStatus bluetoothParametersUpdated() { + bool isOk = true; + for (auto& it : mStreams) { + if (!it.second.bluetoothParametersUpdated().isOk()) isOk = false; + } + return isOk ? ndk::ScopedAStatus::ok() + : ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); + } + ndk::ScopedAStatus setGain(int32_t portId, float gain) { + if (auto it = mStreams.find(portId); it != mStreams.end()) { + return it->second.setGain(gain); + } + return ndk::ScopedAStatus::ok(); + } + void dump(int32_t portConfigId, int fd, const char** args, uint32_t numArgs) const { + if (auto it = mStreams.find(portConfigId); it != mStreams.end()) { + it->second.dump(fd, args, numArgs); + } + return; + } + + private: + // Maps port ids and port config ids to streams. Multimap because a port + // (not port config) can have multiple streams opened on it. 
+ std::multimap mStreams; +}; + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/include/core-impl/StreamAlsa.h b/audio/include/core-impl/StreamAlsa.h new file mode 100644 index 0000000..c0dcb63 --- /dev/null +++ b/audio/include/core-impl/StreamAlsa.h @@ -0,0 +1,85 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include +#include +#include +#include + +#include +#include + +#include "Stream.h" +#include "alsa/Utils.h" + +namespace aidl::android::hardware::audio::core { + +// This class is intended to be used as a base class for implementations +// that use TinyAlsa. +// This class does not define a complete stream implementation, +// and should never be used on its own. Derived classes are expected to +// provide necessary overrides for all interface methods omitted here. +class StreamAlsa : public StreamCommonImpl { + public: + StreamAlsa(StreamContext* context, const Metadata& metadata, int readWriteRetries); + ~StreamAlsa(); + + // Methods of 'DriverInterface'. 
+ ::android::status_t init(DriverCallbackInterface* callback) override; + ::android::status_t drain(StreamDescriptor::DrainMode) override; + ::android::status_t flush() override; + ::android::status_t pause() override; + ::android::status_t standby() override; + ::android::status_t start() override; + ::android::status_t transfer(void* buffer, size_t frameCount, size_t* actualFrameCount, + int32_t* latencyMs) override; + ::android::status_t refinePosition(StreamDescriptor::Position* position) override; + void shutdown() override; + ndk::ScopedAStatus setGain(float gain) override; + + protected: + // Called from 'start' to initialize 'mAlsaDeviceProxies', the vector must be non-empty. + virtual std::vector getDeviceProfiles() = 0; + + const size_t mBufferSizeFrames; + const size_t mFrameSizeBytes; + const int mSampleRate; + const bool mIsInput; + const std::optional mConfig; + const int mReadWriteRetries; + + private: + ::android::NBAIO_Format getPipeFormat() const; + ::android::sp<::android::MonoPipe> makeSink(bool writeCanBlock); + ::android::sp<::android::MonoPipeReader> makeSource(::android::MonoPipe* pipe); + void inputIoThread(size_t idx); + void outputIoThread(size_t idx); + void teardownIo(); + + std::atomic mGain = 1.0; + + // All fields below are only used on the worker thread. + std::vector mAlsaDeviceProxies; + // Only 'libnbaio_mono' is vendor-accessible, thus no access to the multi-reader Pipe. 
+ std::vector<::android::sp<::android::MonoPipe>> mSinks; + std::vector<::android::sp<::android::MonoPipeReader>> mSources; + std::vector mIoThreads; + std::atomic mIoThreadIsRunning = false; // used by all threads +}; + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/include/core-impl/StreamBluetooth.h b/audio/include/core-impl/StreamBluetooth.h new file mode 100644 index 0000000..2bdd6b2 --- /dev/null +++ b/audio/include/core-impl/StreamBluetooth.h @@ -0,0 +1,116 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include +#include + +#include +#include +#include + +#include "core-impl/DevicePortProxy.h" +#include "core-impl/ModuleBluetooth.h" +#include "core-impl/Stream.h" + +namespace aidl::android::hardware::audio::core { + +class StreamBluetooth : public StreamCommonImpl { + public: + static bool checkConfigParams( + const ::aidl::android::hardware::bluetooth::audio::PcmConfiguration& pcmConfig, + const ::aidl::android::media::audio::common::AudioConfigBase& config); + + StreamBluetooth( + StreamContext* context, const Metadata& metadata, + ModuleBluetooth::BtProfileHandles&& btHandles, + const std::shared_ptr<::android::bluetooth::audio::aidl::BluetoothAudioPortAidl>& + btDeviceProxy, + const ::aidl::android::hardware::bluetooth::audio::PcmConfiguration& pcmConfig); + ~StreamBluetooth(); + + // Methods of 'DriverInterface'. 
+ ::android::status_t init(DriverCallbackInterface*) override; + ::android::status_t drain(StreamDescriptor::DrainMode) override; + ::android::status_t flush() override; + ::android::status_t pause() override; + ::android::status_t standby() override; + ::android::status_t start() override; + ::android::status_t transfer(void* buffer, size_t frameCount, size_t* actualFrameCount, + int32_t* latencyMs) override; + void shutdown() override; + + // Overridden methods of 'StreamCommonImpl', called on a Binder thread. + ndk::ScopedAStatus updateMetadataCommon(const Metadata& metadata) override; + ndk::ScopedAStatus prepareToClose() override; + ndk::ScopedAStatus bluetoothParametersUpdated() override; + + private: + const size_t mFrameSizeBytes; + const bool mIsInput; + const std::weak_ptr mBluetoothA2dp; + const std::weak_ptr mBluetoothLe; + const size_t mPreferredDataIntervalUs; + mutable std::mutex mLock; + // The lock is also used to serialize calls to the proxy. + std::shared_ptr<::android::bluetooth::audio::aidl::BluetoothAudioPortAidl> mBtDeviceProxy + GUARDED_BY(mLock); // proxy may be null if the stream is not connected to a device +}; + +class StreamInBluetooth final : public StreamIn, public StreamBluetooth { + public: + friend class ndk::SharedRefBase; + + static int32_t getNominalLatencyMs(size_t dataIntervalUs); + + StreamInBluetooth( + StreamContext&& context, + const ::aidl::android::hardware::audio::common::SinkMetadata& sinkMetadata, + const std::vector<::aidl::android::media::audio::common::MicrophoneInfo>& microphones, + ModuleBluetooth::BtProfileHandles&& btHandles, + const std::shared_ptr<::android::bluetooth::audio::aidl::BluetoothAudioPortAidl>& + btDeviceProxy, + const ::aidl::android::hardware::bluetooth::audio::PcmConfiguration& pcmConfig); + + private: + void onClose(StreamDescriptor::State) override { defaultOnClose(); } + ndk::ScopedAStatus getActiveMicrophones( + std::vector<::aidl::android::media::audio::common::MicrophoneDynamicInfo>* 
_aidl_return) + override; +}; + +class StreamOutBluetooth final : public StreamOut, public StreamBluetooth { + public: + friend class ndk::SharedRefBase; + + static int32_t getNominalLatencyMs(size_t dataIntervalUs); + + StreamOutBluetooth( + StreamContext&& context, + const ::aidl::android::hardware::audio::common::SourceMetadata& sourceMetadata, + const std::optional<::aidl::android::media::audio::common::AudioOffloadInfo>& + offloadInfo, + ModuleBluetooth::BtProfileHandles&& btHandles, + const std::shared_ptr<::android::bluetooth::audio::aidl::BluetoothAudioPortAidl>& + btDeviceProxy, + const ::aidl::android::hardware::bluetooth::audio::PcmConfiguration& pcmConfig); + + private: + void onClose(StreamDescriptor::State) override { defaultOnClose(); } +}; + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/include/core-impl/StreamMmapStub.h b/audio/include/core-impl/StreamMmapStub.h new file mode 100644 index 0000000..0332007 --- /dev/null +++ b/audio/include/core-impl/StreamMmapStub.h @@ -0,0 +1,132 @@ +/* + * Copyright (C) 2025 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +#include +#include + +#include "core-impl/DriverStubImpl.h" +#include "core-impl/Stream.h" + +namespace aidl::android::hardware::audio::core { + +namespace mmap { + +struct DspSimulatorState { + const bool isInput; + const int sampleRate; + const int frameSizeBytes; + const size_t bufferSizeBytes; + std::mutex lock; + // The lock is also used to prevent un-mapping while the memory is in use. + uint8_t* sharedMemory GUARDED_BY(lock) = nullptr; + StreamDescriptor::Position mmapPos GUARDED_BY(lock); +}; + +class DspSimulatorLogic : public ::android::hardware::audio::common::StreamLogic { + protected: + explicit DspSimulatorLogic(DspSimulatorState& sharedState) : mSharedState(sharedState) {} + std::string init() override; + Status cycle() override; + + private: + DspSimulatorState& mSharedState; + uint32_t mCycleDurationUs = 0; + uint8_t* mMemBegin = nullptr; + uint8_t* mMemPos = nullptr; + int64_t mLastFrames = 0; +}; + +class DspSimulatorWorker + : public ::android::hardware::audio::common::StreamWorker { + public: + explicit DspSimulatorWorker(DspSimulatorState& sharedState) + : ::android::hardware::audio::common::StreamWorker(sharedState) {} +}; + +} // namespace mmap + +class DriverMmapStubImpl : public DriverStubImpl { + public: + explicit DriverMmapStubImpl(const StreamContext& context); + ::android::status_t init(DriverCallbackInterface* callback) override; + ::android::status_t drain(StreamDescriptor::DrainMode drainMode) override; + ::android::status_t pause() override; + ::android::status_t start() override; + ::android::status_t transfer(void* buffer, size_t frameCount, size_t* actualFrameCount, + int32_t* latencyMs) override; + void shutdown() override; + ::android::status_t refinePosition(StreamDescriptor::Position* position) override; + ::android::status_t getMmapPositionAndLatency(StreamDescriptor::Position* position, + int32_t* latency) override; + + protected: + ::android::status_t initSharedMemory(int ashmemFd); + + private: + 
::android::status_t releaseSharedMemory() REQUIRES(mState.lock); + ::android::status_t startWorkerIfNeeded(); + + mmap::DspSimulatorState mState; + mmap::DspSimulatorWorker mDspWorker; + bool mDspWorkerStarted = false; +}; + +class StreamMmapStub : public StreamCommonImpl, public DriverMmapStubImpl { + public: + static const std::string kCreateMmapBufferName; + + StreamMmapStub(StreamContext* context, const Metadata& metadata); + ~StreamMmapStub(); + + ndk::ScopedAStatus getVendorParameters(const std::vector& in_ids, + std::vector* _aidl_return) override; + ndk::ScopedAStatus setVendorParameters(const std::vector& in_parameters, + bool in_async) override; + + private: + ndk::ScopedAStatus createMmapBuffer(MmapBufferDescriptor* desc); + + ndk::ScopedFileDescriptor mSharedMemoryFd; +}; + +class StreamInMmapStub final : public StreamIn, public StreamMmapStub { + public: + friend class ndk::SharedRefBase; + StreamInMmapStub( + StreamContext&& context, + const ::aidl::android::hardware::audio::common::SinkMetadata& sinkMetadata, + const std::vector<::aidl::android::media::audio::common::MicrophoneInfo>& microphones); + + private: + void onClose(StreamDescriptor::State) override { defaultOnClose(); } +}; + +class StreamOutMmapStub final : public StreamOut, public StreamMmapStub { + public: + friend class ndk::SharedRefBase; + StreamOutMmapStub( + StreamContext&& context, + const ::aidl::android::hardware::audio::common::SourceMetadata& sourceMetadata, + const std::optional<::aidl::android::media::audio::common::AudioOffloadInfo>& + offloadInfo); + + private: + void onClose(StreamDescriptor::State) override { defaultOnClose(); } +}; + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/include/core-impl/StreamOffloadStub.h b/audio/include/core-impl/StreamOffloadStub.h new file mode 100644 index 0000000..09b88aa --- /dev/null +++ b/audio/include/core-impl/StreamOffloadStub.h @@ -0,0 +1,105 @@ +/* + * Copyright (C) 2025 The Android Open Source Project + 
* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include +#include +#include +#include + +#include "core-impl/DriverStubImpl.h" +#include "core-impl/Stream.h" + +namespace aidl::android::hardware::audio::core { + +namespace offload { + +struct DspSimulatorState { + static constexpr int64_t kSkipBufferNotifyFrames = -1; + + const std::string formatEncoding; + const int sampleRate; + const int64_t earlyNotifyFrames; + DriverCallbackInterface* callback = nullptr; // set before starting DSP worker + std::mutex lock; + std::vector clipFramesLeft GUARDED_BY(lock); + int64_t bufferFramesLeft GUARDED_BY(lock) = 0; + int64_t bufferNotifyFrames GUARDED_BY(lock) = kSkipBufferNotifyFrames; +}; + +class DspSimulatorLogic : public ::android::hardware::audio::common::StreamLogic { + protected: + explicit DspSimulatorLogic(DspSimulatorState& sharedState) : mSharedState(sharedState) {} + std::string init() override; + Status cycle() override; + + private: + DspSimulatorState& mSharedState; +}; + +class DspSimulatorWorker + : public ::android::hardware::audio::common::StreamWorker { + public: + explicit DspSimulatorWorker(DspSimulatorState& sharedState) + : ::android::hardware::audio::common::StreamWorker(sharedState) {} +}; + +} // namespace offload + +class DriverOffloadStubImpl : public DriverStubImpl { + public: + explicit DriverOffloadStubImpl(const StreamContext& context); + ::android::status_t init(DriverCallbackInterface* callback) override; + 
::android::status_t drain(StreamDescriptor::DrainMode drainMode) override; + ::android::status_t flush() override; + ::android::status_t pause() override; + ::android::status_t start() override; + ::android::status_t transfer(void* buffer, size_t frameCount, size_t* actualFrameCount, + int32_t* latencyMs) override; + void shutdown() override; + + private: + ::android::status_t startWorkerIfNeeded(); + + const int64_t mBufferNotifyFrames; + offload::DspSimulatorState mState; + offload::DspSimulatorWorker mDspWorker; + bool mDspWorkerStarted = false; +}; + +class StreamOffloadStub : public StreamCommonImpl, public DriverOffloadStubImpl { + public: + static const std::set& getSupportedEncodings(); + + StreamOffloadStub(StreamContext* context, const Metadata& metadata); + ~StreamOffloadStub(); +}; + +class StreamOutOffloadStub final : public StreamOut, public StreamOffloadStub { + public: + friend class ndk::SharedRefBase; + StreamOutOffloadStub( + StreamContext&& context, + const ::aidl::android::hardware::audio::common::SourceMetadata& sourceMetadata, + const std::optional<::aidl::android::media::audio::common::AudioOffloadInfo>& + offloadInfo); + + private: + void onClose(StreamDescriptor::State) override { defaultOnClose(); } +}; + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/include/core-impl/StreamPrimary.h b/audio/include/core-impl/StreamPrimary.h new file mode 100644 index 0000000..06f8bc3 --- /dev/null +++ b/audio/include/core-impl/StreamPrimary.h @@ -0,0 +1,113 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include +#include + +#include + +#include "DriverStubImpl.h" +#include "StreamAlsa.h" +#include "primary/PrimaryMixer.h" + +namespace aidl::android::hardware::audio::core { + +class StreamPrimary : public StreamAlsa { + public: + StreamPrimary(StreamContext* context, const Metadata& metadata); + + // Methods of 'DriverInterface'. + ::android::status_t init(DriverCallbackInterface* callback) override; + ::android::status_t drain(StreamDescriptor::DrainMode mode) override; + ::android::status_t flush() override; + ::android::status_t pause() override; + ::android::status_t standby() override; + ::android::status_t start() override; + ::android::status_t transfer(void* buffer, size_t frameCount, size_t* actualFrameCount, + int32_t* latencyMs) override; + ::android::status_t refinePosition(StreamDescriptor::Position* position) override; + void shutdown() override; + + // Overridden methods of 'StreamCommonImpl', called on a Binder thread. 
+ ndk::ScopedAStatus setConnectedDevices(const ConnectedDevices& devices) override; + + protected: + std::vector getDeviceProfiles() override; + bool isStubStream(); + + const bool mIsAsynchronous; + int64_t mStartTimeNs = 0; + long mFramesSinceStart = 0; + bool mSkipNextTransfer = false; + + private: + using AlsaDeviceId = std::pair; + + static constexpr StreamPrimary::AlsaDeviceId kDefaultCardAndDeviceId{ + primary::PrimaryMixer::kAlsaCard, primary::PrimaryMixer::kAlsaDevice}; + static constexpr StreamPrimary::AlsaDeviceId kStubDeviceId{ + primary::PrimaryMixer::kInvalidAlsaCard, primary::PrimaryMixer::kInvalidAlsaDevice}; + + static AlsaDeviceId getCardAndDeviceId( + const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices); + static bool useStubStream(bool isInput, + const ::aidl::android::media::audio::common::AudioDevice& device); + + bool isStubStreamOnWorker() const { return mCurrAlsaDeviceId == kStubDeviceId; } + + DriverStubImpl mStubDriver; + mutable std::mutex mLock; + AlsaDeviceId mAlsaDeviceId GUARDED_BY(mLock) = kStubDeviceId; + + // Used by the worker thread only. 
+ AlsaDeviceId mCurrAlsaDeviceId = kStubDeviceId; +}; + +class StreamInPrimary final : public StreamIn, public StreamPrimary, public StreamInHwGainHelper { + public: + friend class ndk::SharedRefBase; + StreamInPrimary( + StreamContext&& context, + const ::aidl::android::hardware::audio::common::SinkMetadata& sinkMetadata, + const std::vector<::aidl::android::media::audio::common::MicrophoneInfo>& microphones); + + private: + void onClose(StreamDescriptor::State) override { defaultOnClose(); } + + ndk::ScopedAStatus getHwGain(std::vector* _aidl_return) override; + ndk::ScopedAStatus setHwGain(const std::vector& in_channelGains) override; +}; + +class StreamOutPrimary final : public StreamOut, + public StreamPrimary, + public StreamOutHwVolumeHelper { + public: + friend class ndk::SharedRefBase; + StreamOutPrimary(StreamContext&& context, + const ::aidl::android::hardware::audio::common::SourceMetadata& sourceMetadata, + const std::optional<::aidl::android::media::audio::common::AudioOffloadInfo>& + offloadInfo); + + private: + void onClose(StreamDescriptor::State) override { defaultOnClose(); } + + ndk::ScopedAStatus getHwVolume(std::vector* _aidl_return) override; + ndk::ScopedAStatus setHwVolume(const std::vector& in_channelVolumes) override; +}; + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/include/core-impl/StreamRemoteSubmix.h b/audio/include/core-impl/StreamRemoteSubmix.h new file mode 100644 index 0000000..28a446a --- /dev/null +++ b/audio/include/core-impl/StreamRemoteSubmix.h @@ -0,0 +1,116 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include + +#include "core-impl/Stream.h" +#include "deprecated/StreamSwitcher.h" +#include "r_submix/SubmixRoute.h" + +namespace aidl::android::hardware::audio::core { + +class StreamRemoteSubmix : public StreamCommonImpl { + public: + StreamRemoteSubmix( + StreamContext* context, const Metadata& metadata, + const ::aidl::android::media::audio::common::AudioDeviceAddress& deviceAddress); + ~StreamRemoteSubmix(); + + // Methods of 'DriverInterface'. + ::android::status_t init(DriverCallbackInterface*) override; + ::android::status_t drain(StreamDescriptor::DrainMode) override; + ::android::status_t flush() override; + ::android::status_t pause() override; + ::android::status_t standby() override; + ::android::status_t start() override; + ::android::status_t transfer(void* buffer, size_t frameCount, size_t* actualFrameCount, + int32_t* latencyMs) override; + ::android::status_t refinePosition(StreamDescriptor::Position* position) override; + void shutdown() override; + + // Overridden methods of 'StreamCommonImpl', called on a Binder thread. 
+ ndk::ScopedAStatus prepareToClose() override; + + private: + long getDelayInUsForFrameCount(size_t frameCount); + size_t getStreamPipeSizeInFrames(); + ::android::status_t outWrite(void* buffer, size_t frameCount, size_t* actualFrameCount); + ::android::status_t inRead(void* buffer, size_t frameCount, size_t* actualFrameCount); + + const ::aidl::android::media::audio::common::AudioDeviceAddress mDeviceAddress; + const bool mIsInput; + r_submix::AudioConfig mStreamConfig; + std::shared_ptr mCurrentRoute = nullptr; + + // Limit for the number of error log entries to avoid spamming the logs. + static constexpr int kMaxErrorLogs = 5; + // The duration of kMaxReadFailureAttempts * READ_ATTEMPT_SLEEP_MS must be strictly inferior + // to the duration of a record buffer at the current record sample rate (of the device, not of + // the recording itself). Here we have: 3 * 5ms = 15ms < 1024 frames * 1000 / 48000 = 21.333ms + static constexpr int kMaxReadFailureAttempts = 3; + // 5ms between two read attempts when pipe is empty + static constexpr int kReadAttemptSleepUs = 5000; + + int64_t mStartTimeNs = 0; + long mFramesSinceStart = 0; + int mReadErrorCount = 0; + int mReadFailureCount = 0; + int mWriteShutdownCount = 0; +}; + +class StreamInRemoteSubmix final : public StreamIn, public deprecated::StreamSwitcher { + public: + friend class ndk::SharedRefBase; + StreamInRemoteSubmix( + StreamContext&& context, + const ::aidl::android::hardware::audio::common::SinkMetadata& sinkMetadata, + const std::vector<::aidl::android::media::audio::common::MicrophoneInfo>& microphones); + + private: + DeviceSwitchBehavior switchCurrentStream( + const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices) + override; + std::unique_ptr createNewStream( + const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices, + StreamContext* context, const Metadata& metadata) override; + void onClose(StreamDescriptor::State) override { defaultOnClose(); } + 
ndk::ScopedAStatus getActiveMicrophones( + std::vector<::aidl::android::media::audio::common::MicrophoneDynamicInfo>* _aidl_return) + override; +}; + +class StreamOutRemoteSubmix final : public StreamOut, public deprecated::StreamSwitcher { + public: + friend class ndk::SharedRefBase; + StreamOutRemoteSubmix( + StreamContext&& context, + const ::aidl::android::hardware::audio::common::SourceMetadata& sourceMetadata, + const std::optional<::aidl::android::media::audio::common::AudioOffloadInfo>& + offloadInfo); + + private: + DeviceSwitchBehavior switchCurrentStream( + const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices) + override; + std::unique_ptr createNewStream( + const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices, + StreamContext* context, const Metadata& metadata) override; + void onClose(StreamDescriptor::State) override { defaultOnClose(); } +}; + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/include/core-impl/StreamStub.h b/audio/include/core-impl/StreamStub.h new file mode 100644 index 0000000..cee44db --- /dev/null +++ b/audio/include/core-impl/StreamStub.h @@ -0,0 +1,54 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +#include "core-impl/DriverStubImpl.h" +#include "core-impl/Stream.h" + +namespace aidl::android::hardware::audio::core { + +class StreamStub : public StreamCommonImpl, public DriverStubImpl { + public: + StreamStub(StreamContext* context, const Metadata& metadata); + ~StreamStub(); +}; + +class StreamInStub final : public StreamIn, public StreamStub { + public: + friend class ndk::SharedRefBase; + StreamInStub( + StreamContext&& context, + const ::aidl::android::hardware::audio::common::SinkMetadata& sinkMetadata, + const std::vector<::aidl::android::media::audio::common::MicrophoneInfo>& microphones); + + private: + void onClose(StreamDescriptor::State) override { defaultOnClose(); } +}; + +class StreamOutStub final : public StreamOut, public StreamStub { + public: + friend class ndk::SharedRefBase; + StreamOutStub(StreamContext&& context, + const ::aidl::android::hardware::audio::common::SourceMetadata& sourceMetadata, + const std::optional<::aidl::android::media::audio::common::AudioOffloadInfo>& + offloadInfo); + + private: + void onClose(StreamDescriptor::State) override { defaultOnClose(); } +}; + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/include/core-impl/StreamUsb.h b/audio/include/core-impl/StreamUsb.h new file mode 100644 index 0000000..694fccf --- /dev/null +++ b/audio/include/core-impl/StreamUsb.h @@ -0,0 +1,77 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include +#include +#include + +#include + +#include "StreamAlsa.h" + +namespace aidl::android::hardware::audio::core { + +class StreamUsb : public StreamAlsa { + public: + StreamUsb(StreamContext* context, const Metadata& metadata); + + // Methods of 'DriverInterface'. + ::android::status_t transfer(void* buffer, size_t frameCount, size_t* actualFrameCount, + int32_t* latencyMs) override; + + // Overridden methods of 'StreamCommonImpl', called on a Binder thread. + ndk::ScopedAStatus setConnectedDevices(const ConnectedDevices& devices) override; + + protected: + std::vector getDeviceProfiles() override; + + mutable std::mutex mLock; + std::vector mConnectedDeviceProfiles GUARDED_BY(mLock); + std::atomic mConnectedDevicesUpdated = false; +}; + +class StreamInUsb final : public StreamIn, public StreamUsb { + public: + friend class ndk::SharedRefBase; + StreamInUsb( + StreamContext&& context, + const ::aidl::android::hardware::audio::common::SinkMetadata& sinkMetadata, + const std::vector<::aidl::android::media::audio::common::MicrophoneInfo>& microphones); + + private: + void onClose(StreamDescriptor::State) override { defaultOnClose(); } + ndk::ScopedAStatus getActiveMicrophones( + std::vector<::aidl::android::media::audio::common::MicrophoneDynamicInfo>* _aidl_return) + override; +}; + +class StreamOutUsb final : public StreamOut, public StreamUsb, public StreamOutHwVolumeHelper { + public: + friend class ndk::SharedRefBase; + StreamOutUsb(StreamContext&& context, + const ::aidl::android::hardware::audio::common::SourceMetadata& sourceMetadata, + const std::optional<::aidl::android::media::audio::common::AudioOffloadInfo>& + offloadInfo); + + private: + void onClose(StreamDescriptor::State) override { defaultOnClose(); } + ndk::ScopedAStatus getHwVolume(std::vector* _aidl_return) override; + ndk::ScopedAStatus setHwVolume(const 
std::vector& in_channelVolumes) override; +}; + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/include/core-impl/Telephony.h b/audio/include/core-impl/Telephony.h new file mode 100644 index 0000000..0f8e93f --- /dev/null +++ b/audio/include/core-impl/Telephony.h @@ -0,0 +1,47 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include + +#include + +namespace aidl::android::hardware::audio::core { + +class Telephony : public BnTelephony { + public: + Telephony(); + + private: + ndk::ScopedAStatus getSupportedAudioModes( + std::vector<::aidl::android::media::audio::common::AudioMode>* _aidl_return) override; + ndk::ScopedAStatus switchAudioMode( + ::aidl::android::media::audio::common::AudioMode in_mode) override; + ndk::ScopedAStatus setTelecomConfig(const TelecomConfig& in_config, + TelecomConfig* _aidl_return) override; + + const std::vector<::aidl::android::media::audio::common::AudioMode> mSupportedAudioModes = { + ::aidl::android::media::audio::common::AudioMode::NORMAL, + ::aidl::android::media::audio::common::AudioMode::RINGTONE, + ::aidl::android::media::audio::common::AudioMode::IN_CALL, + ::aidl::android::media::audio::common::AudioMode::IN_COMMUNICATION, + // Omit CALL_SCREEN for a better VTS coverage. 
+ }; + TelecomConfig mTelecomConfig; +}; + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/include/core-impl/XmlConverter.h b/audio/include/core-impl/XmlConverter.h new file mode 100644 index 0000000..4b99d72 --- /dev/null +++ b/audio/include/core-impl/XmlConverter.h @@ -0,0 +1,162 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include +#include +#include + +#include +#include + +namespace aidl::android::hardware::audio::core::internal { + +template +class XmlConverter { + public: + XmlConverter(const std::string& configFilePath, + std::function(const char*)> readXmlConfig) + : XmlConverter(configFilePath, + ::android::audio_is_readable_configuration_file(configFilePath.c_str()), + readXmlConfig) {} + + const ::android::status_t& getStatus() const { return mStatus; } + + const std::string& getError() const { return mErrorMessage; } + + const std::optional& getXsdcConfig() const { return mXsdcConfig; } + + private: + XmlConverter(const std::string& configFilePath, const bool& isReadableConfigFile, + const std::function(const char*)>& readXmlConfig) + : mXsdcConfig{isReadableConfigFile ? readXmlConfig(configFilePath.c_str()) : std::nullopt}, + mStatus(mXsdcConfig ? 
::android::OK : ::android::NO_INIT), + mErrorMessage(generateError(configFilePath, isReadableConfigFile, mStatus)) {} + + static std::string generateError(const std::string& configFilePath, + const bool& isReadableConfigFile, + const ::android::status_t& status) { + std::string errorMessage; + if (status != ::android::OK) { + if (configFilePath.empty()) { + errorMessage = "No audio configuration files found"; + } else if (!isReadableConfigFile) { + errorMessage = std::string("Could not read requested XML config file: \"") + .append(configFilePath) + .append("\""); + } else { + errorMessage = std::string("Invalid XML config file: \"") + .append(configFilePath) + .append("\""); + } + } + return errorMessage; + } + + const std::optional mXsdcConfig; + const ::android::status_t mStatus; + const std::string mErrorMessage; +}; + +/** + * Converts a vector of an xsd wrapper type to a flat vector of the + * corresponding AIDL type. + * + * Wrapper types are used in order to have well-formed xIncludes. In the + * example below, Modules is the wrapper type for Module. + * + * ... + * ... + * + */ +template +static ConversionResult> convertWrappedCollectionToAidl( + const std::vector& xsdcWrapperTypeVec, + std::function&(const W&)> getInnerTypeVec, + std::function(const X&)> convertToAidl) { + std::vector resultAidlTypeVec; + if (!xsdcWrapperTypeVec.empty()) { + /* + * xsdcWrapperTypeVec likely only contains one element; that is, it's + * likely that all the inner types that we need to convert are inside of + * xsdcWrapperTypeVec[0]. 
+ */ + resultAidlTypeVec.reserve(getInnerTypeVec(xsdcWrapperTypeVec[0]).size()); + for (const W& xsdcWrapperType : xsdcWrapperTypeVec) { + for (const X& xsdcType : getInnerTypeVec(xsdcWrapperType)) { + resultAidlTypeVec.push_back(VALUE_OR_FATAL(convertToAidl(xsdcType))); + } + } + } + return resultAidlTypeVec; +} + +template +static ConversionResult>> convertCollectionToAidlOptionalValues( + const std::vector& xsdcTypeVec, + std::function(const X&)> convertToAidl) { + std::vector> resultAidlTypeVec; + resultAidlTypeVec.reserve(xsdcTypeVec.size()); + for (const X& xsdcType : xsdcTypeVec) { + resultAidlTypeVec.push_back( + std::optional(std::move(VALUE_OR_FATAL(convertToAidl(xsdcType))))); + } + return resultAidlTypeVec; +} + +template +static ConversionResult> convertCollectionToAidl( + const std::vector& xsdcTypeVec, + std::function(const X&)> convertToAidl) { + std::vector resultAidlTypeVec; + resultAidlTypeVec.reserve(xsdcTypeVec.size()); + for (const X& xsdcType : xsdcTypeVec) { + resultAidlTypeVec.push_back(VALUE_OR_FATAL(convertToAidl(xsdcType))); + } + return resultAidlTypeVec; +} + +/** + * Generates a map of xsd references, keyed by reference name, given a + * vector of wrapper types for the reference. + * + * Wrapper types are used in order to have well-formed xIncludes. In the + * example below, Wrapper is the wrapper type for Reference. + * + * ... + * ... + * + */ +template +std::unordered_map generateReferenceMap(const std::vector& xsdcWrapperTypeVec) { + std::unordered_map resultMap; + if (!xsdcWrapperTypeVec.empty()) { + /* + * xsdcWrapperTypeVec likely only contains one element; that is, it's + * likely that all the inner types that we need to convert are inside of + * xsdcWrapperTypeVec[0]. 
+ */ + resultMap.reserve(xsdcWrapperTypeVec[0].getReference().size()); + for (const W& xsdcWrapperType : xsdcWrapperTypeVec) { + for (const R& xsdcReference : xsdcWrapperType.getReference()) { + resultMap.insert({xsdcReference.getName(), xsdcReference}); + } + } + } + return resultMap; +} +} // namespace aidl::android::hardware::audio::core::internal diff --git a/audio/include/core-impl/XsdcConversion.h b/audio/include/core-impl/XsdcConversion.h new file mode 100644 index 0000000..b298eee --- /dev/null +++ b/audio/include/core-impl/XsdcConversion.h @@ -0,0 +1,68 @@ +/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "core-impl/Module.h" + +namespace aidl::android::hardware::audio::core::internal { + +namespace engineconfiguration = ::android::audio::policy::engine::configuration; +namespace aidlaudiocommon = ::aidl::android::media::audio::common; + +static constexpr const char kXsdcForceConfigForUse[] = "ForceUseFor"; + +ConversionResult convertForceUseToAidl( + const std::string& xsdcCriterionName, const std::string& xsdcCriterionValue); +ConversionResult convertDeviceAddressToAidl( + const std::string& xsdcAddress); +ConversionResult convertTelephonyModeToAidl( + const std::string& xsdcModeCriterionType); +ConversionResult convertDeviceTypeToAidl( + const std::string& xType); +ConversionResult>> +convertCapCriteriaCollectionToAidl( + const std::vector& xsdcCriteriaVec, + const std::vector& xsdcCriterionTypesVec); +ConversionResult convertCapCriterionV2ToAidl( + const engineconfiguration::CriterionType& xsdcCriterion, + const std::vector& xsdcCriterionTypesVec); +ConversionResult convertCurvePointToAidl( + const std::string& xsdcCurvePoint); +ConversionResult> convertModuleConfigToAidl( + const ::android::audio::policy::configuration::Modules::Module& moduleConfig); +ConversionResult convertAudioUsageToAidl( + const engineconfiguration::UsageEnumType& xsdcUsage); +ConversionResult convertAudioContentTypeToAidl( + const engineconfiguration::ContentType& xsdcContentType); +ConversionResult convertAudioSourceToAidl( + const engineconfiguration::SourceEnumType& xsdcSourceType); +ConversionResult convertAudioStreamTypeToAidl( + const engineconfiguration::Stream& xsdStreamType); +ConversionResult convertAudioFlagsToAidl( + const std::vector& xsdcFlagTypeVec); +std::unordered_map getLegacyProductStrategyMap(); +} // namespace aidl::android::hardware::audio::core::internal diff --git a/audio/include/core-impl/utils.h b/audio/include/core-impl/utils.h new file mode 
100644 index 0000000..ae33227 --- /dev/null +++ b/audio/include/core-impl/utils.h @@ -0,0 +1,122 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include +#include +#include +#include + +namespace aidl::android::hardware::audio::core { + +// Return whether all the elements in the vector are unique. +template +bool all_unique(const std::vector& v) { + return std::set(v.begin(), v.end()).size() == v.size(); +} + +// Erase all the specified elements from a map. +template +auto erase_all(C& c, const V& keys) { + auto oldSize = c.size(); + for (auto& k : keys) { + c.erase(k); + } + return oldSize - c.size(); +} + +// Erase all the elements in the container that satisfy the provided predicate. +template +auto erase_if(C& c, P pred) { + auto oldSize = c.size(); + for (auto it = c.begin(); it != c.end();) { + if (pred(*it)) { + it = c.erase(it); + } else { + ++it; + } + } + return oldSize - c.size(); +} + +// Erase all the elements in the map that have specified values. +template +auto erase_all_values(C& c, const V& values) { + return erase_if(c, [values](const auto& pair) { return values.count(pair.second) != 0; }); +} + +// Return non-zero count of elements for any of the provided keys. 
+template +size_t count_any(const M& m, const V& keys) { + for (auto& k : keys) { + if (size_t c = m.count(k); c != 0) return c; + } + return 0; +} + +// Assuming that M is a map whose values have an 'id' field, +// find an element with the specified id. +template +auto findById(M& m, int32_t id) { + return std::find_if(m.begin(), m.end(), [&](const auto& p) { return p.second.id == id; }); +} + +// Assuming that the vector contains elements with an 'id' field, +// find an element with the specified id. +template +auto findById(std::vector& v, int32_t id) { + return std::find_if(v.begin(), v.end(), [&](const auto& e) { return e.id == id; }); +} + +// Return elements from the vector that have specified ids, also +// optionally return which ids were not found. +template +std::vector selectByIds(std::vector& v, const std::vector& ids, + std::vector* missingIds = nullptr) { + std::vector result; + std::set idsSet(ids.begin(), ids.end()); + for (size_t i = 0; i < v.size(); ++i) { + T& e = v[i]; + if (idsSet.count(e.id) != 0) { + result.push_back(&v[i]); + idsSet.erase(e.id); + } + } + if (missingIds) { + *missingIds = std::vector(idsSet.begin(), idsSet.end()); + } + return result; +} + +// Assuming that M is a map whose keys' type is K and values' type is V, +// return the corresponding value of the given key from the map or default +// value if the key is not found. +template +auto findValueOrDefault(const M& m, const K& key, V defaultValue) { + auto it = m.find(key); + return it == m.end() ? defaultValue : it->second; +} + +// Assuming that M is a map whose keys' type is K, return the given key if it +// is found from the map or default value. +template +auto findKeyOrDefault(const M& m, const K& key, K defaultValue) { + auto it = m.find(key); + return it == m.end() ? 
defaultValue : key; +} + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/include/effect-impl/EffectContext.h b/audio/include/effect-impl/EffectContext.h new file mode 100644 index 0000000..9e44349 --- /dev/null +++ b/audio/include/effect-impl/EffectContext.h @@ -0,0 +1,121 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once +#include +#include + +#include +#include +#include +#include + +#include +#include "EffectTypes.h" + +namespace aidl::android::hardware::audio::effect { + +class EffectContext { + public: + typedef ::android::AidlMessageQueue< + IEffect::Status, ::aidl::android::hardware::common::fmq::SynchronizedReadWrite> + StatusMQ; + typedef ::android::AidlMessageQueue< + float, ::aidl::android::hardware::common::fmq::SynchronizedReadWrite> + DataMQ; + + EffectContext(size_t statusDepth, const Parameter::Common& common); + virtual ~EffectContext() { + if (mEfGroup) { + ::android::hardware::EventFlag::deleteEventFlag(&mEfGroup); + } + } + + void setVersion(int version) { mVersion = version; } + std::shared_ptr getStatusFmq() const; + std::shared_ptr getInputDataFmq() const; + std::shared_ptr getOutputDataFmq() const; + + float* getWorkBuffer(); + size_t getWorkBufferSize() const; + + // reset buffer status by abandon input data in FMQ + void resetBuffer(); + void dupeFmq(IEffect::OpenEffectReturn* effectRet); + size_t getInputFrameSize() const; + 
size_t getOutputFrameSize() const; + int getSessionId() const; + int getIoHandle() const; + + virtual void dupeFmqWithReopen(IEffect::OpenEffectReturn* effectRet); + + virtual RetCode setOutputDevice( + const std::vector& device); + + virtual std::vector + getOutputDevice(); + + virtual RetCode setAudioMode(const aidl::android::media::audio::common::AudioMode& mode); + virtual aidl::android::media::audio::common::AudioMode getAudioMode(); + + virtual RetCode setAudioSource(const aidl::android::media::audio::common::AudioSource& source); + virtual aidl::android::media::audio::common::AudioSource getAudioSource(); + + virtual RetCode setVolumeStereo(const Parameter::VolumeStereo& volumeStereo); + virtual Parameter::VolumeStereo getVolumeStereo(); + + virtual RetCode setCommon(const Parameter::Common& common); + virtual Parameter::Common getCommon(); + + virtual ::android::hardware::EventFlag* getStatusEventFlag(); + + virtual RetCode enable(); + virtual RetCode disable(); + virtual RetCode reset(); + + virtual RetCode startDraining(); + virtual RetCode finishDraining(); + virtual bool isDraining(); + + protected: + bool mIsDraining = false; + int mVersion = 0; + size_t mInputFrameSize = 0; + size_t mOutputFrameSize = 0; + size_t mInputChannelCount = 0; + size_t mOutputChannelCount = 0; + Parameter::Common mCommon = {}; + std::vector mOutputDevice = {}; + aidl::android::media::audio::common::AudioMode mMode = + aidl::android::media::audio::common::AudioMode::SYS_RESERVED_INVALID; + aidl::android::media::audio::common::AudioSource mSource = + aidl::android::media::audio::common::AudioSource::SYS_RESERVED_INVALID; + Parameter::VolumeStereo mVolumeStereo = {}; + RetCode updateIOFrameSize(const Parameter::Common& common); + RetCode notifyDataMqUpdate(); + + private: + // fmq and buffers + std::shared_ptr mStatusMQ = nullptr; + std::shared_ptr mInputMQ = nullptr; + std::shared_ptr mOutputMQ = nullptr; + // std::shared_ptr mRet; + // work buffer set by effect instances, the 
access and update are in same thread + std::vector mWorkBuffer = {}; + + ::android::hardware::EventFlag* mEfGroup = nullptr; +}; +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/include/effect-impl/EffectImpl.h b/audio/include/effect-impl/EffectImpl.h new file mode 100644 index 0000000..d3bb7f4 --- /dev/null +++ b/audio/include/effect-impl/EffectImpl.h @@ -0,0 +1,122 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once +#include +#include + +#include +#include + +#include "EffectContext.h" +#include "EffectThread.h" +#include "EffectTypes.h" +#include "effect-impl/EffectContext.h" +#include "effect-impl/EffectThread.h" +#include "effect-impl/EffectTypes.h" + +extern "C" binder_exception_t destroyEffect( + const std::shared_ptr& instanceSp); + +namespace aidl::android::hardware::audio::effect { + +class EffectImpl : public BnEffect, public EffectThread { + public: + EffectImpl() = default; + virtual ~EffectImpl() = default; + + virtual ndk::ScopedAStatus open(const Parameter::Common& common, + const std::optional& specific, + OpenEffectReturn* ret) override; + virtual ndk::ScopedAStatus close() override; + virtual ndk::ScopedAStatus command(CommandId id) override; + virtual ndk::ScopedAStatus reopen(OpenEffectReturn* ret) override; + + virtual ndk::ScopedAStatus getState(State* state) override; + virtual ndk::ScopedAStatus setParameter(const Parameter& param) override; + virtual ndk::ScopedAStatus getParameter(const Parameter::Id& id, Parameter* param) override; + + virtual ndk::ScopedAStatus setParameterCommon(const Parameter& param) REQUIRES(mImplMutex); + virtual ndk::ScopedAStatus getParameterCommon(const Parameter::Tag& tag, Parameter* param) + REQUIRES(mImplMutex); + + /* Methods MUST be implemented by each effect instances */ + virtual ndk::ScopedAStatus getDescriptor(Descriptor* desc) = 0; + virtual ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) + REQUIRES(mImplMutex) = 0; + virtual ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, + Parameter::Specific* specific) + REQUIRES(mImplMutex) = 0; + + virtual std::string getEffectName() = 0; + virtual std::shared_ptr createContext(const Parameter::Common& common) + REQUIRES(mImplMutex); + virtual RetCode releaseContext() REQUIRES(mImplMutex) = 0; + + /** + * @brief effectProcessImpl is running in worker thread which created in EffectThread. 
+ * + * EffectThread will make sure effectProcessImpl only be called after startThread() successful + * and before stopThread() successful. + * + * effectProcessImpl implementation must not call any EffectThread interface, otherwise it will + * cause deadlock. + * + * @param in address of input float buffer. + * @param out address of output float buffer. + * @param samples number of samples to process. + * @return IEffect::Status + */ + virtual IEffect::Status effectProcessImpl(float* in, float* out, int samples) = 0; + + /** + * process() get data from data MQs, and call effectProcessImpl() for effect data processing. + * Its important for the implementation to use mImplMutex for context synchronization. + */ + void process() override; + + protected: + // current Hal version + int mVersion = 0; + // Use kEventFlagNotEmpty for V1 HAL, kEventFlagDataMqNotEmpty for V2 and above + int mDataMqNotEmptyEf = aidl::android::hardware::audio::effect::kEventFlagDataMqNotEmpty; + + State mState GUARDED_BY(mImplMutex) = State::INIT; + + IEffect::Status status(binder_status_t status, size_t consumed, size_t produced); + void cleanUp(); + + std::mutex mImplMutex; + std::shared_ptr mImplContext GUARDED_BY(mImplMutex); + + /** + * Optional CommandId handling methods for effects to override. + * For CommandId::START, EffectImpl call commandImpl before starting the EffectThread + * processing. + * For CommandId::STOP and CommandId::RESET, EffectImpl call commandImpl after stop the + * EffectThread processing. 
+ */ + virtual ndk::ScopedAStatus commandImpl(CommandId id) REQUIRES(mImplMutex); + + RetCode notifyEventFlag(uint32_t flag); + + std::string getEffectNameWithVersion() { + return getEffectName() + "V" + std::to_string(mVersion); + } + + ::android::hardware::EventFlag* mEventFlag; +}; +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/include/effect-impl/EffectRange.h b/audio/include/effect-impl/EffectRange.h new file mode 100644 index 0000000..a3ea01f --- /dev/null +++ b/audio/include/effect-impl/EffectRange.h @@ -0,0 +1,48 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +#include +#include +#include +#include + +namespace aidl::android::hardware::audio::effect { + +template +bool isInRange(const T& value, const T& low, const T& high) { + return (value >= low) && (value <= high); +} + +template +bool isTupleInRange(const T& test, const T& min, const T& max, std::index_sequence) { + return (isInRange(std::get(test), std::get(min), std::get(max)) && ...); +} + +template > +bool isTupleInRange(const T& test, const T& min, const T& max) { + return isTupleInRange(test, min, max, std::make_index_sequence{}); +} + +template +bool isTupleInRange(const std::vector& cfgs, const T& min, const T& max, const F& func) { + auto minT = func(min), maxT = func(max); + return std::all_of(cfgs.cbegin(), cfgs.cend(), + [&](const T& cfg) { return isTupleInRange(func(cfg), minT, maxT); }); +} + +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/include/effect-impl/EffectThread.h b/audio/include/effect-impl/EffectThread.h new file mode 100644 index 0000000..9abcdb8 --- /dev/null +++ b/audio/include/effect-impl/EffectThread.h @@ -0,0 +1,69 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once +#include +#include +#include +#include + +#include +#include +#include + +#include "effect-impl/EffectContext.h" +#include "effect-impl/EffectTypes.h" + +namespace aidl::android::hardware::audio::effect { + +class EffectThread { + public: + virtual ~EffectThread(); + + // called by effect implementation + RetCode createThread(const std::string& name, int priority = ANDROID_PRIORITY_URGENT_AUDIO); + RetCode destroyThread(); + RetCode startThread(); + RetCode stopThread(); + RetCode startDraining(); + RetCode finishDraining(); + + // Will call process() in a loop if the thread is running. + void threadLoop(); + + /** + * process() call effectProcessImpl() for effect data processing, it is necessary for the + * processing to be called under Effect thread mutex mThreadMutex, to avoid the effect state + * change before/during data processing, and keep the thread and effect state consistent. + */ + virtual void process() = 0; + + protected: + bool mDraining GUARDED_BY(mThreadMutex) = false; + + private: + static constexpr int kMaxTaskNameLen = 15; + + std::mutex mThreadMutex; + std::condition_variable mCv; + bool mStop GUARDED_BY(mThreadMutex) = true; + bool mExit GUARDED_BY(mThreadMutex) = false; + + std::thread mThread; + int mPriority; + std::string mName; +}; +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/include/effect-impl/EffectTypes.h b/audio/include/effect-impl/EffectTypes.h new file mode 100644 index 0000000..66c0ff1 --- /dev/null +++ b/audio/include/effect-impl/EffectTypes.h @@ -0,0 +1,133 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once +#include + +#include +#include +#include +#include + +typedef binder_exception_t (*EffectCreateFunctor)( + const ::aidl::android::media::audio::common::AudioUuid*, + std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>*); +typedef binder_exception_t (*EffectDestroyFunctor)( + const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>&); +typedef binder_exception_t (*EffectQueryFunctor)( + const ::aidl::android::media::audio::common::AudioUuid*, + ::aidl::android::hardware::audio::effect::Descriptor*); + +struct effect_dl_interface_s { + EffectCreateFunctor createEffectFunc; + EffectDestroyFunctor destroyEffectFunc; + EffectQueryFunctor queryEffectFunc; +}; + +namespace aidl::android::hardware::audio::effect { + +enum class RetCode { + SUCCESS, + ERROR_ILLEGAL_PARAMETER, /* Illegal parameter */ + ERROR_THREAD, /* Effect thread error */ + ERROR_NULL_POINTER, /* NULL pointer */ + ERROR_ALIGNMENT_ERROR, /* Memory alignment error */ + ERROR_BLOCK_SIZE_EXCEED, /* Maximum block size exceeded */ + ERROR_EFFECT_LIB_ERROR, /* Effect implementation library error */ + ERROR_EVENT_FLAG_ERROR /* Error with effect event flags */ +}; + +static const int INVALID_AUDIO_SESSION_ID = -1; + +inline std::ostream& operator<<(std::ostream& out, const RetCode& code) { + switch (code) { + case RetCode::SUCCESS: + return out << "SUCCESS"; + case RetCode::ERROR_ILLEGAL_PARAMETER: + return out << "ERROR_ILLEGAL_PARAMETER"; + case RetCode::ERROR_THREAD: + return out << "ERROR_THREAD"; + case RetCode::ERROR_NULL_POINTER: + return 
out << "ERROR_NULL_POINTER"; + case RetCode::ERROR_ALIGNMENT_ERROR: + return out << "ERROR_ALIGNMENT_ERROR"; + case RetCode::ERROR_BLOCK_SIZE_EXCEED: + return out << "ERROR_BLOCK_SIZE_EXCEED"; + case RetCode::ERROR_EFFECT_LIB_ERROR: + return out << "ERROR_EFFECT_LIB_ERROR"; + case RetCode::ERROR_EVENT_FLAG_ERROR: + return out << "ERROR_EVENT_FLAG_ERROR"; + } + + return out << "EnumError: " << code; +} + +#define RETURN_IF_ASTATUS_NOT_OK(status, message) \ + do { \ + const ::ndk::ScopedAStatus curr_status = (status); \ + if (!curr_status.isOk()) { \ + LOG(ERROR) << __func__ << ": line" << __LINE__ \ + << " return with status: " << curr_status.getDescription() << (message); \ + return ndk::ScopedAStatus::fromExceptionCodeWithMessage( \ + curr_status.getExceptionCode(), (message)); \ + } \ + } while (0) + +#define RETURN_IF(expr, exception, message) \ + do { \ + if (expr) { \ + LOG(ERROR) << __func__ << ": line" << __LINE__ << " return with expr " << #expr; \ + return ndk::ScopedAStatus::fromExceptionCodeWithMessage((exception), (message)); \ + } \ + } while (0) + +#define RETURN_OK_IF(expr) \ + do { \ + if (expr) { \ + LOG(INFO) << __func__ << ": line" << __LINE__ << " return with expr " << #expr; \ + return ndk::ScopedAStatus::ok(); \ + } \ + } while (0) + +#define RETURN_VALUE_IF(expr, ret, log) \ + do { \ + if (expr) { \ + LOG(ERROR) << __func__ << ": line" << __LINE__ << " return with expr \"" << #expr \ + << "\":" << (log); \ + return ret; \ + } \ + } while (0) + +#define RETURN_IF_BINDER_EXCEPTION(functor) \ + { \ + binder_exception_t exception = functor; \ + if (EX_NONE != exception) { \ + LOG(ERROR) << #functor << ": failed with error " << exception; \ + return ndk::ScopedAStatus::fromExceptionCode(exception); \ + } \ + } + +/** + * Make a Range::$EffectType$Range. + * T: The $EffectType$, Visualizer for example. + * Tag: The union tag name in $EffectType$ definition, latencyMs for example. + * l: The value of Range::$EffectType$Range.min. 
+ * r: The value of Range::$EffectType$Range.max. + */ +#define MAKE_RANGE(T, Tag, l, r) \ + { .min = T::make(l), .max = T::make(r) } + +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/include/effectFactory-impl/EffectConfig.h b/audio/include/effectFactory-impl/EffectConfig.h new file mode 100644 index 0000000..60bb9be --- /dev/null +++ b/audio/include/effectFactory-impl/EffectConfig.h @@ -0,0 +1,117 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include +#include +#include +#include +#include +#include + +#include +#include + +#include +#include "effect-impl/EffectTypes.h" + +namespace aidl::android::hardware::audio::effect { + +/** + * Library contains a mapping from library name to path. + * Effect contains a mapping from effect name to Libraries and implementation UUID. + * Pre/post processor contains a mapping from processing name to effect names. 
+ */ +class EffectConfig { + public: + explicit EffectConfig(const std::string& file); + + struct Library { + std::string name; // library name + ::aidl::android::media::audio::common::AudioUuid uuid; // implementation UUID + std::optional<::aidl::android::media::audio::common::AudioUuid> type; // optional type UUID + }; + // + struct EffectLibraries { + std::optional proxyLibrary; + std::vector libraries; + }; + + int getSkippedElements() const { return mSkippedElements; } + const std::unordered_map getLibraryMap() const { return mLibraryMap; } + const std::unordered_map getEffectsMap() const { + return mEffectsMap; + } + + static bool findUuid(const std::pair& effectElem, + ::aidl::android::media::audio::common::AudioUuid* uuid); + + using ProcessingLibrariesMap = std::map>; + const ProcessingLibrariesMap& getProcessingMap() const; + + private: +#ifdef __LP64__ +#define SOUND_FX_PATH "/lib64/soundfx/" +#else +#define SOUND_FX_PATH "/lib/soundfx/" +#endif + static constexpr const char* kEffectLibPath[] = + { "/odm" SOUND_FX_PATH, "/vendor" SOUND_FX_PATH, "/system" SOUND_FX_PATH }; + + static constexpr const char* kEffectLibApexPath = SOUND_FX_PATH; +#undef SOUND_FX_PATH + + int mSkippedElements; + /* Parsed Libraries result */ + std::unordered_map mLibraryMap; + /* Parsed Effects result */ + std::unordered_map mEffectsMap; + /** + * For parsed pre/post processing result: {key: AudioStreamType/AudioSource/AudioDevice, value: + * EffectLibraries} + */ + ProcessingLibrariesMap mProcessingMap; + + /** @return all `node`s children that are elements and match the tag if provided. */ + std::vector> getChildren( + const tinyxml2::XMLNode& node, const char* childTag = nullptr); + + /** Parse a library xml note and push the result in mLibraryMap or return false on failure. */ + bool parseLibrary(const tinyxml2::XMLElement& xml); + + /** Parse an effect from an xml element describing it. + * @return true and pushes the effect in mEffectsMap on success, false on failure. 
+ */ + bool parseEffect(const tinyxml2::XMLElement& xml); + + bool parseProcessing(Processing::Type::Tag typeTag, const tinyxml2::XMLElement& xml); + + // Function to parse effect.library name and effect.uuid from xml + bool parseLibrary(const tinyxml2::XMLElement& xml, struct Library& library, + bool isProxy = false); + + const char* dump(const tinyxml2::XMLElement& element, + tinyxml2::XMLPrinter&& printer = {}) const; + + bool resolveLibrary(const std::string& path, std::string* resolvedPath); + + std::optional stringToProcessingType(Processing::Type::Tag typeTag, + const std::string& type, + const std::string& address); +}; + +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/include/effectFactory-impl/EffectFactory.h b/audio/include/effectFactory-impl/EffectFactory.h new file mode 100644 index 0000000..d0b8204 --- /dev/null +++ b/audio/include/effectFactory-impl/EffectFactory.h @@ -0,0 +1,125 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include +#include +#include +#include +#include +#include + +#include +#include +#include "EffectConfig.h" + +namespace aidl::android::hardware::audio::effect { + +class Factory : public BnFactory { + public: + explicit Factory(const std::string& file); + /** + * @brief Get identity of all effects supported by the device, with the optional filter by type + * and/or by instance UUID. 
+ * + * @param in_type Type UUID. + * @param in_instance Instance UUID. + * @param in_proxy Proxy UUID. + * @param out_descriptor List of Descriptors. + * @return ndk::ScopedAStatus + */ + ndk::ScopedAStatus queryEffects( + const std::optional<::aidl::android::media::audio::common::AudioUuid>& in_type, + const std::optional<::aidl::android::media::audio::common::AudioUuid>& in_instance, + const std::optional<::aidl::android::media::audio::common::AudioUuid>& in_proxy, + std::vector* out_descriptor) override; + + /** + * @brief Query list of defined processing, with the optional filter by AudioStreamType + * + * @param in_type Type of processing, could be AudioStreamType or AudioSource. Optional. + * @param _aidl_return List of processing filtered by in_type. + * @return ndk::ScopedAStatus + */ + ndk::ScopedAStatus queryProcessing(const std::optional& in_type, + std::vector* _aidl_return) override; + + /** + * @brief Create an effect instance for a certain implementation (identified by UUID). + * + * @param in_impl_uuid Effect implementation UUID. + * @param _aidl_return A pointer to created effect instance. + * @return ndk::ScopedAStatus + */ + ndk::ScopedAStatus createEffect( + const ::aidl::android::media::audio::common::AudioUuid& in_impl_uuid, + std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>* _aidl_return) + override; + + /** + * @brief Destroy an effect instance. + * + * @param in_handle Effect instance handle. + * @return ndk::ScopedAStatus + */ + ndk::ScopedAStatus destroyEffect( + const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>& in_handle) + override; + + private: + const EffectConfig mConfig; + ~Factory(); + + std::mutex mMutex; + // Set of effect descriptors supported by the devices. 
+ std::set mDescSet GUARDED_BY(mMutex); + std::set mIdentitySet GUARDED_BY(mMutex); + + static constexpr int kMapEntryHandleIndex = 0; + static constexpr int kMapEntryInterfaceIndex = 1; + static constexpr int kMapEntryLibNameIndex = 2; + typedef std::tuple> /* dlHandle */, + std::unique_ptr /* interfaces */, + std::string /* library name */> + DlEntry; + + std::map mEffectLibMap + GUARDED_BY(mMutex); + + typedef std::pair EffectEntry; + std::map, EffectEntry, std::owner_less<>> mEffectMap GUARDED_BY(mMutex); + + ndk::ScopedAStatus destroyEffectImpl_l(const std::shared_ptr& in_handle) + REQUIRES(mMutex); + void cleanupEffectMap_l() REQUIRES(mMutex); + bool openEffectLibrary(const ::aidl::android::media::audio::common::AudioUuid& impl, + const std::string& path); + void createIdentityWithConfig( + const EffectConfig::Library& configLib, + const ::aidl::android::media::audio::common::AudioUuid& typeUuidStr, + const std::optional<::aidl::android::media::audio::common::AudioUuid> proxyUuid); + + ndk::ScopedAStatus getDescriptorWithUuid_l( + const aidl::android::media::audio::common::AudioUuid& uuid, Descriptor* desc) + REQUIRES(mMutex); + + void loadEffectLibs(); + /* Get effect_dl_interface_s from library handle */ + void getDlSyms_l(DlEntry& entry) REQUIRES(mMutex); +}; + +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/loudnessEnhancer/Android.bp b/audio/loudnessEnhancer/Android.bp new file mode 100644 index 0000000..4b30484 --- /dev/null +++ b/audio/loudnessEnhancer/Android.bp @@ -0,0 +1,40 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package { + default_team: "trendy_team_android_media_audio_framework", + // See: http://go/android-license-faq + // A large-scale-change added 'default_applicable_licenses' to import + // all of the 'license_kinds' from "hardware_interfaces_license" + // to get the below license kinds: + // SPDX-license-identifier-Apache-2.0 + default_applicable_licenses: ["hardware_interfaces_license"], +} + +cc_library_shared { + name: "libloudnessenhancersw", + defaults: [ + "aidlaudioeffectservice_defaults", + ], + srcs: [ + "LoudnessEnhancerSw.cpp", + ":effectCommonFile", + ], + relative_install_path: "soundfx", + visibility: [ + "//hardware/interfaces/audio/aidl/default", + ], +} diff --git a/audio/loudnessEnhancer/LoudnessEnhancerSw.cpp b/audio/loudnessEnhancer/LoudnessEnhancerSw.cpp new file mode 100644 index 0000000..1e70716 --- /dev/null +++ b/audio/loudnessEnhancer/LoudnessEnhancerSw.cpp @@ -0,0 +1,167 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include + +#define LOG_TAG "AHAL_LoudnessEnhancerSw" +#include +#include +#include + +#include "LoudnessEnhancerSw.h" + +using aidl::android::hardware::audio::effect::Descriptor; +using aidl::android::hardware::audio::effect::getEffectImplUuidLoudnessEnhancerSw; +using aidl::android::hardware::audio::effect::getEffectTypeUuidLoudnessEnhancer; +using aidl::android::hardware::audio::effect::IEffect; +using aidl::android::hardware::audio::effect::LoudnessEnhancerSw; +using aidl::android::hardware::audio::effect::State; +using aidl::android::media::audio::common::AudioUuid; + +extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid, + std::shared_ptr* instanceSpp) { + if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidLoudnessEnhancerSw()) { + LOG(ERROR) << __func__ << "uuid not supported"; + return EX_ILLEGAL_ARGUMENT; + } + if (instanceSpp) { + *instanceSpp = ndk::SharedRefBase::make(); + LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created"; + return EX_NONE; + } else { + LOG(ERROR) << __func__ << " invalid input parameter!"; + return EX_ILLEGAL_ARGUMENT; + } +} + +extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) { + if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidLoudnessEnhancerSw()) { + LOG(ERROR) << __func__ << "uuid not supported"; + return EX_ILLEGAL_ARGUMENT; + } + *_aidl_return = LoudnessEnhancerSw::kDescriptor; + return EX_NONE; +} + +namespace aidl::android::hardware::audio::effect { + +const std::string LoudnessEnhancerSw::kEffectName = "LoudnessEnhancerSw"; +const Descriptor LoudnessEnhancerSw::kDescriptor = { + .common = {.id = {.type = getEffectTypeUuidLoudnessEnhancer(), + .uuid = getEffectImplUuidLoudnessEnhancerSw(), + .proxy = std::nullopt}, + .flags = {.type = Flags::Type::INSERT, + .insert = Flags::Insert::FIRST, + .volume = Flags::Volume::CTRL}, + .name = LoudnessEnhancerSw::kEffectName, + .implementor = "The Android Open Source 
Project"}}; + +ndk::ScopedAStatus LoudnessEnhancerSw::getDescriptor(Descriptor* _aidl_return) { + LOG(DEBUG) << __func__ << kDescriptor.toString(); + *_aidl_return = kDescriptor; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus LoudnessEnhancerSw::setParameterSpecific(const Parameter::Specific& specific) { + RETURN_IF(Parameter::Specific::loudnessEnhancer != specific.getTag(), EX_ILLEGAL_ARGUMENT, + "EffectNotSupported"); + RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext"); + + auto& leParam = specific.get(); + auto tag = leParam.getTag(); + + switch (tag) { + case LoudnessEnhancer::gainMb: { + RETURN_IF(mContext->setLeGainMb(leParam.get()) != + RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "setGainMbFailed"); + return ndk::ScopedAStatus::ok(); + } + default: { + LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage( + EX_ILLEGAL_ARGUMENT, "LoudnessEnhancerTagNotSupported"); + } + } +} + +ndk::ScopedAStatus LoudnessEnhancerSw::getParameterSpecific(const Parameter::Id& id, + Parameter::Specific* specific) { + auto tag = id.getTag(); + RETURN_IF(Parameter::Id::loudnessEnhancerTag != tag, EX_ILLEGAL_ARGUMENT, "wrongIdTag"); + auto leId = id.get(); + auto leIdTag = leId.getTag(); + switch (leIdTag) { + case LoudnessEnhancer::Id::commonTag: + return getParameterLoudnessEnhancer(leId.get(), + specific); + default: + LOG(ERROR) << __func__ << " unsupported tag: " << toString(leIdTag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage( + EX_ILLEGAL_ARGUMENT, "LoudnessEnhancerTagNotSupported"); + } +} + +ndk::ScopedAStatus LoudnessEnhancerSw::getParameterLoudnessEnhancer( + const LoudnessEnhancer::Tag& tag, Parameter::Specific* specific) { + RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext"); + + LoudnessEnhancer leParam; + switch (tag) { + case LoudnessEnhancer::gainMb: { + leParam.set(mContext->getLeGainMb()); + break; + } + default: { + LOG(ERROR) << __func__ << " unsupported tag: " 
<< toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage( + EX_ILLEGAL_ARGUMENT, "LoudnessEnhancerTagNotSupported"); + } + } + + specific->set(leParam); + return ndk::ScopedAStatus::ok(); +} + +std::shared_ptr LoudnessEnhancerSw::createContext(const Parameter::Common& common) { + if (mContext) { + LOG(DEBUG) << __func__ << " context already exist"; + } else { + mContext = std::make_shared(1 /* statusFmqDepth */, common); + } + + return mContext; +} + +RetCode LoudnessEnhancerSw::releaseContext() { + if (mContext) { + mContext.reset(); + } + return RetCode::SUCCESS; +} + +// Processing method running in EffectWorker thread. +IEffect::Status LoudnessEnhancerSw::effectProcessImpl(float* in, float* out, int samples) { + // TODO: get data buffer and process. + LOG(DEBUG) << __func__ << " in " << in << " out " << out << " samples " << samples; + for (int i = 0; i < samples; i++) { + *out++ = *in++; + } + return {STATUS_OK, samples, samples}; +} + +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/loudnessEnhancer/LoudnessEnhancerSw.h b/audio/loudnessEnhancer/LoudnessEnhancerSw.h new file mode 100644 index 0000000..cf71a5f --- /dev/null +++ b/audio/loudnessEnhancer/LoudnessEnhancerSw.h @@ -0,0 +1,76 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +#include +#include +#include +#include + +#include "effect-impl/EffectImpl.h" + +namespace aidl::android::hardware::audio::effect { + +class LoudnessEnhancerSwContext final : public EffectContext { + public: + LoudnessEnhancerSwContext(int statusDepth, const Parameter::Common& common) + : EffectContext(statusDepth, common) { + LOG(DEBUG) << __func__; + } + + RetCode setLeGainMb(int gainMb) { + // TODO : Add implementation to apply new gain + mGainMb = gainMb; + return RetCode::SUCCESS; + } + int getLeGainMb() const { return mGainMb; } + + private: + int mGainMb = 0; // Default Gain +}; + +class LoudnessEnhancerSw final : public EffectImpl { + public: + static const std::string kEffectName; + static const Descriptor kDescriptor; + LoudnessEnhancerSw() { LOG(DEBUG) << __func__; } + ~LoudnessEnhancerSw() { + cleanUp(); + LOG(DEBUG) << __func__; + } + + ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override; + ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) + REQUIRES(mImplMutex) override; + ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific) + REQUIRES(mImplMutex) override; + + std::shared_ptr createContext(const Parameter::Common& common) + REQUIRES(mImplMutex) override; + RetCode releaseContext() REQUIRES(mImplMutex) override; + + IEffect::Status effectProcessImpl(float* in, float* out, int samples) + REQUIRES(mImplMutex) override; + std::string getEffectName() override { return kEffectName; } + + private: + std::shared_ptr mContext GUARDED_BY(mImplMutex); + ndk::ScopedAStatus getParameterLoudnessEnhancer(const LoudnessEnhancer::Tag& tag, + Parameter::Specific* specific) + REQUIRES(mImplMutex); +}; +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/main.cpp b/audio/main.cpp new file mode 100644 index 0000000..0b3e3ba --- /dev/null +++ b/audio/main.cpp @@ -0,0 +1,103 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * 
Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#include +#include + +#define LOG_TAG "AHAL_Main" +#include +#include +#include +#include +#include + +#include "core-impl/AudioPolicyConfigXmlConverter.h" +#include "core-impl/ChildInterface.h" +#include "core-impl/Config.h" +#include "core-impl/Module.h" + +using aidl::android::hardware::audio::core::ChildInterface; +using aidl::android::hardware::audio::core::Config; +using aidl::android::hardware::audio::core::Module; +using aidl::android::hardware::audio::core::internal::AudioPolicyConfigXmlConverter; + +namespace { + +ChildInterface createModule(const std::string& name, + std::unique_ptr&& config) { + ChildInterface result; + { + auto moduleType = Module::typeFromString(name); + if (!moduleType.has_value()) { + LOG(ERROR) << __func__ << ": module type \"" << name << "\" is not supported"; + return result; + } + auto module = Module::createInstance(*moduleType, std::move(config)); + if (module == nullptr) return result; + result = std::move(module); + } + const std::string moduleFqn = std::string().append(Module::descriptor).append("/").append(name); + binder_status_t status = AServiceManager_addService(result.getBinder(), moduleFqn.c_str()); + if (status != STATUS_OK) { + LOG(ERROR) << __func__ << ": failed to register service for \"" << moduleFqn << "\""; + return ChildInterface(); + } + return result; +}; + +} // namespace + +int main() { + // Random values are used in the 
implementation. + std::srand(std::time(nullptr)); + + // This is a debug implementation, always enable debug logging. + android::base::SetMinimumLogSeverity(::android::base::DEBUG); + // For more logs, use VERBOSE, however this may hinder performance. + // android::base::SetMinimumLogSeverity(::android::base::VERBOSE); + ABinderProcess_setThreadPoolMaxThreadCount(16); + ABinderProcess_startThreadPool(); + + // Guaranteed log for b/210919187 and logd_integration_test + LOG(INFO) << "Init for Audio AIDL HAL"; + + AudioPolicyConfigXmlConverter audioPolicyConverter{ + ::android::audio_get_audio_policy_config_file()}; + + // Make the default config service + auto config = ndk::SharedRefBase::make(audioPolicyConverter); + const std::string configFqn = std::string().append(Config::descriptor).append("/default"); + binder_status_t status = + AServiceManager_addService(config->asBinder().get(), configFqn.c_str()); + if (status != STATUS_OK) { + LOG(ERROR) << "failed to register service for \"" << configFqn << "\""; + } + + // Make modules + std::vector> moduleInstances; + auto configs(audioPolicyConverter.releaseModuleConfigs()); + for (std::pair>& configPair : *configs) { + std::string name = configPair.first; + if (auto instance = createModule(name, std::move(configPair.second)); instance) { + moduleInstances.push_back(std::move(instance)); + } + } + + ABinderProcess_joinThreadPool(); + return EXIT_FAILURE; // should not reach +} diff --git a/audio/noiseSuppression/Android.bp b/audio/noiseSuppression/Android.bp new file mode 100644 index 0000000..66fe427 --- /dev/null +++ b/audio/noiseSuppression/Android.bp @@ -0,0 +1,40 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package { + default_team: "trendy_team_android_media_audio_framework", + // See: http://go/android-license-faq + // A large-scale-change added 'default_applicable_licenses' to import + // all of the 'license_kinds' from "hardware_interfaces_license" + // to get the below license kinds: + // SPDX-license-identifier-Apache-2.0 + default_applicable_licenses: ["hardware_interfaces_license"], +} + +cc_library_shared { + name: "libnssw", + defaults: [ + "aidlaudioeffectservice_defaults", + ], + srcs: [ + "NoiseSuppressionSw.cpp", + ":effectCommonFile", + ], + relative_install_path: "soundfx", + visibility: [ + "//hardware/interfaces/audio/aidl/default:__subpackages__", + ], +} diff --git a/audio/noiseSuppression/NoiseSuppressionSw.cpp b/audio/noiseSuppression/NoiseSuppressionSw.cpp new file mode 100644 index 0000000..d304416 --- /dev/null +++ b/audio/noiseSuppression/NoiseSuppressionSw.cpp @@ -0,0 +1,189 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include +#define LOG_TAG "AHAL_NoiseSuppressionSw" +#include +#include +#include + +#include "NoiseSuppressionSw.h" + +using aidl::android::hardware::audio::effect::Descriptor; +using aidl::android::hardware::audio::effect::getEffectImplUuidNoiseSuppressionSw; +using aidl::android::hardware::audio::effect::getEffectTypeUuidNoiseSuppression; +using aidl::android::hardware::audio::effect::IEffect; +using aidl::android::hardware::audio::effect::NoiseSuppressionSw; +using aidl::android::media::audio::common::AudioUuid; + +extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid, + std::shared_ptr* instanceSpp) { + if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidNoiseSuppressionSw()) { + LOG(ERROR) << __func__ << " uuid not supported"; + return EX_ILLEGAL_ARGUMENT; + } + if (instanceSpp) { + *instanceSpp = ndk::SharedRefBase::make(); + LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created"; + return EX_NONE; + } else { + LOG(ERROR) << __func__ << " invalid input parameter!"; + return EX_ILLEGAL_ARGUMENT; + } +} + +extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) { + if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidNoiseSuppressionSw()) { + LOG(ERROR) << __func__ << " uuid not supported"; + return EX_ILLEGAL_ARGUMENT; + } + *_aidl_return = NoiseSuppressionSw::kDescriptor; + return EX_NONE; +} + +namespace aidl::android::hardware::audio::effect { + +const std::string NoiseSuppressionSw::kEffectName = "NoiseSuppressionSw"; +const Descriptor NoiseSuppressionSw::kDescriptor = { + .common = {.id = {.type = getEffectTypeUuidNoiseSuppression(), + .uuid = getEffectImplUuidNoiseSuppressionSw(), + .proxy = std::nullopt}, + .flags = {.type = Flags::Type::PRE_PROC, + .insert = Flags::Insert::FIRST, + .volume = Flags::Volume::NONE}, + .name = NoiseSuppressionSw::kEffectName, + .implementor = "The Android Open Source 
Project"}}; + +ndk::ScopedAStatus NoiseSuppressionSw::getDescriptor(Descriptor* _aidl_return) { + LOG(DEBUG) << __func__ << kDescriptor.toString(); + *_aidl_return = kDescriptor; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus NoiseSuppressionSw::setParameterSpecific(const Parameter::Specific& specific) { + RETURN_IF(Parameter::Specific::noiseSuppression != specific.getTag(), EX_ILLEGAL_ARGUMENT, + "EffectNotSupported"); + RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext"); + + auto& param = specific.get(); + auto tag = param.getTag(); + + switch (tag) { + case NoiseSuppression::level: { + RETURN_IF(mContext->setLevel(param.get()) != RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "levelNotSupported"); + return ndk::ScopedAStatus::ok(); + } + case NoiseSuppression::type: { + RETURN_IF(mContext->setType(param.get()) != RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "typeNotSupported"); + return ndk::ScopedAStatus::ok(); + } + case NoiseSuppression::vendor: { + LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage( + EX_ILLEGAL_ARGUMENT, "NoiseSuppressionTagNotSupported"); + } + } +} + +ndk::ScopedAStatus NoiseSuppressionSw::getParameterSpecific(const Parameter::Id& id, + Parameter::Specific* specific) { + auto tag = id.getTag(); + RETURN_IF(Parameter::Id::noiseSuppressionTag != tag, EX_ILLEGAL_ARGUMENT, "wrongIdTag"); + auto specificId = id.get(); + auto specificIdTag = specificId.getTag(); + switch (specificIdTag) { + case NoiseSuppression::Id::commonTag: + return getParameterNoiseSuppression(specificId.get(), + specific); + case NoiseSuppression::Id::vendorExtensionTag: { + LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage( + EX_ILLEGAL_ARGUMENT, "NoiseSuppressionTagNotSupported"); + } + } +} + +ndk::ScopedAStatus NoiseSuppressionSw::getParameterNoiseSuppression( + const NoiseSuppression::Tag& tag, Parameter::Specific* 
specific) { + RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext"); + NoiseSuppression param; + switch (tag) { + case NoiseSuppression::level: { + param.set(mContext->getLevel()); + break; + } + case NoiseSuppression::type: { + param.set(mContext->getType()); + break; + } + case NoiseSuppression::vendor: { + LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage( + EX_ILLEGAL_ARGUMENT, "NoiseSuppressionTagNotSupported"); + } + } + + specific->set(param); + return ndk::ScopedAStatus::ok(); +} + +std::shared_ptr NoiseSuppressionSw::createContext(const Parameter::Common& common) { + if (mContext) { + LOG(DEBUG) << __func__ << " context already exist"; + } else { + mContext = std::make_shared(1 /* statusFmqDepth */, common); + } + return mContext; +} + +RetCode NoiseSuppressionSw::releaseContext() { + if (mContext) { + mContext.reset(); + } + return RetCode::SUCCESS; +} + +// Processing method running in EffectWorker thread. +IEffect::Status NoiseSuppressionSw::effectProcessImpl(float* in, float* out, int samples) { + // TODO: get data buffer and process. 
+ LOG(DEBUG) << __func__ << " in " << in << " out " << out << " samples " << samples; + for (int i = 0; i < samples; i++) { + *out++ = *in++; + } + return {STATUS_OK, samples, samples}; +} + +RetCode NoiseSuppressionSwContext::setLevel(NoiseSuppression::Level level) { + mLevel = level; + return RetCode::SUCCESS; +} + +NoiseSuppression::Level NoiseSuppressionSwContext::getLevel() { + return mLevel; +} + +RetCode NoiseSuppressionSwContext::setType(NoiseSuppression::Type type) { + mType = type; + return RetCode::SUCCESS; +} + +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/noiseSuppression/NoiseSuppressionSw.h b/audio/noiseSuppression/NoiseSuppressionSw.h new file mode 100644 index 0000000..acef8ee --- /dev/null +++ b/audio/noiseSuppression/NoiseSuppressionSw.h @@ -0,0 +1,77 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +#include +#include + +#include +#include + +#include "effect-impl/EffectImpl.h" + +namespace aidl::android::hardware::audio::effect { + +class NoiseSuppressionSwContext final : public EffectContext { + public: + NoiseSuppressionSwContext(int statusDepth, const Parameter::Common& common) + : EffectContext(statusDepth, common) { + LOG(DEBUG) << __func__; + } + + RetCode setLevel(NoiseSuppression::Level level); + NoiseSuppression::Level getLevel(); + RetCode setType(NoiseSuppression::Type type); + NoiseSuppression::Type getType() { return mType; } + + private: + NoiseSuppression::Level mLevel = NoiseSuppression::Level::LOW; + NoiseSuppression::Type mType = NoiseSuppression::Type::SINGLE_CHANNEL; +}; + +class NoiseSuppressionSw final : public EffectImpl { + public: + static const std::string kEffectName; + static const bool kStrengthSupported; + static const Descriptor kDescriptor; + NoiseSuppressionSw() { LOG(DEBUG) << __func__; } + ~NoiseSuppressionSw() { + cleanUp(); + LOG(DEBUG) << __func__; + } + + ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override; + ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) + REQUIRES(mImplMutex) override; + ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific) + REQUIRES(mImplMutex) override; + + std::shared_ptr createContext(const Parameter::Common& common) + REQUIRES(mImplMutex) override; + RetCode releaseContext() REQUIRES(mImplMutex) override; + + std::string getEffectName() override { return kEffectName; }; + IEffect::Status effectProcessImpl(float* in, float* out, int samples) + REQUIRES(mImplMutex) override; + + private: + std::shared_ptr mContext GUARDED_BY(mImplMutex); + ndk::ScopedAStatus getParameterNoiseSuppression(const NoiseSuppression::Tag& tag, + Parameter::Specific* specific) + REQUIRES(mImplMutex); +}; +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/presetReverb/Android.bp 
b/audio/presetReverb/Android.bp new file mode 100644 index 0000000..15b4632 --- /dev/null +++ b/audio/presetReverb/Android.bp @@ -0,0 +1,40 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package { + default_team: "trendy_team_android_media_audio_framework", + // See: http://go/android-license-faq + // A large-scale-change added 'default_applicable_licenses' to import + // all of the 'license_kinds' from "hardware_interfaces_license" + // to get the below license kinds: + // SPDX-license-identifier-Apache-2.0 + default_applicable_licenses: ["hardware_interfaces_license"], +} + +cc_library_shared { + name: "libpresetreverbsw", + defaults: [ + "aidlaudioeffectservice_defaults", + ], + srcs: [ + "PresetReverbSw.cpp", + ":effectCommonFile", + ], + relative_install_path: "soundfx", + visibility: [ + "//hardware/interfaces/audio/aidl/default:__subpackages__", + ], +} diff --git a/audio/presetReverb/PresetReverbSw.cpp b/audio/presetReverb/PresetReverbSw.cpp new file mode 100644 index 0000000..2ac2010 --- /dev/null +++ b/audio/presetReverb/PresetReverbSw.cpp @@ -0,0 +1,181 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include + +#define LOG_TAG "AHAL_PresetReverbSw" +#include +#include +#include +#include + +#include "PresetReverbSw.h" + +using aidl::android::hardware::audio::effect::Descriptor; +using aidl::android::hardware::audio::effect::getEffectImplUuidPresetReverbSw; +using aidl::android::hardware::audio::effect::getEffectTypeUuidPresetReverb; +using aidl::android::hardware::audio::effect::IEffect; +using aidl::android::hardware::audio::effect::PresetReverbSw; +using aidl::android::hardware::audio::effect::State; +using aidl::android::media::audio::common::AudioUuid; + +extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid, + std::shared_ptr* instanceSpp) { + if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidPresetReverbSw()) { + LOG(ERROR) << __func__ << "uuid not supported"; + return EX_ILLEGAL_ARGUMENT; + } + if (instanceSpp) { + *instanceSpp = ndk::SharedRefBase::make(); + LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created"; + return EX_NONE; + } else { + LOG(ERROR) << __func__ << " invalid input parameter!"; + return EX_ILLEGAL_ARGUMENT; + } +} + +extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) { + if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidPresetReverbSw()) { + LOG(ERROR) << __func__ << "uuid not supported"; + return EX_ILLEGAL_ARGUMENT; + } + *_aidl_return = PresetReverbSw::kDescriptor; + return EX_NONE; +} + +namespace aidl::android::hardware::audio::effect { + +const std::string PresetReverbSw::kEffectName = 
"PresetReverbSw"; + +const std::vector PresetReverbSw::kSupportedPresets{ + ndk::enum_range().begin(), + ndk::enum_range().end()}; + +const std::vector PresetReverbSw::kRanges = { + MAKE_RANGE(PresetReverb, supportedPresets, PresetReverbSw::kSupportedPresets, + PresetReverbSw::kSupportedPresets)}; + +const Capability PresetReverbSw::kCapability = { + .range = Range::make(PresetReverbSw::kRanges)}; + +const Descriptor PresetReverbSw::kDescriptor = { + .common = {.id = {.type = getEffectTypeUuidPresetReverb(), + .uuid = getEffectImplUuidPresetReverbSw(), + .proxy = std::nullopt}, + .flags = {.type = Flags::Type::INSERT, + .insert = Flags::Insert::FIRST, + .volume = Flags::Volume::CTRL}, + .name = PresetReverbSw::kEffectName, + .implementor = "The Android Open Source Project"}, + .capability = PresetReverbSw::kCapability}; + +ndk::ScopedAStatus PresetReverbSw::getDescriptor(Descriptor* _aidl_return) { + LOG(DEBUG) << __func__ << kDescriptor.toString(); + *_aidl_return = kDescriptor; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus PresetReverbSw::setParameterSpecific(const Parameter::Specific& specific) { + RETURN_IF(Parameter::Specific::presetReverb != specific.getTag(), EX_ILLEGAL_ARGUMENT, + "EffectNotSupported"); + + RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext"); + + auto& prParam = specific.get(); + RETURN_IF(!inRange(prParam, kRanges), EX_ILLEGAL_ARGUMENT, "outOfRange"); + auto tag = prParam.getTag(); + + switch (tag) { + case PresetReverb::preset: { + RETURN_IF( + mContext->setPRPreset(prParam.get()) != RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "setPresetFailed"); + return ndk::ScopedAStatus::ok(); + } + default: { + LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT, + "PresetReverbTagNotSupported"); + } + } +} + +ndk::ScopedAStatus PresetReverbSw::getParameterSpecific(const Parameter::Id& id, + Parameter::Specific* specific) { + auto tag = id.getTag(); 
+ RETURN_IF(Parameter::Id::presetReverbTag != tag, EX_ILLEGAL_ARGUMENT, "wrongIdTag"); + auto prId = id.get(); + auto prIdTag = prId.getTag(); + switch (prIdTag) { + case PresetReverb::Id::commonTag: + return getParameterPresetReverb(prId.get(), specific); + default: + LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT, + "PresetReverbTagNotSupported"); + } +} + +ndk::ScopedAStatus PresetReverbSw::getParameterPresetReverb(const PresetReverb::Tag& tag, + Parameter::Specific* specific) { + RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext"); + PresetReverb prParam; + switch (tag) { + case PresetReverb::preset: { + prParam.set(mContext->getPRPreset()); + break; + } + default: { + LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT, + "PresetReverbTagNotSupported"); + } + } + + specific->set(prParam); + return ndk::ScopedAStatus::ok(); +} + +std::shared_ptr PresetReverbSw::createContext(const Parameter::Common& common) { + if (mContext) { + LOG(DEBUG) << __func__ << " context already exist"; + } else { + mContext = std::make_shared(1 /* statusFmqDepth */, common); + } + + return mContext; +} + +RetCode PresetReverbSw::releaseContext() { + if (mContext) { + mContext.reset(); + } + return RetCode::SUCCESS; +} + +// Processing method running in EffectWorker thread. +IEffect::Status PresetReverbSw::effectProcessImpl(float* in, float* out, int samples) { + // TODO: get data buffer and process. 
+ LOG(DEBUG) << __func__ << " in " << in << " out " << out << " samples " << samples; + for (int i = 0; i < samples; i++) { + *out++ = *in++; + } + return {STATUS_OK, samples, samples}; +} + +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/presetReverb/PresetReverbSw.h b/audio/presetReverb/PresetReverbSw.h new file mode 100644 index 0000000..61fc88c --- /dev/null +++ b/audio/presetReverb/PresetReverbSw.h @@ -0,0 +1,78 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +#include +#include +#include +#include + +#include "effect-impl/EffectImpl.h" + +namespace aidl::android::hardware::audio::effect { + +class PresetReverbSwContext final : public EffectContext { + public: + PresetReverbSwContext(int statusDepth, const Parameter::Common& common) + : EffectContext(statusDepth, common) { + LOG(DEBUG) << __func__; + } + RetCode setPRPreset(PresetReverb::Presets preset) { + // TODO : Add implementation to modify Presets + mPreset = preset; + return RetCode::SUCCESS; + } + PresetReverb::Presets getPRPreset() const { return mPreset; } + + private: + PresetReverb::Presets mPreset = PresetReverb::Presets::NONE; +}; + +class PresetReverbSw final : public EffectImpl { + public: + static const std::string kEffectName; + static const std::vector kSupportedPresets; + static const std::vector kRanges; + static const Capability kCapability; + static const Descriptor kDescriptor; + PresetReverbSw() { LOG(DEBUG) << __func__; } + ~PresetReverbSw() { + cleanUp(); + LOG(DEBUG) << __func__; + } + + ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override; + ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) + REQUIRES(mImplMutex) override; + ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific) + REQUIRES(mImplMutex) override; + + std::shared_ptr createContext(const Parameter::Common& common) + REQUIRES(mImplMutex) override; + RetCode releaseContext() REQUIRES(mImplMutex) override; + + IEffect::Status effectProcessImpl(float* in, float* out, int samples) + REQUIRES(mImplMutex) override; + std::string getEffectName() override { return kEffectName; } + + private: + std::shared_ptr mContext GUARDED_BY(mImplMutex); + + ndk::ScopedAStatus getParameterPresetReverb(const PresetReverb::Tag& tag, + Parameter::Specific* specific) REQUIRES(mImplMutex); +}; +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/primary/PrimaryMixer.cpp 
b/audio/primary/PrimaryMixer.cpp new file mode 100644 index 0000000..577d010 --- /dev/null +++ b/audio/primary/PrimaryMixer.cpp @@ -0,0 +1,29 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "AHAL_PrimaryMixer" + +#include "PrimaryMixer.h" + +namespace aidl::android::hardware::audio::core::primary { + +// static +PrimaryMixer& PrimaryMixer::getInstance() { + static PrimaryMixer gInstance; + return gInstance; +} + +} // namespace aidl::android::hardware::audio::core::primary diff --git a/audio/primary/PrimaryMixer.h b/audio/primary/PrimaryMixer.h new file mode 100644 index 0000000..760d42f --- /dev/null +++ b/audio/primary/PrimaryMixer.h @@ -0,0 +1,36 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +#include "alsa/Mixer.h" + +namespace aidl::android::hardware::audio::core::primary { + +class PrimaryMixer : public alsa::Mixer { + public: + static constexpr int kInvalidAlsaCard = -1; + static constexpr int kInvalidAlsaDevice = -1; + static constexpr int kAlsaCard = 0; + static constexpr int kAlsaDevice = 0; + + static PrimaryMixer& getInstance(); + + private: + PrimaryMixer() : alsa::Mixer(kAlsaCard) {} +}; + +} // namespace aidl::android::hardware::audio::core::primary diff --git a/audio/primary/StreamPrimary.cpp b/audio/primary/StreamPrimary.cpp new file mode 100644 index 0000000..8455680 --- /dev/null +++ b/audio/primary/StreamPrimary.cpp @@ -0,0 +1,298 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "AHAL_StreamPrimary" + +#include + +#include +#include +#include +#include +#include +#include + +#include "core-impl/StreamPrimary.h" + +using aidl::android::hardware::audio::common::SinkMetadata; +using aidl::android::hardware::audio::common::SourceMetadata; +using aidl::android::media::audio::common::AudioDevice; +using aidl::android::media::audio::common::AudioDeviceAddress; +using aidl::android::media::audio::common::AudioDeviceDescription; +using aidl::android::media::audio::common::AudioDeviceType; +using aidl::android::media::audio::common::AudioOffloadInfo; +using aidl::android::media::audio::common::MicrophoneInfo; +using android::base::GetBoolProperty; + +namespace aidl::android::hardware::audio::core { + +StreamPrimary::StreamPrimary(StreamContext* context, const Metadata& metadata) + : StreamAlsa(context, metadata, 3 /*readWriteRetries*/), + mIsAsynchronous(!!getContext().getAsyncCallback()), + mStubDriver(getContext()) { + context->startStreamDataProcessor(); +} + +::android::status_t StreamPrimary::init(DriverCallbackInterface* callback) { + RETURN_STATUS_IF_ERROR(mStubDriver.init(callback)); + return StreamAlsa::init(callback); +} + +::android::status_t StreamPrimary::drain(StreamDescriptor::DrainMode mode) { + return isStubStreamOnWorker() ? mStubDriver.drain(mode) : StreamAlsa::drain(mode); +} + +::android::status_t StreamPrimary::flush() { + RETURN_STATUS_IF_ERROR(isStubStreamOnWorker() ? mStubDriver.flush() : StreamAlsa::flush()); + // TODO(b/372951987): consider if this needs to be done from 'StreamInWorkerLogic::cycle'. + return mIsInput ? standby() : ::android::OK; +} + +::android::status_t StreamPrimary::pause() { + return isStubStreamOnWorker() ? mStubDriver.pause() : StreamAlsa::pause(); +} + +::android::status_t StreamPrimary::standby() { + return isStubStreamOnWorker() ? 
mStubDriver.standby() : StreamAlsa::standby(); +} + +::android::status_t StreamPrimary::start() { + bool isStub = true, shutdownAlsaStream = false; + { + std::lock_guard l(mLock); + isStub = mAlsaDeviceId == kStubDeviceId; + shutdownAlsaStream = + mCurrAlsaDeviceId != mAlsaDeviceId && mCurrAlsaDeviceId != kStubDeviceId; + mCurrAlsaDeviceId = mAlsaDeviceId; + } + if (shutdownAlsaStream) { + StreamAlsa::shutdown(); // Close currently opened ALSA devices. + } + if (isStub) { + return mStubDriver.start(); + } + RETURN_STATUS_IF_ERROR(StreamAlsa::start()); + mStartTimeNs = ::android::uptimeNanos(); + mFramesSinceStart = 0; + mSkipNextTransfer = false; + return ::android::OK; +} + +::android::status_t StreamPrimary::transfer(void* buffer, size_t frameCount, + size_t* actualFrameCount, int32_t* latencyMs) { + if (isStubStreamOnWorker()) { + return mStubDriver.transfer(buffer, frameCount, actualFrameCount, latencyMs); + } + // This is a workaround for the emulator implementation which has a host-side buffer + // and is not being able to achieve real-time behavior similar to ADSPs (b/302587331). 
+ if (!mSkipNextTransfer) { + RETURN_STATUS_IF_ERROR( + StreamAlsa::transfer(buffer, frameCount, actualFrameCount, latencyMs)); + } else { + LOG(DEBUG) << __func__ << ": skipping transfer (" << frameCount << " frames)"; + *actualFrameCount = frameCount; + if (mIsInput) memset(buffer, 0, frameCount * mFrameSizeBytes); + mSkipNextTransfer = false; + } + if (!mIsAsynchronous) { + const long bufferDurationUs = + (*actualFrameCount) * MICROS_PER_SECOND / mContext.getSampleRate(); + const auto totalDurationUs = + (::android::uptimeNanos() - mStartTimeNs) / NANOS_PER_MICROSECOND; + mFramesSinceStart += *actualFrameCount; + const long totalOffsetUs = + mFramesSinceStart * MICROS_PER_SECOND / mContext.getSampleRate() - totalDurationUs; + LOG(VERBOSE) << __func__ << ": totalOffsetUs " << totalOffsetUs; + if (totalOffsetUs > 0) { + const long sleepTimeUs = std::min(totalOffsetUs, bufferDurationUs); + LOG(VERBOSE) << __func__ << ": sleeping for " << sleepTimeUs << " us"; + usleep(sleepTimeUs); + } else { + mSkipNextTransfer = true; + } + } else { + LOG(VERBOSE) << __func__ << ": asynchronous transfer"; + } + return ::android::OK; +} + +::android::status_t StreamPrimary::refinePosition(StreamDescriptor::Position*) { + // Since not all data is actually sent to the HAL, use the position maintained by Stream class + // which accounts for all frames passed from / to the client. 
+ return ::android::OK; +} + +void StreamPrimary::shutdown() { + StreamAlsa::shutdown(); + mStubDriver.shutdown(); +} + +ndk::ScopedAStatus StreamPrimary::setConnectedDevices(const ConnectedDevices& devices) { + LOG(DEBUG) << __func__ << ": " << ::android::internal::ToString(devices); + if (devices.size() > 1) { + LOG(ERROR) << __func__ << ": primary stream can only be connected to one device, got: " + << devices.size(); + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); + } + { + const bool useStubDriver = devices.empty() || useStubStream(mIsInput, devices[0]); + std::lock_guard l(mLock); + mAlsaDeviceId = useStubDriver ? kStubDeviceId : getCardAndDeviceId(devices); + } + if (!devices.empty()) { + auto streamDataProcessor = getContext().getStreamDataProcessor().lock(); + if (streamDataProcessor != nullptr) { + streamDataProcessor->setAudioDevice(devices[0]); + } + } + return StreamAlsa::setConnectedDevices(devices); +} + +std::vector StreamPrimary::getDeviceProfiles() { + return {alsa::DeviceProfile{.card = mCurrAlsaDeviceId.first, + .device = mCurrAlsaDeviceId.second, + .direction = mIsInput ? 
PCM_IN : PCM_OUT, + .isExternal = false}}; +} + +bool StreamPrimary::isStubStream() { + std::lock_guard l(mLock); + return mAlsaDeviceId == kStubDeviceId; +} + +// static +StreamPrimary::AlsaDeviceId StreamPrimary::getCardAndDeviceId( + const std::vector& devices) { + if (devices.empty() || devices[0].address.getTag() != AudioDeviceAddress::id) { + return kDefaultCardAndDeviceId; + } + std::string deviceAddress = devices[0].address.get(); + AlsaDeviceId cardAndDeviceId; + if (const size_t suffixPos = deviceAddress.rfind("CARD_"); + suffixPos == std::string::npos || + sscanf(deviceAddress.c_str() + suffixPos, "CARD_%d_DEV_%d", &cardAndDeviceId.first, + &cardAndDeviceId.second) != 2) { + return kDefaultCardAndDeviceId; + } + LOG(DEBUG) << __func__ << ": parsed with card id " << cardAndDeviceId.first << ", device id " + << cardAndDeviceId.second; + return cardAndDeviceId; +} + +// static +bool StreamPrimary::useStubStream( + bool isInput, const ::aidl::android::media::audio::common::AudioDevice& device) { + static const bool kSimulateInput = + GetBoolProperty("ro.boot.audio.tinyalsa.simulate_input", false); + static const bool kSimulateOutput = + GetBoolProperty("ro.boot.audio.tinyalsa.ignore_output", false); + if (isInput) { + return kSimulateInput || device.type.type == AudioDeviceType::IN_TELEPHONY_RX || + device.type.type == AudioDeviceType::IN_FM_TUNER || + device.type.connection == AudioDeviceDescription::CONNECTION_BUS /*deprecated */; + } + return kSimulateOutput || device.type.type == AudioDeviceType::OUT_TELEPHONY_TX || + device.type.connection == AudioDeviceDescription::CONNECTION_BUS /*deprecated*/; +} + +StreamInPrimary::StreamInPrimary(StreamContext&& context, const SinkMetadata& sinkMetadata, + const std::vector& microphones) + : StreamIn(std::move(context), microphones), + StreamPrimary(&mContextInstance, sinkMetadata), + StreamInHwGainHelper(&mContextInstance) {} + +ndk::ScopedAStatus StreamInPrimary::getHwGain(std::vector* _aidl_return) { + if 
(isStubStream()) { + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); + } + if (mHwGains.empty()) { + float gain; + RETURN_STATUS_IF_ERROR(primary::PrimaryMixer::getInstance().getMicGain(&gain)); + _aidl_return->resize(mChannelCount, gain); + RETURN_STATUS_IF_ERROR(setHwGainImpl(*_aidl_return)); + } + return getHwGainImpl(_aidl_return); +} + +ndk::ScopedAStatus StreamInPrimary::setHwGain(const std::vector& in_channelGains) { + if (isStubStream()) { + LOG(DEBUG) << __func__ << ": gains " << ::android::internal::ToString(in_channelGains); + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); + } + auto currentGains = mHwGains; + RETURN_STATUS_IF_ERROR(setHwGainImpl(in_channelGains)); + if (in_channelGains.size() < 1) { + LOG(FATAL) << __func__ << ": unexpected gain vector size: " << in_channelGains.size(); + } + if (auto status = primary::PrimaryMixer::getInstance().setMicGain(in_channelGains[0]); + !status.isOk()) { + mHwGains = currentGains; + return status; + } + float gain; + RETURN_STATUS_IF_ERROR(primary::PrimaryMixer::getInstance().getMicGain(&gain)); + // Due to rounding errors, round trip conversions between percents and indexed values may not + // match. 
+ if (gain != in_channelGains[0]) { + LOG(WARNING) << __func__ << ": unmatched gain: set: " << in_channelGains[0] + << ", from mixer: " << gain; + } + return ndk::ScopedAStatus::ok(); +} + +StreamOutPrimary::StreamOutPrimary(StreamContext&& context, const SourceMetadata& sourceMetadata, + const std::optional& offloadInfo) + : StreamOut(std::move(context), offloadInfo), + StreamPrimary(&mContextInstance, sourceMetadata), + StreamOutHwVolumeHelper(&mContextInstance) {} + +ndk::ScopedAStatus StreamOutPrimary::getHwVolume(std::vector* _aidl_return) { + if (isStubStream()) { + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); + } + if (mHwVolumes.empty()) { + RETURN_STATUS_IF_ERROR(primary::PrimaryMixer::getInstance().getVolumes(_aidl_return)); + _aidl_return->resize(mChannelCount); + RETURN_STATUS_IF_ERROR(setHwVolumeImpl(*_aidl_return)); + } + return getHwVolumeImpl(_aidl_return); +} + +ndk::ScopedAStatus StreamOutPrimary::setHwVolume(const std::vector& in_channelVolumes) { + if (isStubStream()) { + LOG(DEBUG) << __func__ << ": volumes " << ::android::internal::ToString(in_channelVolumes); + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); + } + auto currentVolumes = mHwVolumes; + RETURN_STATUS_IF_ERROR(setHwVolumeImpl(in_channelVolumes)); + if (auto status = primary::PrimaryMixer::getInstance().setVolumes(in_channelVolumes); + !status.isOk()) { + mHwVolumes = currentVolumes; + return status; + } + std::vector volumes; + RETURN_STATUS_IF_ERROR(primary::PrimaryMixer::getInstance().getVolumes(&volumes)); + // Due to rounding errors, round trip conversions between percents and indexed values may not + // match. 
+ if (volumes != in_channelVolumes) { + LOG(WARNING) << __func__ << ": unmatched volumes: set: " + << ::android::internal::ToString(in_channelVolumes) + << ", from mixer: " << ::android::internal::ToString(volumes); + } + return ndk::ScopedAStatus::ok(); +} + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/r_submix/ModuleRemoteSubmix.cpp b/audio/r_submix/ModuleRemoteSubmix.cpp new file mode 100644 index 0000000..b44f37b --- /dev/null +++ b/audio/r_submix/ModuleRemoteSubmix.cpp @@ -0,0 +1,183 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "AHAL_ModuleRemoteSubmix" + +#include +#include + +#include +#include + +#include "SubmixRoute.h" +#include "core-impl/ModuleRemoteSubmix.h" +#include "core-impl/StreamRemoteSubmix.h" + +using aidl::android::hardware::audio::common::SinkMetadata; +using aidl::android::hardware::audio::common::SourceMetadata; +using aidl::android::media::audio::common::AudioDeviceAddress; +using aidl::android::media::audio::common::AudioFormatType; +using aidl::android::media::audio::common::AudioIoFlags; +using aidl::android::media::audio::common::AudioOffloadInfo; +using aidl::android::media::audio::common::AudioPort; +using aidl::android::media::audio::common::AudioPortConfig; +using aidl::android::media::audio::common::AudioPortExt; +using aidl::android::media::audio::common::AudioProfile; +using aidl::android::media::audio::common::Int; +using aidl::android::media::audio::common::MicrophoneInfo; + +namespace aidl::android::hardware::audio::core { + +namespace { + +std::optional getRemoteEndConfig(const AudioPort& audioPort) { + const auto& deviceAddress = audioPort.ext.get().device.address; + const bool isInput = audioPort.flags.getTag() == AudioIoFlags::input; + if (auto submixRoute = r_submix::SubmixRoute::findRoute(deviceAddress); + submixRoute != nullptr) { + if ((isInput && submixRoute->isStreamOutOpen()) || + (!isInput && submixRoute->isStreamInOpen())) { + return submixRoute->getPipeConfig(); + } + } + return {}; +} + +} // namespace + +ndk::ScopedAStatus ModuleRemoteSubmix::getMicMute(bool* _aidl_return __unused) { + LOG(DEBUG) << __func__ << ": is not supported"; + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +ndk::ScopedAStatus ModuleRemoteSubmix::setMicMute(bool in_mute __unused) { + LOG(DEBUG) << __func__ << ": is not supported"; + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +ndk::ScopedAStatus ModuleRemoteSubmix::setAudioPortConfig(const AudioPortConfig& in_requested, + 
AudioPortConfig* out_suggested, + bool* _aidl_return) { + auto fillConfig = [this](const AudioPort& port, AudioPortConfig* config) { + if (port.ext.getTag() == AudioPortExt::device) { + if (auto pipeConfig = getRemoteEndConfig(port); pipeConfig.has_value()) { + LOG(DEBUG) << "setAudioPortConfig: suggesting port config from the remote end."; + config->format = pipeConfig->format; + config->channelMask = pipeConfig->channelLayout; + config->sampleRate = Int{.value = pipeConfig->sampleRate}; + config->flags = port.flags; + config->ext = port.ext; + return true; + } + } + return generateDefaultPortConfig(port, config); + }; + return Module::setAudioPortConfigImpl(in_requested, fillConfig, out_suggested, _aidl_return); +} + +ndk::ScopedAStatus ModuleRemoteSubmix::createInputStream( + StreamContext&& context, const SinkMetadata& sinkMetadata, + const std::vector& microphones, std::shared_ptr* result) { + return createStreamInstance(result, std::move(context), sinkMetadata, + microphones); +} + +ndk::ScopedAStatus ModuleRemoteSubmix::createOutputStream( + StreamContext&& context, const SourceMetadata& sourceMetadata, + const std::optional& offloadInfo, std::shared_ptr* result) { + return createStreamInstance(result, std::move(context), sourceMetadata, + offloadInfo); +} + +ndk::ScopedAStatus ModuleRemoteSubmix::populateConnectedDevicePort(AudioPort* audioPort, int32_t) { + if (audioPort->ext.getTag() != AudioPortExt::device) { + LOG(ERROR) << __func__ << ": not a device port: " << audioPort->toString(); + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + // If there is already a pipe with a stream for the port address, provide its configuration as + // the only option. Otherwise, find the corresponding mix port and copy its profiles. 
+ if (auto pipeConfig = getRemoteEndConfig(*audioPort); pipeConfig.has_value()) { + audioPort->profiles.clear(); + audioPort->profiles.push_back(AudioProfile{ + .format = pipeConfig->format, + .channelMasks = std::vector({pipeConfig->channelLayout}), + .sampleRates = std::vector({pipeConfig->sampleRate})}); + LOG(DEBUG) << __func__ << ": populated from remote end as: " << audioPort->toString(); + return ndk::ScopedAStatus::ok(); + } + + // At this moment, the port has the same ID as the template port, see connectExternalDevice. + std::vector routes = getAudioRoutesForAudioPortImpl(audioPort->id); + if (routes.empty()) { + LOG(ERROR) << __func__ << ": no routes found for the port " << audioPort->toString(); + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + const auto& route = *routes.begin(); + AudioPort mixPort; + if (route->sinkPortId == audioPort->id) { + if (route->sourcePortIds.empty()) { + LOG(ERROR) << __func__ << ": invalid route " << route->toString(); + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + RETURN_STATUS_IF_ERROR(getAudioPort(*route->sourcePortIds.begin(), &mixPort)); + } else { + RETURN_STATUS_IF_ERROR(getAudioPort(route->sinkPortId, &mixPort)); + } + audioPort->profiles = mixPort.profiles; + LOG(DEBUG) << __func__ << ": populated from the mix port as: " << audioPort->toString(); + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus ModuleRemoteSubmix::checkAudioPatchEndpointsMatch( + const std::vector& sources, const std::vector& sinks) { + for (const auto& source : sources) { + for (const auto& sink : sinks) { + if (source->sampleRate != sink->sampleRate || + source->channelMask != sink->channelMask || source->format != sink->format) { + LOG(ERROR) << __func__ + << ": mismatch port configuration, source=" << source->toString() + << ", sink=" << sink->toString(); + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); + } + } + } + return ndk::ScopedAStatus::ok(); +} + 
+ndk::ScopedAStatus ModuleRemoteSubmix::onMasterMuteChanged(bool __unused) { + LOG(DEBUG) << __func__ << ": is not supported"; + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +ndk::ScopedAStatus ModuleRemoteSubmix::onMasterVolumeChanged(float __unused) { + LOG(DEBUG) << __func__ << ": is not supported"; + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +int32_t ModuleRemoteSubmix::getNominalLatencyMs(const AudioPortConfig&) { + // See the note on kDefaultPipePeriodCount. + static constexpr int32_t kMaxLatencyMs = + (r_submix::kDefaultPipeSizeInFrames * 1000) / r_submix::kDefaultSampleRateHz; + static constexpr int32_t kMinLatencyMs = kMaxLatencyMs / r_submix::kDefaultPipePeriodCount; + return kMinLatencyMs; +} + +binder_status_t ModuleRemoteSubmix::dump(int fd, const char** /*args*/, uint32_t /*numArgs*/) { + dprintf(fd, "\nSubmixRoutes:\n%s\n", r_submix::SubmixRoute::dumpRoutes().c_str()); + return STATUS_OK; +} + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/r_submix/StreamRemoteSubmix.cpp b/audio/r_submix/StreamRemoteSubmix.cpp new file mode 100644 index 0000000..cc3c644 --- /dev/null +++ b/audio/r_submix/StreamRemoteSubmix.cpp @@ -0,0 +1,412 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "AHAL_StreamRemoteSubmix" +#include +#include +#include +#include + +#include "core-impl/StreamRemoteSubmix.h" + +using aidl::android::hardware::audio::common::SinkMetadata; +using aidl::android::hardware::audio::common::SourceMetadata; +using aidl::android::hardware::audio::core::r_submix::SubmixRoute; +using aidl::android::media::audio::common::AudioDeviceAddress; +using aidl::android::media::audio::common::AudioOffloadInfo; +using aidl::android::media::audio::common::MicrophoneDynamicInfo; +using aidl::android::media::audio::common::MicrophoneInfo; + +namespace aidl::android::hardware::audio::core { + +using deprecated::InnerStreamWrapper; +using deprecated::StreamCommonInterfaceEx; +using deprecated::StreamSwitcher; + +StreamRemoteSubmix::StreamRemoteSubmix(StreamContext* context, const Metadata& metadata, + const AudioDeviceAddress& deviceAddress) + : StreamCommonImpl(context, metadata), + mDeviceAddress(deviceAddress), + mIsInput(isInput(metadata)) { + mStreamConfig.frameSize = context->getFrameSize(); + mStreamConfig.format = context->getFormat(); + mStreamConfig.channelLayout = context->getChannelLayout(); + mStreamConfig.sampleRate = context->getSampleRate(); +} + +StreamRemoteSubmix::~StreamRemoteSubmix() { + cleanupWorker(); +} + +::android::status_t StreamRemoteSubmix::init(DriverCallbackInterface*) { + mCurrentRoute = SubmixRoute::findOrCreateRoute(mDeviceAddress, mStreamConfig); + if (mCurrentRoute == nullptr) { + return ::android::NO_INIT; + } + if (!mCurrentRoute->isStreamConfigValid(mIsInput, mStreamConfig)) { + LOG(ERROR) << __func__ << ": invalid stream config"; + return ::android::NO_INIT; + } + sp sink = mCurrentRoute->getSink(); + if (sink == nullptr) { + LOG(ERROR) << __func__ << ": nullptr sink when opening stream"; + return ::android::NO_INIT; + } + if ((!mIsInput || mCurrentRoute->isStreamInOpen()) && sink->isShutdown()) { + LOG(DEBUG) << __func__ << ": Shut down sink when opening stream"; + if (::android::OK != 
mCurrentRoute->resetPipe()) {
+            LOG(ERROR) << __func__ << ": reset pipe failed";
+            return ::android::NO_INIT;
+        }
+    }
+    mCurrentRoute->openStream(mIsInput);
+    return ::android::OK;
+}
+
+// Draining is a no-op for the submix pipe: written data is already visible to the reader.
+::android::status_t StreamRemoteSubmix::drain(StreamDescriptor::DrainMode) {
+    return ::android::OK;
+}
+
+::android::status_t StreamRemoteSubmix::flush() {
+    // TODO(b/372951987): consider if this needs to be done from 'StreamInWorkerLogic::cycle'.
+    return mIsInput ? standby() : ::android::OK;
+}
+
+::android::status_t StreamRemoteSubmix::pause() {
+    return ::android::OK;
+}
+
+::android::status_t StreamRemoteSubmix::standby() {
+    mCurrentRoute->standby(mIsInput);
+    return ::android::OK;
+}
+
+// Marks the stream active and resets the frame/time counters used by 'transfer' for pacing.
+::android::status_t StreamRemoteSubmix::start() {
+    mCurrentRoute->exitStandby(mIsInput);
+    mStartTimeNs = ::android::uptimeNanos();
+    mFramesSinceStart = 0;
+    return ::android::OK;
+}
+
+// Shuts down the MonoPipe sink for an output stream so a blocked writer wakes up, and
+// releases the output end of the route. Input streams have nothing to do here.
+ndk::ScopedAStatus StreamRemoteSubmix::prepareToClose() {
+    if (!mIsInput) {
+        std::shared_ptr<SubmixRoute> route = SubmixRoute::findRoute(mDeviceAddress);
+        if (route != nullptr) {
+            sp<MonoPipe> sink = route->getSink();
+            if (sink == nullptr) {
+                // BUGFIX: the error status used to be constructed and discarded here (no
+                // 'return'), after which 'sink' was dereferenced below — a guaranteed
+                // null-pointer crash whenever the sink is already gone.
+                return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
+            }
+            LOG(DEBUG) << __func__ << ": shutting down MonoPipe sink";
+
+            sink->shutdown(true);
+            // The client already considers this stream as closed, release the output end.
+            route->closeStream(mIsInput);
+        } else {
+            LOG(DEBUG) << __func__ << ": stream already closed.";
+            // BUGFIX: the constructed status was silently discarded; return it so the caller
+            // learns the stream was already closed.
+            return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
+        }
+    }
+    return ndk::ScopedAStatus::ok();
+}
+
+// Remove references to the specified input and output streams. When the device no longer
+// references input and output streams destroy the associated pipe.
+void StreamRemoteSubmix::shutdown() {
+    mCurrentRoute->closeStream(mIsInput);
+    // If all stream instances are closed, we can remove route information for this port.
+ if (!mCurrentRoute->hasAtleastOneStreamOpen()) { + mCurrentRoute->releasePipe(); + LOG(DEBUG) << __func__ << ": pipe destroyed"; + SubmixRoute::removeRoute(mDeviceAddress); + } + mCurrentRoute.reset(); +} + +::android::status_t StreamRemoteSubmix::transfer(void* buffer, size_t frameCount, + size_t* actualFrameCount, int32_t* latencyMs) { + *latencyMs = getDelayInUsForFrameCount(getStreamPipeSizeInFrames()) / 1000; + LOG(VERBOSE) << __func__ << ": Latency " << *latencyMs << "ms"; + mCurrentRoute->exitStandby(mIsInput); + ::android::status_t status = mIsInput ? inRead(buffer, frameCount, actualFrameCount) + : outWrite(buffer, frameCount, actualFrameCount); + if ((status != ::android::OK && mIsInput) || + ((status != ::android::OK && status != ::android::DEAD_OBJECT) && !mIsInput)) { + return status; + } + mFramesSinceStart += *actualFrameCount; + if (!mIsInput && status != ::android::DEAD_OBJECT) return ::android::OK; + // Input streams always need to block, output streams need to block when there is no sink. + // When the sink exists, more sophisticated blocking algorithm is implemented by MonoPipe. 
+ const long bufferDurationUs = + (*actualFrameCount) * MICROS_PER_SECOND / mContext.getSampleRate(); + const auto totalDurationUs = (::android::uptimeNanos() - mStartTimeNs) / NANOS_PER_MICROSECOND; + const long totalOffsetUs = + mFramesSinceStart * MICROS_PER_SECOND / mContext.getSampleRate() - totalDurationUs; + LOG(VERBOSE) << __func__ << ": totalOffsetUs " << totalOffsetUs; + if (totalOffsetUs > 0) { + const long sleepTimeUs = std::min(totalOffsetUs, bufferDurationUs); + LOG(VERBOSE) << __func__ << ": sleeping for " << sleepTimeUs << " us"; + usleep(sleepTimeUs); + } + return ::android::OK; +} + +::android::status_t StreamRemoteSubmix::refinePosition(StreamDescriptor::Position* position) { + sp source = mCurrentRoute->getSource(); + if (source == nullptr) { + return ::android::NO_INIT; + } + const ssize_t framesInPipe = source->availableToRead(); + if (framesInPipe <= 0) { + // No need to update the position frames + return ::android::OK; + } + if (mIsInput) { + position->frames += framesInPipe; + } else if (position->frames >= framesInPipe) { + position->frames -= framesInPipe; + } + return ::android::OK; +} + +long StreamRemoteSubmix::getDelayInUsForFrameCount(size_t frameCount) { + return frameCount * MICROS_PER_SECOND / mStreamConfig.sampleRate; +} + +// Calculate the maximum size of the pipe buffer in frames for the specified stream. +size_t StreamRemoteSubmix::getStreamPipeSizeInFrames() { + auto pipeConfig = mCurrentRoute->getPipeConfig(); + const size_t maxFrameSize = std::max(mStreamConfig.frameSize, pipeConfig.frameSize); + return (pipeConfig.frameCount * pipeConfig.frameSize) / maxFrameSize; +} + +::android::status_t StreamRemoteSubmix::outWrite(void* buffer, size_t frameCount, + size_t* actualFrameCount) { + sp sink = mCurrentRoute->getSink(); + if (sink != nullptr) { + if (sink->isShutdown()) { + sink.clear(); + if (++mWriteShutdownCount < kMaxErrorLogs) { + LOG(DEBUG) << __func__ << ": pipe shutdown, ignoring the write. 
(limited logging)"; + } + *actualFrameCount = frameCount; + return ::android::DEAD_OBJECT; // Induce wait in `transfer`. + } + } else { + LOG(FATAL) << __func__ << ": without a pipe!"; + return ::android::UNKNOWN_ERROR; + } + mWriteShutdownCount = 0; + + LOG(VERBOSE) << __func__ << ": " << mDeviceAddress.toString() << ", " << frameCount + << " frames"; + + const bool shouldBlockWrite = mCurrentRoute->shouldBlockWrite(); + size_t availableToWrite = sink->availableToWrite(); + // NOTE: sink has been checked above and sink and source life cycles are synchronized + sp source = mCurrentRoute->getSource(); + // If the write to the sink should be blocked, flush enough frames from the pipe to make space + // to write the most recent data. + if (!shouldBlockWrite && availableToWrite < frameCount) { + static uint8_t flushBuffer[64]; + const size_t flushBufferSizeFrames = sizeof(flushBuffer) / mStreamConfig.frameSize; + size_t framesToFlushFromSource = frameCount - availableToWrite; + LOG(DEBUG) << __func__ << ": flushing " << framesToFlushFromSource + << " frames from the pipe to avoid blocking"; + while (framesToFlushFromSource) { + const size_t flushSize = std::min(framesToFlushFromSource, flushBufferSizeFrames); + framesToFlushFromSource -= flushSize; + // read does not block + source->read(flushBuffer, flushSize); + } + } + availableToWrite = sink->availableToWrite(); + + if (!shouldBlockWrite && frameCount > availableToWrite) { + LOG(WARNING) << __func__ << ": writing " << availableToWrite << " vs. requested " + << frameCount; + // Truncate the request to avoid blocking. 
+ frameCount = availableToWrite; + } + ssize_t writtenFrames = sink->write(buffer, frameCount); + if (writtenFrames < 0) { + if (writtenFrames == (ssize_t)::android::NEGOTIATE) { + LOG(ERROR) << __func__ << ": write to pipe returned NEGOTIATE"; + sink.clear(); + *actualFrameCount = 0; + return ::android::UNKNOWN_ERROR; + } else { + // write() returned UNDERRUN or WOULD_BLOCK, retry + LOG(ERROR) << __func__ << ": write to pipe returned unexpected " << writtenFrames; + writtenFrames = sink->write(buffer, frameCount); + } + } + + if (writtenFrames < 0) { + LOG(ERROR) << __func__ << ": failed writing to pipe with " << writtenFrames; + *actualFrameCount = 0; + return ::android::UNKNOWN_ERROR; + } + if (writtenFrames > 0 && frameCount > (size_t)writtenFrames) { + LOG(WARNING) << __func__ << ": wrote " << writtenFrames << " vs. requested " << frameCount; + } + *actualFrameCount = writtenFrames; + return ::android::OK; +} + +::android::status_t StreamRemoteSubmix::inRead(void* buffer, size_t frameCount, + size_t* actualFrameCount) { + // in any case, it is emulated that data for the entire buffer was available + memset(buffer, 0, mStreamConfig.frameSize * frameCount); + *actualFrameCount = frameCount; + + // about to read from audio source + sp source = mCurrentRoute->getSource(); + if (source == nullptr) { + if (++mReadErrorCount < kMaxErrorLogs) { + LOG(ERROR) << __func__ + << ": no audio pipe yet we're trying to read! (not all errors will be " + "logged)"; + } + return ::android::OK; + } + // get and hold the sink because 'MonoPipeReader' does not hold a strong pointer to it. + sp sink = mCurrentRoute->getSink(); + if (sink == nullptr) { + if (++mReadErrorCount < kMaxErrorLogs) { + LOG(ERROR) << __func__ + << ": the sink has been released! 
(not all errors will be logged)"; + } + return ::android::OK; + } + mReadErrorCount = 0; + + LOG(VERBOSE) << __func__ << ": " << mDeviceAddress.toString() << ", " << frameCount + << " frames"; + // read the data from the pipe + char* buff = (char*)buffer; + size_t actuallyRead = 0; + long remainingFrames = frameCount; + // Try to wait as long as possible for the audio duration, but leave some time for the call to + // 'transfer' to complete. 'kReadAttemptSleepUs' is a good constant for this purpose because it + // is by definition "strictly inferior" to the typical buffer duration. + const long durationUs = + std::max(0L, getDelayInUsForFrameCount(frameCount) - kReadAttemptSleepUs); + const int64_t deadlineTimeNs = ::android::uptimeNanos() + durationUs * NANOS_PER_MICROSECOND; + while (remainingFrames > 0) { + ssize_t framesRead = source->read(buff, remainingFrames); + LOG(VERBOSE) << __func__ << ": frames read " << framesRead; + if (framesRead > 0) { + remainingFrames -= framesRead; + buff += framesRead * mStreamConfig.frameSize; + LOG(VERBOSE) << __func__ << ": got " << framesRead + << " frames, remaining =" << remainingFrames; + actuallyRead += framesRead; + } + if (::android::uptimeNanos() >= deadlineTimeNs) break; + if (framesRead <= 0) { + LOG(VERBOSE) << __func__ << ": read returned " << framesRead + << ", read failure, sleeping for " << kReadAttemptSleepUs << " us"; + usleep(kReadAttemptSleepUs); + } + } + if (actuallyRead < frameCount) { + if (++mReadFailureCount < kMaxReadFailureAttempts) { + LOG(WARNING) << __func__ << ": read " << actuallyRead << " vs. 
requested " << frameCount + << " (not all errors will be logged)"; + } + } else { + mReadFailureCount = 0; + } + mCurrentRoute->updateReadCounterFrames(*actualFrameCount); + return ::android::OK; +} + +StreamInRemoteSubmix::StreamInRemoteSubmix(StreamContext&& context, + const SinkMetadata& sinkMetadata, + const std::vector& microphones) + : StreamIn(std::move(context), microphones), StreamSwitcher(&mContextInstance, sinkMetadata) {} + +ndk::ScopedAStatus StreamInRemoteSubmix::getActiveMicrophones( + std::vector* _aidl_return) { + LOG(DEBUG) << __func__ << ": not supported"; + *_aidl_return = std::vector(); + return ndk::ScopedAStatus::ok(); +} + +StreamSwitcher::DeviceSwitchBehavior StreamInRemoteSubmix::switchCurrentStream( + const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices) { + // This implementation effectively postpones stream creation until + // receiving the first call to 'setConnectedDevices' with a non-empty list. + if (isStubStream()) { + if (devices.size() == 1) { + auto deviceDesc = devices.front().type; + if (deviceDesc.type == + ::aidl::android::media::audio::common::AudioDeviceType::IN_SUBMIX) { + return DeviceSwitchBehavior::CREATE_NEW_STREAM; + } + LOG(ERROR) << __func__ << ": Device type " << toString(deviceDesc.type) + << " not supported"; + } else { + LOG(ERROR) << __func__ << ": Only single device supported."; + } + return DeviceSwitchBehavior::UNSUPPORTED_DEVICES; + } + return DeviceSwitchBehavior::USE_CURRENT_STREAM; +} + +std::unique_ptr StreamInRemoteSubmix::createNewStream( + const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices, + StreamContext* context, const Metadata& metadata) { + return std::unique_ptr( + new InnerStreamWrapper(context, metadata, devices.front().address)); +} + +StreamOutRemoteSubmix::StreamOutRemoteSubmix(StreamContext&& context, + const SourceMetadata& sourceMetadata, + const std::optional& offloadInfo) + : StreamOut(std::move(context), offloadInfo), + 
StreamSwitcher(&mContextInstance, sourceMetadata) {} + +StreamSwitcher::DeviceSwitchBehavior StreamOutRemoteSubmix::switchCurrentStream( + const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices) { + // This implementation effectively postpones stream creation until + // receiving the first call to 'setConnectedDevices' with a non-empty list. + if (isStubStream()) { + if (devices.size() == 1) { + auto deviceDesc = devices.front().type; + if (deviceDesc.type == + ::aidl::android::media::audio::common::AudioDeviceType::OUT_SUBMIX) { + return DeviceSwitchBehavior::CREATE_NEW_STREAM; + } + LOG(ERROR) << __func__ << ": Device type " << toString(deviceDesc.type) + << " not supported"; + } else { + LOG(ERROR) << __func__ << ": Only single device supported."; + } + return DeviceSwitchBehavior::UNSUPPORTED_DEVICES; + } + return DeviceSwitchBehavior::USE_CURRENT_STREAM; +} + +std::unique_ptr StreamOutRemoteSubmix::createNewStream( + const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices, + StreamContext* context, const Metadata& metadata) { + return std::unique_ptr( + new InnerStreamWrapper(context, metadata, devices.front().address)); +} + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/r_submix/SubmixRoute.cpp b/audio/r_submix/SubmixRoute.cpp new file mode 100644 index 0000000..445b1d3 --- /dev/null +++ b/audio/r_submix/SubmixRoute.cpp @@ -0,0 +1,298 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#define LOG_TAG "AHAL_SubmixRoute" +#include +#include + +#include + +#include "SubmixRoute.h" + +using aidl::android::hardware::audio::common::getChannelCount; +using aidl::android::media::audio::common::AudioDeviceAddress; + +namespace aidl::android::hardware::audio::core::r_submix { + +// static +SubmixRoute::RoutesMonitor SubmixRoute::getRoutes(bool tryLock) { + static std::mutex submixRoutesLock; + static RoutesMap submixRoutes; + return !tryLock ? RoutesMonitor(submixRoutesLock, submixRoutes) + : RoutesMonitor(submixRoutesLock, submixRoutes, tryLock); +} + +// static +std::shared_ptr SubmixRoute::findOrCreateRoute(const AudioDeviceAddress& deviceAddress, + const AudioConfig& pipeConfig) { + auto routes = getRoutes(); + auto routeItr = routes->find(deviceAddress); + if (routeItr != routes->end()) { + return routeItr->second; + } + auto route = std::make_shared(); + if (::android::OK != route->createPipe(pipeConfig)) { + LOG(ERROR) << __func__ << ": create pipe failed"; + return nullptr; + } + routes->emplace(deviceAddress, route); + return route; +} + +// static +std::shared_ptr SubmixRoute::findRoute(const AudioDeviceAddress& deviceAddress) { + auto routes = getRoutes(); + auto routeItr = routes->find(deviceAddress); + if (routeItr != routes->end()) { + return routeItr->second; + } + return nullptr; +} + +// static +void SubmixRoute::removeRoute(const AudioDeviceAddress& deviceAddress) { + getRoutes()->erase(deviceAddress); +} + +// static +std::string SubmixRoute::dumpRoutes() { + auto routes = getRoutes(true /*tryLock*/); + std::string result; + if (routes->empty()) result.append(" "); + for (const auto& r : *(routes.operator->())) { + result.append(" - ") + .append(r.first.toString()) + .append(": ") + .append(r.second->dump()) + .append("\n"); + } + return result; +} + +// Verify a submix input or output stream can be opened. 
+bool SubmixRoute::isStreamConfigValid(bool isInput, const AudioConfig& streamConfig) { + // If the stream is already open, don't open it again. + // ENABLE_LEGACY_INPUT_OPEN is default behaviour + if (!isInput && isStreamOutOpen()) { + LOG(ERROR) << __func__ << ": output stream already open."; + return false; + } + // If either stream is open, verify the existing pipe config matches the stream config. + if (hasAtleastOneStreamOpen() && !isStreamConfigCompatible(streamConfig)) { + return false; + } + return true; +} + +// Compare this stream config with existing pipe config, returning false if they do *not* +// match, true otherwise. +bool SubmixRoute::isStreamConfigCompatible(const AudioConfig& streamConfig) { + std::lock_guard guard(mLock); + if (streamConfig.channelLayout != mPipeConfig.channelLayout) { + LOG(ERROR) << __func__ << ": channel count mismatch, stream channels = " + << streamConfig.channelLayout.toString() + << " pipe config channels = " << mPipeConfig.channelLayout.toString(); + return false; + } + if (streamConfig.sampleRate != mPipeConfig.sampleRate) { + LOG(ERROR) << __func__ + << ": sample rate mismatch, stream sample rate = " << streamConfig.sampleRate + << " pipe config sample rate = " << mPipeConfig.sampleRate; + return false; + } + if (streamConfig.format != mPipeConfig.format) { + LOG(ERROR) << __func__ + << ": format mismatch, stream format = " << streamConfig.format.toString() + << " pipe config format = " << mPipeConfig.format.toString(); + return false; + } + return true; +} + +bool SubmixRoute::hasAtleastOneStreamOpen() { + std::lock_guard guard(mLock); + return (mStreamInOpen || mStreamOutOpen); +} + +// We DO NOT block if: +// - no peer input stream is present +// - the peer input is in standby AFTER having been active. 
+// We DO block if: +// - the input was never activated to avoid discarding first frames in the pipe in case capture +// start was delayed +bool SubmixRoute::shouldBlockWrite() { + std::lock_guard guard(mLock); + return mStreamInOpen && (!mStreamInStandby || mReadCounterFrames == 0); +} + +long SubmixRoute::updateReadCounterFrames(size_t frameCount) { + std::lock_guard guard(mLock); + mReadCounterFrames += frameCount; + return mReadCounterFrames; +} + +void SubmixRoute::openStream(bool isInput) { + std::lock_guard guard(mLock); + if (isInput) { + if (mStreamInOpen) { + mInputRefCount++; + } else { + mInputRefCount = 1; + mStreamInOpen = true; + } + mStreamInStandby = true; + mReadCounterFrames = 0; + if (mSink != nullptr) { + mSink->shutdown(false); + } + } else { + mStreamOutOpen = true; + } +} + +void SubmixRoute::closeStream(bool isInput) { + std::lock_guard guard(mLock); + if (isInput) { + if (--mInputRefCount == 0) { + mStreamInOpen = false; + if (mSink != nullptr) { + mSink->shutdown(true); + } + } + } else { + mStreamOutOpen = false; + } +} + +// If SubmixRoute doesn't exist for a port, create a pipe for the submix audio device of size +// buffer_size_frames and store config of the submix audio device. 
+::android::status_t SubmixRoute::createPipe(const AudioConfig& streamConfig) { + const int channelCount = getChannelCount(streamConfig.channelLayout); + const audio_format_t audioFormat = VALUE_OR_RETURN_STATUS( + aidl2legacy_AudioFormatDescription_audio_format_t(streamConfig.format)); + const ::android::NBAIO_Format format = + ::android::Format_from_SR_C(streamConfig.sampleRate, channelCount, audioFormat); + const ::android::NBAIO_Format offers[1] = {format}; + size_t numCounterOffers = 0; + + const size_t pipeSizeInFrames = + r_submix::kDefaultPipeSizeInFrames * + ((float)streamConfig.sampleRate / r_submix::kDefaultSampleRateHz); + LOG(VERBOSE) << __func__ << ": creating pipe, rate : " << streamConfig.sampleRate + << ", pipe size : " << pipeSizeInFrames; + + // Create a MonoPipe with optional blocking set to true. + sp sink = sp::make(pipeSizeInFrames, format, true /*writeCanBlock*/); + if (sink == nullptr) { + LOG(FATAL) << __func__ << ": sink is null"; + return ::android::UNEXPECTED_NULL; + } + + // Negotiation between the source and sink cannot fail as the device open operation + // creates both ends of the pipe using the same audio format. + ssize_t index = sink->negotiate(offers, 1, nullptr, numCounterOffers); + if (index != 0) { + LOG(FATAL) << __func__ << ": Negotiation for the sink failed, index = " << index; + return ::android::BAD_INDEX; + } + sp source = sp::make(sink.get()); + if (source == nullptr) { + LOG(FATAL) << __func__ << ": source is null"; + return ::android::UNEXPECTED_NULL; + } + numCounterOffers = 0; + index = source->negotiate(offers, 1, nullptr, numCounterOffers); + if (index != 0) { + LOG(FATAL) << __func__ << ": Negotiation for the source failed, index = " << index; + return ::android::BAD_INDEX; + } + LOG(VERBOSE) << __func__ << ": Pipe frame size : " << streamConfig.frameSize + << ", pipe frames : " << sink->maxFrames(); + + // Save references to the source and sink. 
+ { + std::lock_guard guard(mLock); + mPipeConfig = streamConfig; + mPipeConfig.frameCount = sink->maxFrames(); + mSink = std::move(sink); + mSource = std::move(source); + } + + return ::android::OK; +} + +// Release references to the sink and source. +AudioConfig SubmixRoute::releasePipe() { + std::lock_guard guard(mLock); + mSink.clear(); + mSource.clear(); + return mPipeConfig; +} + +::android::status_t SubmixRoute::resetPipe() { + return createPipe(releasePipe()); +} + +void SubmixRoute::standby(bool isInput) { + std::lock_guard guard(mLock); + + if (isInput) { + mStreamInStandby = true; + } else if (!mStreamOutStandby) { + mStreamOutStandby = true; + mStreamOutStandbyTransition = true; + } +} + +void SubmixRoute::exitStandby(bool isInput) { + std::lock_guard guard(mLock); + + if (isInput) { + if (mStreamInStandby || mStreamOutStandbyTransition) { + mStreamInStandby = false; + mStreamOutStandbyTransition = false; + mReadCounterFrames = 0; + } + } else { + if (mStreamOutStandby) { + mStreamOutStandby = false; + mStreamOutStandbyTransition = true; + } + } +} + +std::string SubmixRoute::dump() NO_THREAD_SAFETY_ANALYSIS { + const bool isLocked = mLock.try_lock(); + std::string result = std::string(isLocked ? "" : "! ") + .append("Input ") + .append(mStreamInOpen ? "open" : "closed") + .append(mStreamInStandby ? ", standby" : ", active") + .append(", refcount: ") + .append(std::to_string(mInputRefCount)) + .append(", framesRead: ") + .append(mSource ? std::to_string(mSource->framesRead()) : "") + .append("; Output ") + .append(mStreamOutOpen ? "open" : "closed") + .append(mStreamOutStandby ? ", standby" : ", active") + .append(", framesWritten: ") + .append(mSink ? 
std::to_string(mSink->framesWritten()) : ""); + if (isLocked) mLock.unlock(); + return result; +} + +} // namespace aidl::android::hardware::audio::core::r_submix diff --git a/audio/r_submix/SubmixRoute.h b/audio/r_submix/SubmixRoute.h new file mode 100644 index 0000000..5425f12 --- /dev/null +++ b/audio/r_submix/SubmixRoute.h @@ -0,0 +1,164 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include +#include + +#include +#include + +#include +#include + +#include +#include +#include + +using aidl::android::media::audio::common::AudioChannelLayout; +using aidl::android::media::audio::common::AudioFormatDescription; +using aidl::android::media::audio::common::AudioFormatType; +using aidl::android::media::audio::common::PcmType; +using ::android::MonoPipe; +using ::android::MonoPipeReader; +using ::android::sp; + +namespace aidl::android::hardware::audio::core::r_submix { + +static constexpr int kDefaultSampleRateHz = 48000; +// Value used to divide the MonoPipe buffer into segments that are written to the source and +// read from the sink. The maximum latency of the device is the size of the MonoPipe's buffer +// the minimum latency is the MonoPipe buffer size divided by this value. +static constexpr int kDefaultPipePeriodCount = 4; +// Size at the default sample rate +// NOTE: This value will be rounded up to the nearest power of 2 by MonoPipe. 
+static constexpr int kDefaultPipeSizeInFrames = 1024 * kDefaultPipePeriodCount; + +// Configuration of the audio stream. +struct AudioConfig { + int sampleRate = kDefaultSampleRateHz; + AudioFormatDescription format = + AudioFormatDescription{.type = AudioFormatType::PCM, .pcm = PcmType::INT_16_BIT}; + AudioChannelLayout channelLayout = + AudioChannelLayout::make( + AudioChannelLayout::LAYOUT_STEREO); + size_t frameSize; + size_t frameCount; +}; + +class SubmixRoute { + public: + static std::shared_ptr findOrCreateRoute( + const ::aidl::android::media::audio::common::AudioDeviceAddress& deviceAddress, + const AudioConfig& pipeConfig); + static std::shared_ptr findRoute( + const ::aidl::android::media::audio::common::AudioDeviceAddress& deviceAddress); + static void removeRoute( + const ::aidl::android::media::audio::common::AudioDeviceAddress& deviceAddress); + static std::string dumpRoutes(); + + bool isStreamInOpen() { + std::lock_guard guard(mLock); + return mStreamInOpen; + } + bool getStreamInStandby() { + std::lock_guard guard(mLock); + return mStreamInStandby; + } + bool isStreamOutOpen() { + std::lock_guard guard(mLock); + return mStreamOutOpen; + } + bool getStreamOutStandby() { + std::lock_guard guard(mLock); + return mStreamOutStandby; + } + long getReadCounterFrames() { + std::lock_guard guard(mLock); + return mReadCounterFrames; + } + sp getSink() { + std::lock_guard guard(mLock); + return mSink; + } + sp getSource() { + std::lock_guard guard(mLock); + return mSource; + } + AudioConfig getPipeConfig() { + std::lock_guard guard(mLock); + return mPipeConfig; + } + + bool isStreamConfigValid(bool isInput, const AudioConfig& streamConfig); + void closeStream(bool isInput); + ::android::status_t createPipe(const AudioConfig& streamConfig); + void exitStandby(bool isInput); + bool hasAtleastOneStreamOpen(); + int notifyReadError(); + void openStream(bool isInput); + AudioConfig releasePipe(); + ::android::status_t resetPipe(); + bool shouldBlockWrite(); + 
void standby(bool isInput); + long updateReadCounterFrames(size_t frameCount); + + std::string dump(); + + private: + using RoutesMap = std::map<::aidl::android::media::audio::common::AudioDeviceAddress, + std::shared_ptr>; + class RoutesMonitor { + public: + RoutesMonitor(std::mutex& mutex, RoutesMap& routes) : mLock(mutex), mRoutes(routes) {} + RoutesMonitor(std::mutex& mutex, RoutesMap& routes, bool /*tryLock*/) + : mLock(mutex, std::try_to_lock), mRoutes(routes) {} + RoutesMap* operator->() { return &mRoutes; } + + private: + std::unique_lock mLock; + RoutesMap& mRoutes; + }; + + static RoutesMonitor getRoutes(bool tryLock = false); + + bool isStreamConfigCompatible(const AudioConfig& streamConfig); + + std::mutex mLock; + AudioConfig mPipeConfig GUARDED_BY(mLock); + bool mStreamInOpen GUARDED_BY(mLock) = false; + int mInputRefCount GUARDED_BY(mLock) = 0; + bool mStreamInStandby GUARDED_BY(mLock) = true; + bool mStreamOutStandbyTransition GUARDED_BY(mLock) = false; + bool mStreamOutOpen GUARDED_BY(mLock) = false; + bool mStreamOutStandby GUARDED_BY(mLock) = true; + // how many frames have been requested to be read since standby + long mReadCounterFrames GUARDED_BY(mLock) = 0; + + // Pipe variables: they handle the ring buffer that "pipes" audio: + // - from the submix virtual audio output == what needs to be played + // remotely, seen as an output for the client + // - to the virtual audio source == what is captured by the component + // which "records" the submix / virtual audio source, and handles it as needed. + // A usecase example is one where the component capturing the audio is then sending it over + // Wifi for presentation on a remote Wifi Display device (e.g. a dongle attached to a TV, or a + // TV with Wifi Display capabilities), or to a wireless audio player. 
+ sp mSink GUARDED_BY(mLock); + sp mSource GUARDED_BY(mLock); +}; + +} // namespace aidl::android::hardware::audio::core::r_submix diff --git a/audio/spatializer/Android.bp b/audio/spatializer/Android.bp new file mode 100644 index 0000000..2c229fe --- /dev/null +++ b/audio/spatializer/Android.bp @@ -0,0 +1,40 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package { + default_team: "trendy_team_android_media_audio_framework", + // See: http://go/android-license-faq + // A large-scale-change added 'default_applicable_licenses' to import + // all of the 'license_kinds' from "hardware_interfaces_license" + // to get the below license kinds: + // SPDX-license-identifier-Apache-2.0 + default_applicable_licenses: ["hardware_interfaces_license"], +} + +cc_library_shared { + name: "libspatializersw", + defaults: [ + "aidlaudioeffectservice_defaults", + ], + srcs: [ + "SpatializerSw.cpp", + ":effectCommonFile", + ], + relative_install_path: "soundfx", + visibility: [ + "//hardware/interfaces/audio/aidl/default:__subpackages__", + ], +} diff --git a/audio/spatializer/SpatializerSw.cpp b/audio/spatializer/SpatializerSw.cpp new file mode 100644 index 0000000..fd3c192 --- /dev/null +++ b/audio/spatializer/SpatializerSw.cpp @@ -0,0 +1,225 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file 
except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "AHAL_SpatializerSw" + +#include "SpatializerSw.h" + +#include +#include + +#include + +using aidl::android::hardware::audio::common::getChannelCount; +using aidl::android::hardware::audio::effect::Descriptor; +using aidl::android::hardware::audio::effect::getEffectImplUuidSpatializerSw; +using aidl::android::hardware::audio::effect::getEffectTypeUuidSpatializer; +using aidl::android::hardware::audio::effect::IEffect; +using aidl::android::hardware::audio::effect::SpatializerSw; +using aidl::android::hardware::audio::effect::State; +using aidl::android::media::audio::common::AudioChannelLayout; +using aidl::android::media::audio::common::AudioUuid; +using aidl::android::media::audio::common::HeadTracking; +using aidl::android::media::audio::common::Spatialization; + +extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid, + std::shared_ptr* instanceSpp) { + if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidSpatializerSw()) { + LOG(ERROR) << __func__ << "uuid not supported"; + return EX_ILLEGAL_ARGUMENT; + } + if (!instanceSpp) { + LOG(ERROR) << __func__ << " invalid input parameter!"; + return EX_ILLEGAL_ARGUMENT; + } + + *instanceSpp = ndk::SharedRefBase::make(); + LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created"; + return EX_NONE; +} + +extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) { + if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidSpatializerSw()) { + LOG(ERROR) << __func__ 
<< "uuid not supported"; + return EX_ILLEGAL_ARGUMENT; + } + *_aidl_return = SpatializerSw::kDescriptor; + return EX_NONE; +} + +namespace aidl::android::hardware::audio::effect { + +const std::string SpatializerSw::kEffectName = "SpatializerSw"; + +const AudioChannelLayout kSupportedChannelMask = + AudioChannelLayout::make( + AudioChannelLayout::LAYOUT_5POINT1); +const std::vector SpatializerSw::kRanges = { + MAKE_RANGE(Spatializer, supportedChannelLayout, {kSupportedChannelMask}, + {kSupportedChannelMask}), + MAKE_RANGE(Spatializer, spatializationLevel, Spatialization::Level::NONE, + Spatialization::Level::BED_PLUS_OBJECTS), + MAKE_RANGE(Spatializer, spatializationMode, Spatialization::Mode::BINAURAL, + Spatialization::Mode::TRANSAURAL), + MAKE_RANGE(Spatializer, headTrackingSensorId, std::numeric_limits::min(), + std::numeric_limits::max()), + MAKE_RANGE(Spatializer, headTrackingMode, HeadTracking::Mode::OTHER, + HeadTracking::Mode::RELATIVE_SCREEN), + MAKE_RANGE(Spatializer, headTrackingConnectionMode, + HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED, + HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_TUNNEL)}; +const Capability SpatializerSw::kCapability = {.range = {SpatializerSw::kRanges}}; +const Descriptor SpatializerSw::kDescriptor = { + .common = {.id = {.type = getEffectTypeUuidSpatializer(), + .uuid = getEffectImplUuidSpatializerSw()}, + .flags = {.type = Flags::Type::INSERT, + .insert = Flags::Insert::FIRST, + .hwAcceleratorMode = Flags::HardwareAccelerator::NONE}, + .name = SpatializerSw::kEffectName, + .implementor = "The Android Open Source Project"}, + .capability = SpatializerSw::kCapability}; + +ndk::ScopedAStatus SpatializerSw::getDescriptor(Descriptor* _aidl_return) { + LOG(DEBUG) << __func__ << kDescriptor.toString(); + *_aidl_return = kDescriptor; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus SpatializerSw::setParameterSpecific(const Parameter::Specific& specific) { + RETURN_IF(Parameter::Specific::spatializer != 
specific.getTag(), EX_ILLEGAL_ARGUMENT, + "EffectNotSupported"); + RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext"); + + auto& param = specific.get(); + RETURN_IF(!inRange(param, kRanges), EX_ILLEGAL_ARGUMENT, "outOfRange"); + + return mContext->setParam(param.getTag(), param); +} + +ndk::ScopedAStatus SpatializerSw::getParameterSpecific(const Parameter::Id& id, + Parameter::Specific* specific) { + RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext"); + + auto tag = id.getTag(); + RETURN_IF(Parameter::Id::spatializerTag != tag, EX_ILLEGAL_ARGUMENT, "wrongIdTag"); + auto spatializerId = id.get(); + auto spatializerTag = spatializerId.getTag(); + switch (spatializerTag) { + case Spatializer::Id::commonTag: { + auto specificTag = spatializerId.get(); + std::optional param = mContext->getParam(specificTag); + if (!param.has_value()) { + return ndk::ScopedAStatus::fromExceptionCodeWithMessage( + EX_ILLEGAL_ARGUMENT, "SpatializerTagNotSupported"); + } + specific->set(param.value()); + break; + } + default: { + LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT, + "SpatializerTagNotSupported"); + } + } + return ndk::ScopedAStatus::ok(); +} + +std::shared_ptr SpatializerSw::createContext(const Parameter::Common& common) { + if (common.input.base.channelMask != kSupportedChannelMask) { + LOG(ERROR) << __func__ + << " channelMask not supported: " << common.input.base.channelMask.toString(); + return nullptr; + } + if (mContext) { + LOG(DEBUG) << __func__ << " context already exist"; + } else { + mContext = std::make_shared(1 /* statusFmqDepth */, common); + } + return mContext; +} + +RetCode SpatializerSw::releaseContext() { + if (mContext) { + mContext.reset(); + } + return RetCode::SUCCESS; +} + +SpatializerSw::~SpatializerSw() { + cleanUp(); + LOG(DEBUG) << __func__; +} + +// Processing method running in EffectWorker thread. 
+IEffect::Status SpatializerSw::effectProcessImpl(float* in, float* out, int samples) { + RETURN_VALUE_IF(!mContext, (IEffect::Status{EX_NULL_POINTER, 0, 0}), "nullContext"); + return mContext->process(in, out, samples); +} + +SpatializerSwContext::SpatializerSwContext(int statusDepth, const Parameter::Common& common) + : EffectContext(statusDepth, common) { + LOG(DEBUG) << __func__; +} + +SpatializerSwContext::~SpatializerSwContext() { + LOG(DEBUG) << __func__; +} + +template +std::optional SpatializerSwContext::getParam(TAG tag) { + if (mParamsMap.find(tag) != mParamsMap.end()) { + return mParamsMap.at(tag); + } + if (tag == Spatializer::supportedChannelLayout) { + return Spatializer::make( + {AudioChannelLayout::make( + AudioChannelLayout::LAYOUT_5POINT1)}); + } + return std::nullopt; +} + +template +ndk::ScopedAStatus SpatializerSwContext::setParam(TAG tag, Spatializer spatializer) { + RETURN_IF(tag == Spatializer::supportedChannelLayout, EX_ILLEGAL_ARGUMENT, + "supportedChannelLayoutGetOnly"); + + mParamsMap[tag] = spatializer; + return ndk::ScopedAStatus::ok(); +} + +IEffect::Status SpatializerSwContext::process(float* in, float* out, int samples) { + LOG(DEBUG) << __func__ << " in " << in << " out " << out << " samples " << samples; + IEffect::Status status = {EX_ILLEGAL_ARGUMENT, 0, 0}; + + const auto inputChannelCount = getChannelCount(mCommon.input.base.channelMask); + const auto outputChannelCount = getChannelCount(mCommon.output.base.channelMask); + if (outputChannelCount < 2 || inputChannelCount < outputChannelCount) { + LOG(ERROR) << __func__ << " invalid channel count, in: " << inputChannelCount + << " out: " << outputChannelCount; + return status; + } + + int iFrames = samples / inputChannelCount; + for (int i = 0; i < iFrames; i++) { + std::memcpy(out, in, outputChannelCount); + in += inputChannelCount; + out += outputChannelCount; + } + return {STATUS_OK, static_cast(iFrames * inputChannelCount), + static_cast(iFrames * outputChannelCount)}; +} + 
+} // namespace aidl::android::hardware::audio::effect diff --git a/audio/spatializer/SpatializerSw.h b/audio/spatializer/SpatializerSw.h new file mode 100644 index 0000000..b321e83 --- /dev/null +++ b/audio/spatializer/SpatializerSw.h @@ -0,0 +1,70 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include "effect-impl/EffectContext.h" +#include "effect-impl/EffectImpl.h" + +#include + +#include +#include + +namespace aidl::android::hardware::audio::effect { + +class SpatializerSwContext final : public EffectContext { + public: + SpatializerSwContext(int statusDepth, const Parameter::Common& common); + ~SpatializerSwContext(); + + template + std::optional getParam(TAG tag); + template + ndk::ScopedAStatus setParam(TAG tag, Spatializer spatializer); + + IEffect::Status process(float* in, float* out, int samples); + + private: + std::unordered_map mParamsMap; +}; + +class SpatializerSw final : public EffectImpl { + public: + static const std::string kEffectName; + static const Capability kCapability; + static const Descriptor kDescriptor; + ~SpatializerSw(); + + ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override; + ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) + REQUIRES(mImplMutex) override; + ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific) + REQUIRES(mImplMutex) override; + + 
std::shared_ptr createContext(const Parameter::Common& common) + REQUIRES(mImplMutex) override; + RetCode releaseContext() REQUIRES(mImplMutex) override; + + std::string getEffectName() override { return kEffectName; }; + IEffect::Status effectProcessImpl(float* in, float* out, int samples) + REQUIRES(mImplMutex) override; + + private: + static const std::vector kRanges; + std::shared_ptr mContext GUARDED_BY(mImplMutex) = nullptr; +}; +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/stub/ApeHeader.cpp b/audio/stub/ApeHeader.cpp new file mode 100644 index 0000000..9112377 --- /dev/null +++ b/audio/stub/ApeHeader.cpp @@ -0,0 +1,67 @@ +/* + * Copyright (C) 2025 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "AHAL_OffloadStream" +#include + +#include "ApeHeader.h" + +namespace aidl::android::hardware::audio::core { + +static constexpr uint32_t kApeSignature1 = 0x2043414d; // 'MAC '; +static constexpr uint32_t kApeSignature2 = 0x4643414d; // 'MACF'; +static constexpr uint16_t kMinimumVersion = 3980; + +void* findApeHeader(void* buffer, size_t bufferSizeBytes, ApeHeader** header) { + auto advanceBy = [&](size_t bytes) -> void* { + buffer = static_cast(buffer) + bytes; + bufferSizeBytes -= bytes; + return buffer; + }; + + while (bufferSizeBytes >= sizeof(ApeDescriptor) + sizeof(ApeHeader)) { + ApeDescriptor* descPtr = static_cast(buffer); + if (descPtr->signature != kApeSignature1 && descPtr->signature != kApeSignature2) { + advanceBy(sizeof(descPtr->signature)); + continue; + } + if (descPtr->version < kMinimumVersion) { + LOG(ERROR) << __func__ << ": Unsupported APE version: " << descPtr->version + << ", minimum supported version: " << kMinimumVersion; + // Older versions only have a header, which is of the size similar to the modern header. 
+ advanceBy(sizeof(ApeHeader)); + continue; + } + if (descPtr->descriptorSizeBytes > bufferSizeBytes) { + LOG(ERROR) << __func__ + << ": Invalid APE descriptor size: " << descPtr->descriptorSizeBytes + << ", overruns remaining buffer size: " << bufferSizeBytes; + advanceBy(sizeof(ApeDescriptor)); + continue; + } + advanceBy(descPtr->descriptorSizeBytes); + if (sizeof(ApeHeader) > bufferSizeBytes) { + LOG(ERROR) << __func__ << ": APE header is incomplete, want: " << sizeof(ApeHeader) + << " bytes, have: " << bufferSizeBytes; + return nullptr; + } + *header = static_cast(buffer); + return advanceBy(sizeof(ApeHeader)); + } + return nullptr; +} + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/stub/ApeHeader.h b/audio/stub/ApeHeader.h new file mode 100644 index 0000000..df30335 --- /dev/null +++ b/audio/stub/ApeHeader.h @@ -0,0 +1,63 @@ +/* + * Copyright (C) 2025 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include + +namespace aidl::android::hardware::audio::core { + +// Simplified APE (Monkey Audio) header definition sufficient to figure out +// the basic parameters of the encoded file. Only supports the "current" +// versions of the header (>= 3980). + +#pragma pack(push, 4) + +// Only the beginning of the descriptor is needed to find the header which +// follows the descriptor. 
+struct ApeDescriptor { + uint32_t signature; // 'MAC ' or 'MACF' + uint16_t version; + uint16_t padding; + uint32_t descriptorSizeBytes; + uint32_t headerSizeBytes; +}; + +struct ApeHeader { + uint16_t compressionLevel; + uint16_t flags; + uint32_t blocksPerFrame; // "frames" are encoder frames, while "blocks" are audio frames + uint32_t lastFrameBlocks; // number of "blocks" in the last encoder "frame" + uint32_t totalFrames; // total number of encoder "frames" + uint16_t bitsPerSample; + uint16_t channelCount; + uint32_t sampleRate; +}; + +#pragma pack(pop) + +// Tries to find APE descriptor and header in the buffer. Returns the position +// after the header or nullptr if it was not found. +void* findApeHeader(void* buffer, size_t bufferSizeBytes, ApeHeader** header); + +// Clip duration in audio frames ("blocks" in the APE terminology). +inline int64_t getApeClipDurationFrames(const ApeHeader* header) { + return header->totalFrames != 0 + ? (header->totalFrames - 1) * header->blocksPerFrame + header->lastFrameBlocks + : 0; +} + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/stub/DriverStubImpl.cpp b/audio/stub/DriverStubImpl.cpp new file mode 100644 index 0000000..cb8ee70 --- /dev/null +++ b/audio/stub/DriverStubImpl.cpp @@ -0,0 +1,141 @@ +/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include + +#define LOG_TAG "AHAL_Stream" +#include +#include + +#include "core-impl/DriverStubImpl.h" + +namespace aidl::android::hardware::audio::core { + +DriverStubImpl::DriverStubImpl(const StreamContext& context, int asyncSleepTimeUs) + : mBufferSizeFrames(context.getBufferSizeInFrames()), + mFrameSizeBytes(context.getFrameSize()), + mSampleRate(context.getSampleRate()), + mIsAsynchronous(!!context.getAsyncCallback()), + mIsInput(context.isInput()), + mMixPortHandle(context.getMixPortHandle()), + mAsyncSleepTimeUs(asyncSleepTimeUs) {} + +#define LOG_ENTRY() \ + LOG(DEBUG) << "[" << (mIsInput ? "in" : "out") << "|ioHandle:" << mMixPortHandle << "] " \ + << __func__; + +::android::status_t DriverStubImpl::init(DriverCallbackInterface* /*callback*/) { + LOG_ENTRY(); + mIsInitialized = true; + return ::android::OK; +} + +::android::status_t DriverStubImpl::drain(StreamDescriptor::DrainMode) { + LOG_ENTRY(); + if (!mIsInitialized) { + LOG(FATAL) << __func__ << ": must not happen for an uninitialized driver"; + } + if (!mIsInput) { + if (!mIsAsynchronous) { + static constexpr float kMicrosPerSecond = MICROS_PER_SECOND; + const size_t delayUs = static_cast( + std::roundf(mBufferSizeFrames * kMicrosPerSecond / mSampleRate)); + usleep(delayUs); + } else if (mAsyncSleepTimeUs) { + usleep(mAsyncSleepTimeUs); + } + } + return ::android::OK; +} + +::android::status_t DriverStubImpl::flush() { + LOG_ENTRY(); + if (!mIsInitialized) { + LOG(FATAL) << __func__ << ": must not happen for an uninitialized driver"; + } + return ::android::OK; +} + +::android::status_t DriverStubImpl::pause() { + LOG_ENTRY(); + if (!mIsInitialized) { + LOG(FATAL) << __func__ << ": must not happen for an uninitialized driver"; + } + return ::android::OK; +} + +::android::status_t DriverStubImpl::standby() { + LOG_ENTRY(); + if (!mIsInitialized) { + LOG(FATAL) << __func__ << ": must not happen for an uninitialized driver"; + } + mIsStandby = true; + return ::android::OK; +} + 
+::android::status_t DriverStubImpl::start() { + LOG_ENTRY(); + if (!mIsInitialized) { + LOG(FATAL) << __func__ << ": must not happen for an uninitialized driver"; + } + mIsStandby = false; + mStartTimeNs = ::android::uptimeNanos(); + mFramesSinceStart = 0; + return ::android::OK; +} + +::android::status_t DriverStubImpl::transfer(void* buffer, size_t frameCount, + size_t* actualFrameCount, int32_t*) { + // No LOG_ENTRY as this is called very often. + if (!mIsInitialized) { + LOG(FATAL) << __func__ << ": must not happen for an uninitialized driver"; + } + if (mIsStandby) { + LOG(FATAL) << __func__ << ": must not happen while in standby"; + } + *actualFrameCount = frameCount; + if (mIsAsynchronous) { + if (mAsyncSleepTimeUs) usleep(mAsyncSleepTimeUs); + } else { + mFramesSinceStart += *actualFrameCount; + const long bufferDurationUs = (*actualFrameCount) * MICROS_PER_SECOND / mSampleRate; + const auto totalDurationUs = + (::android::uptimeNanos() - mStartTimeNs) / NANOS_PER_MICROSECOND; + const long totalOffsetUs = + mFramesSinceStart * MICROS_PER_SECOND / mSampleRate - totalDurationUs; + LOG(VERBOSE) << __func__ << ": totalOffsetUs " << totalOffsetUs; + if (totalOffsetUs > 0) { + const long sleepTimeUs = std::min(totalOffsetUs, bufferDurationUs); + LOG(VERBOSE) << __func__ << ": sleeping for " << sleepTimeUs << " us"; + usleep(sleepTimeUs); + } + } + if (mIsInput) { + uint8_t* byteBuffer = static_cast(buffer); + for (size_t i = 0; i < frameCount * mFrameSizeBytes; ++i) { + byteBuffer[i] = std::rand() % 255; + } + } + return ::android::OK; +} + +void DriverStubImpl::shutdown() { + LOG_ENTRY(); + mIsInitialized = false; +} + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/stub/ModuleStub.cpp b/audio/stub/ModuleStub.cpp new file mode 100644 index 0000000..9f6e0b4 --- /dev/null +++ b/audio/stub/ModuleStub.cpp @@ -0,0 +1,81 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the 
"License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#define LOG_TAG "AHAL_ModuleStub" +#include +#include + +#include "core-impl/Bluetooth.h" +#include "core-impl/ModuleStub.h" +#include "core-impl/StreamStub.h" + +using aidl::android::hardware::audio::common::SinkMetadata; +using aidl::android::hardware::audio::common::SourceMetadata; +using aidl::android::media::audio::common::AudioOffloadInfo; +using aidl::android::media::audio::common::AudioPort; +using aidl::android::media::audio::common::AudioPortConfig; +using aidl::android::media::audio::common::MicrophoneInfo; + +namespace aidl::android::hardware::audio::core { + +ndk::ScopedAStatus ModuleStub::getBluetooth(std::shared_ptr* _aidl_return) { + if (!mBluetooth) { + mBluetooth = ndk::SharedRefBase::make(); + } + *_aidl_return = mBluetooth.getInstance(); + LOG(DEBUG) << __func__ + << ": returning instance of IBluetooth: " << _aidl_return->get()->asBinder().get(); + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus ModuleStub::getBluetoothA2dp(std::shared_ptr* _aidl_return) { + if (!mBluetoothA2dp) { + mBluetoothA2dp = ndk::SharedRefBase::make(); + } + *_aidl_return = mBluetoothA2dp.getInstance(); + LOG(DEBUG) << __func__ << ": returning instance of IBluetoothA2dp: " + << _aidl_return->get()->asBinder().get(); + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus ModuleStub::getBluetoothLe(std::shared_ptr* _aidl_return) { + if (!mBluetoothLe) { + mBluetoothLe = ndk::SharedRefBase::make(); + } + *_aidl_return = 
mBluetoothLe.getInstance(); + LOG(DEBUG) << __func__ + << ": returning instance of IBluetoothLe: " << _aidl_return->get()->asBinder().get(); + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus ModuleStub::createInputStream(StreamContext&& context, + const SinkMetadata& sinkMetadata, + const std::vector& microphones, + std::shared_ptr* result) { + return createStreamInstance(result, std::move(context), sinkMetadata, + microphones); +} + +ndk::ScopedAStatus ModuleStub::createOutputStream( + StreamContext&& context, const SourceMetadata& sourceMetadata, + const std::optional& offloadInfo, std::shared_ptr* result) { + return createStreamInstance(result, std::move(context), sourceMetadata, + offloadInfo); +} + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/stub/StreamMmapStub.cpp b/audio/stub/StreamMmapStub.cpp new file mode 100644 index 0000000..f48aea4 --- /dev/null +++ b/audio/stub/StreamMmapStub.cpp @@ -0,0 +1,286 @@ +/* + * Copyright (C) 2025 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include + +#define LOG_TAG "AHAL_MmapStream" +#include +#include +#include +#include + +#include "core-impl/StreamMmapStub.h" + +using aidl::android::hardware::audio::common::SinkMetadata; +using aidl::android::hardware::audio::common::SourceMetadata; +using aidl::android::media::audio::common::AudioOffloadInfo; +using aidl::android::media::audio::common::MicrophoneInfo; + +namespace aidl::android::hardware::audio::core { + +namespace mmap { + +std::string DspSimulatorLogic::init() { + { + std::lock_guard l(mSharedState.lock); + mSharedState.mmapPos.timeNs = StreamDescriptor::Position::UNKNOWN; + mSharedState.mmapPos.frames = StreamDescriptor::Position::UNKNOWN; + } + // Progress in buffer size chunks to make sure that VTS tolerates infrequent position updates + // (see b/350998390). + mCycleDurationUs = (mSharedState.bufferSizeBytes / mSharedState.frameSizeBytes) * + MICROS_PER_SECOND / mSharedState.sampleRate; + return ""; +} + +DspSimulatorLogic::Status DspSimulatorLogic::cycle() { + // Simulate DSP moving along in real time. + const int64_t timeBeginNs = ::android::uptimeNanos(); + usleep(mCycleDurationUs); + int64_t newFrames; + std::lock_guard l(mSharedState.lock); + if (mMemBegin != mSharedState.sharedMemory) { + mMemBegin = mSharedState.sharedMemory; + if (mMemBegin != nullptr) mMemPos = mMemBegin; + } + if (mMemBegin != nullptr) { + mSharedState.mmapPos.timeNs = ::android::uptimeNanos(); + newFrames = (mSharedState.mmapPos.timeNs - timeBeginNs) * mSharedState.sampleRate / + NANOS_PER_SECOND; + // Restore the reported frames position to ensure continuity. 
+ if (mSharedState.mmapPos.frames == StreamDescriptor::Position::UNKNOWN) { + mSharedState.mmapPos.frames = mLastFrames; + } + mSharedState.mmapPos.frames += newFrames; + mLastFrames = mSharedState.mmapPos.frames; + if (mSharedState.isInput) { + for (size_t i = 0; i < static_cast(newFrames) * mSharedState.frameSizeBytes; + ++i) { + *mMemPos++ = std::rand() % 255; + if (mMemPos >= mMemBegin + mSharedState.bufferSizeBytes) mMemPos = mMemBegin; + } + } + } else { + LOG(WARNING) << "No shared memory but the DSP is active"; + mSharedState.mmapPos.timeNs = StreamDescriptor::Position::UNKNOWN; + mSharedState.mmapPos.frames = StreamDescriptor::Position::UNKNOWN; + } + return Status::CONTINUE; +} + +} // namespace mmap + +using mmap::DspSimulatorState; + +DriverMmapStubImpl::DriverMmapStubImpl(const StreamContext& context) + : DriverStubImpl(context, 0 /*asyncSleepTimeUs*/), + mState{mIsInput, mSampleRate, static_cast(mFrameSizeBytes), + mBufferSizeFrames * mFrameSizeBytes}, + mDspWorker(mState) { + LOG_IF(FATAL, !context.isMmap()) << "The steam must be used in MMAP mode"; +} + +::android::status_t DriverMmapStubImpl::init(DriverCallbackInterface* callback) { + RETURN_STATUS_IF_ERROR(DriverStubImpl::init(callback)); + return ::android::OK; +} + +::android::status_t DriverMmapStubImpl::drain(StreamDescriptor::DrainMode drainMode) { + RETURN_STATUS_IF_ERROR(DriverStubImpl::drain(drainMode)); + mDspWorker.pause(); + return ::android::OK; +} + +::android::status_t DriverMmapStubImpl::pause() { + RETURN_STATUS_IF_ERROR(DriverStubImpl::pause()); + mDspWorker.pause(); + return ::android::OK; +} + +::android::status_t DriverMmapStubImpl::start() { + RETURN_STATUS_IF_ERROR(DriverStubImpl::start()); + RETURN_STATUS_IF_ERROR(startWorkerIfNeeded()); + mDspWorker.resume(); + return ::android::OK; +} + +::android::status_t DriverMmapStubImpl::transfer(void*, size_t, size_t*, int32_t*) { + // Do not call into DriverStubImpl::transfer + if (!mIsInitialized) { + LOG(FATAL) << __func__ << ": 
must not happen for an uninitialized driver"; + } + if (mIsStandby) { + LOG(FATAL) << __func__ << ": must not happen while in standby"; + } + RETURN_STATUS_IF_ERROR(startWorkerIfNeeded()); + mDspWorker.resume(); + return ::android::OK; +} + +void DriverMmapStubImpl::shutdown() { + LOG(DEBUG) << __func__ << ": stopping the DSP simulator worker"; + mDspWorker.stop(); + std::lock_guard l(mState.lock); + releaseSharedMemory(); + DriverStubImpl::shutdown(); +} + +::android::status_t DriverMmapStubImpl::initSharedMemory(int ashmemFd) { + { + std::lock_guard l(mState.lock); + if (ashmemFd == -1) { + mState.sharedMemory = nullptr; + return ::android::BAD_VALUE; + } + RETURN_STATUS_IF_ERROR(releaseSharedMemory()); + } + uint8_t* sharedMemory = static_cast(::mmap( + nullptr, mState.bufferSizeBytes, PROT_READ | PROT_WRITE, MAP_SHARED, ashmemFd, 0)); + if (sharedMemory == reinterpret_cast(MAP_FAILED) || sharedMemory == nullptr) { + PLOG(ERROR) << "mmap failed for size " << mState.bufferSizeBytes << ", fd " << ashmemFd; + return ::android::NO_INIT; + } + std::lock_guard l(mState.lock); + mState.sharedMemory = sharedMemory; + return ::android::OK; +} + +::android::status_t DriverMmapStubImpl::releaseSharedMemory() { + if (mState.sharedMemory != nullptr) { + LOG(DEBUG) << __func__ << ": unmapping shared memory"; + if (munmap(mState.sharedMemory, mState.bufferSizeBytes) != 0) { + PLOG(ERROR) << "munmap failed for size " << mState.bufferSizeBytes; + return ::android::INVALID_OPERATION; + } + mState.sharedMemory = nullptr; + } + return ::android::OK; +} + +::android::status_t DriverMmapStubImpl::startWorkerIfNeeded() { + if (!mDspWorkerStarted) { + // This is an "audio service thread," must have elevated priority. 
+ if (!mDspWorker.start("dsp_sim", ANDROID_PRIORITY_URGENT_AUDIO)) { + return ::android::NO_INIT; + } + mDspWorkerStarted = true; + } + return ::android::OK; +} + +::android::status_t DriverMmapStubImpl::refinePosition(StreamDescriptor::Position* position) { + std::lock_guard l(mState.lock); + *position = mState.mmapPos; + return ::android::OK; +} + +::android::status_t DriverMmapStubImpl::getMmapPositionAndLatency( + StreamDescriptor::Position* position, int32_t* latencyMs) { + { + std::lock_guard l(mState.lock); + *position = mState.mmapPos; + } + const size_t latencyFrames = mBufferSizeFrames / 2; + if (position->frames != StreamDescriptor::Position::UNKNOWN) { + position->frames += latencyFrames; + } + *latencyMs = latencyFrames * MILLIS_PER_SECOND / mSampleRate; + return ::android::OK; +} + +const std::string StreamMmapStub::kCreateMmapBufferName = "aosp.createMmapBuffer"; + +StreamMmapStub::StreamMmapStub(StreamContext* context, const Metadata& metadata) + : StreamCommonImpl(context, metadata), DriverMmapStubImpl(getContext()) {} + +StreamMmapStub::~StreamMmapStub() { + cleanupWorker(); +} + +ndk::ScopedAStatus StreamMmapStub::getVendorParameters(const std::vector& in_ids, + std::vector* _aidl_return) { + std::vector unprocessedIds; + for (const auto& id : in_ids) { + if (id == kCreateMmapBufferName) { + LOG(DEBUG) << __func__ << ": " << id; + MmapBufferDescriptor mmapDesc; + RETURN_STATUS_IF_ERROR(createMmapBuffer(&mmapDesc)); + VendorParameter createMmapBuffer{.id = id}; + createMmapBuffer.ext.setParcelable(mmapDesc); + LOG(DEBUG) << __func__ << ": returning " << mmapDesc.toString(); + _aidl_return->push_back(std::move(createMmapBuffer)); + } else { + unprocessedIds.push_back(id); + } + } + if (!unprocessedIds.empty()) { + return StreamCommonImpl::getVendorParameters(unprocessedIds, _aidl_return); + } + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus StreamMmapStub::setVendorParameters( + const std::vector& in_parameters, bool in_async) { + 
std::vector unprocessedParameters; + for (const auto& param : in_parameters) { + if (param.id == kCreateMmapBufferName) { + LOG(DEBUG) << __func__ << ": " << param.id; + // The value is irrelevant. The fact that this parameter can be "set" is an + // indication that the method can be used by the client via 'getVendorParameters'. + } else { + unprocessedParameters.push_back(param); + } + } + if (!unprocessedParameters.empty()) { + return StreamCommonImpl::setVendorParameters(unprocessedParameters, in_async); + } + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus StreamMmapStub::createMmapBuffer(MmapBufferDescriptor* desc) { + const size_t bufferSizeFrames = mContext.getBufferSizeInFrames(); + const size_t bufferSizeBytes = static_cast(bufferSizeFrames) * mContext.getFrameSize(); + const std::string regionName = + std::string("mmap-sim-") + std::to_string(mContext.getMixPortHandle()); + int fd = ashmem_create_region(regionName.c_str(), bufferSizeBytes); + if (fd < 0) { + PLOG(ERROR) << __func__ << ": failed to create shared memory region of " << bufferSizeBytes + << " bytes"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + mSharedMemoryFd = ndk::ScopedFileDescriptor(fd); + if (initSharedMemory(mSharedMemoryFd.get()) != ::android::OK) { + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + desc->sharedMemory.fd = mSharedMemoryFd.dup(); + desc->sharedMemory.size = bufferSizeBytes; + desc->burstSizeFrames = bufferSizeFrames / 2; + desc->flags = 0; + LOG(DEBUG) << __func__ << ": " << desc->toString(); + return ndk::ScopedAStatus::ok(); +} + +StreamInMmapStub::StreamInMmapStub(StreamContext&& context, const SinkMetadata& sinkMetadata, + const std::vector& microphones) + : StreamIn(std::move(context), microphones), StreamMmapStub(&mContextInstance, sinkMetadata) {} + +StreamOutMmapStub::StreamOutMmapStub(StreamContext&& context, const SourceMetadata& sourceMetadata, + const std::optional& offloadInfo) + : 
StreamOut(std::move(context), offloadInfo), + StreamMmapStub(&mContextInstance, sourceMetadata) {} + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/stub/StreamOffloadStub.cpp b/audio/stub/StreamOffloadStub.cpp new file mode 100644 index 0000000..5f5f741 --- /dev/null +++ b/audio/stub/StreamOffloadStub.cpp @@ -0,0 +1,254 @@ +/* + * Copyright (C) 2025 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "AHAL_OffloadStream" +#include +#include +#include +#include + +#include "ApeHeader.h" +#include "core-impl/StreamOffloadStub.h" + +using aidl::android::hardware::audio::common::SourceMetadata; +using aidl::android::media::audio::common::AudioDevice; +using aidl::android::media::audio::common::AudioOffloadInfo; +using aidl::android::media::audio::common::MicrophoneInfo; + +namespace aidl::android::hardware::audio::core { + +namespace offload { + +std::string DspSimulatorLogic::init() { + return ""; +} + +DspSimulatorLogic::Status DspSimulatorLogic::cycle() { + std::vector> clipNotifies; + // Simulate playback. 
+ const int64_t timeBeginNs = ::android::uptimeNanos(); + usleep(1000); + const int64_t clipFramesPlayed = + (::android::uptimeNanos() - timeBeginNs) * mSharedState.sampleRate / NANOS_PER_SECOND; + const int64_t bufferFramesConsumed = clipFramesPlayed / 2; // assume 1:2 compression ratio + int64_t bufferFramesLeft = 0, bufferNotifyFrames = DspSimulatorState::kSkipBufferNotifyFrames; + { + std::lock_guard l(mSharedState.lock); + mSharedState.bufferFramesLeft = + mSharedState.bufferFramesLeft > bufferFramesConsumed + ? mSharedState.bufferFramesLeft - bufferFramesConsumed + : 0; + int64_t framesPlayed = clipFramesPlayed; + while (framesPlayed > 0 && !mSharedState.clipFramesLeft.empty()) { + LOG(VERBOSE) << __func__ << ": clips: " + << ::android::internal::ToString(mSharedState.clipFramesLeft); + const bool hasNextClip = mSharedState.clipFramesLeft.size() > 1; + if (mSharedState.clipFramesLeft[0] > framesPlayed) { + mSharedState.clipFramesLeft[0] -= framesPlayed; + framesPlayed = 0; + if (mSharedState.clipFramesLeft[0] <= mSharedState.earlyNotifyFrames) { + clipNotifies.emplace_back(mSharedState.clipFramesLeft[0], hasNextClip); + } + } else { + clipNotifies.emplace_back(0 /*clipFramesLeft*/, hasNextClip); + framesPlayed -= mSharedState.clipFramesLeft[0]; + mSharedState.clipFramesLeft.erase(mSharedState.clipFramesLeft.begin()); + if (!hasNextClip) { + // Since it's a simulation, the buffer consumption rate it not real, + // thus 'bufferFramesLeft' might still have something, need to erase it. + mSharedState.bufferFramesLeft = 0; + } + } + } + bufferFramesLeft = mSharedState.bufferFramesLeft; + bufferNotifyFrames = mSharedState.bufferNotifyFrames; + if (bufferFramesLeft <= bufferNotifyFrames) { + // Suppress further notifications. 
+ mSharedState.bufferNotifyFrames = DspSimulatorState::kSkipBufferNotifyFrames; + } + } + if (bufferFramesLeft <= bufferNotifyFrames) { + LOG(DEBUG) << __func__ << ": sending onBufferStateChange: " << bufferFramesLeft; + mSharedState.callback->onBufferStateChange(bufferFramesLeft); + } + for (const auto& notify : clipNotifies) { + LOG(DEBUG) << __func__ << ": sending onClipStateChange: " << notify.first << ", " + << notify.second; + mSharedState.callback->onClipStateChange(notify.first, notify.second); + } + return Status::CONTINUE; +} + +} // namespace offload + +using offload::DspSimulatorState; + +DriverOffloadStubImpl::DriverOffloadStubImpl(const StreamContext& context) + : DriverStubImpl(context, 0 /*asyncSleepTimeUs*/), + mBufferNotifyFrames(static_cast(context.getBufferSizeInFrames()) / 2), + mState{context.getFormat().encoding, context.getSampleRate(), + 250 /*earlyNotifyMs*/ * context.getSampleRate() / MILLIS_PER_SECOND}, + mDspWorker(mState) { + LOG_IF(FATAL, !mIsAsynchronous) << "The steam must be used in asynchronous mode"; +} + +::android::status_t DriverOffloadStubImpl::init(DriverCallbackInterface* callback) { + RETURN_STATUS_IF_ERROR(DriverStubImpl::init(callback)); + if (!StreamOffloadStub::getSupportedEncodings().count(mState.formatEncoding)) { + LOG(ERROR) << __func__ << ": encoded format \"" << mState.formatEncoding + << "\" is not supported"; + return ::android::NO_INIT; + } + mState.callback = callback; + return ::android::OK; +} + +::android::status_t DriverOffloadStubImpl::drain(StreamDescriptor::DrainMode drainMode) { + RETURN_STATUS_IF_ERROR(DriverStubImpl::drain(drainMode)); + std::lock_guard l(mState.lock); + if (!mState.clipFramesLeft.empty()) { + // Cut playback of the current clip. + mState.clipFramesLeft[0] = std::min(mState.earlyNotifyFrames * 2, mState.clipFramesLeft[0]); + if (drainMode == StreamDescriptor::DrainMode::DRAIN_ALL) { + // Make sure there are no clips after the current one. 
+ mState.clipFramesLeft.resize(1); + } + } + mState.bufferNotifyFrames = DspSimulatorState::kSkipBufferNotifyFrames; + return ::android::OK; +} + +::android::status_t DriverOffloadStubImpl::flush() { + RETURN_STATUS_IF_ERROR(DriverStubImpl::flush()); + mDspWorker.pause(); + { + std::lock_guard l(mState.lock); + mState.clipFramesLeft.clear(); + mState.bufferFramesLeft = 0; + mState.bufferNotifyFrames = DspSimulatorState::kSkipBufferNotifyFrames; + } + return ::android::OK; +} + +::android::status_t DriverOffloadStubImpl::pause() { + RETURN_STATUS_IF_ERROR(DriverStubImpl::pause()); + mDspWorker.pause(); + { + std::lock_guard l(mState.lock); + mState.bufferNotifyFrames = DspSimulatorState::kSkipBufferNotifyFrames; + } + return ::android::OK; +} + +::android::status_t DriverOffloadStubImpl::start() { + RETURN_STATUS_IF_ERROR(DriverStubImpl::start()); + RETURN_STATUS_IF_ERROR(startWorkerIfNeeded()); + bool hasClips; // Can be start after paused draining. + { + std::lock_guard l(mState.lock); + hasClips = !mState.clipFramesLeft.empty(); + LOG(DEBUG) << __func__ + << ": clipFramesLeft: " << ::android::internal::ToString(mState.clipFramesLeft); + mState.bufferNotifyFrames = DspSimulatorState::kSkipBufferNotifyFrames; + } + if (hasClips) { + mDspWorker.resume(); + } + return ::android::OK; +} + +::android::status_t DriverOffloadStubImpl::transfer(void* buffer, size_t frameCount, + size_t* actualFrameCount, int32_t* latencyMs) { + RETURN_STATUS_IF_ERROR( + DriverStubImpl::transfer(buffer, frameCount, actualFrameCount, latencyMs)); + RETURN_STATUS_IF_ERROR(startWorkerIfNeeded()); + // Scan the buffer for clip headers. + *actualFrameCount = frameCount; + while (buffer != nullptr && frameCount > 0) { + ApeHeader* apeHeader = nullptr; + void* prevBuffer = buffer; + buffer = findApeHeader(prevBuffer, frameCount * mFrameSizeBytes, &apeHeader); + if (buffer != nullptr && apeHeader != nullptr) { + // Frame count does not include the size of the header data. 
+ const size_t headerSizeFrames = + (static_cast(buffer) - static_cast(prevBuffer)) / + mFrameSizeBytes; + frameCount -= headerSizeFrames; + *actualFrameCount = frameCount; + // Stage the clip duration into the DSP worker's queue. + const int64_t clipDurationFrames = getApeClipDurationFrames(apeHeader); + const int32_t clipSampleRate = apeHeader->sampleRate; + LOG(DEBUG) << __func__ << ": found APE clip of " << clipDurationFrames << " frames, " + << "sample rate: " << clipSampleRate; + if (clipSampleRate == mState.sampleRate) { + std::lock_guard l(mState.lock); + mState.clipFramesLeft.push_back(clipDurationFrames); + } else { + LOG(ERROR) << __func__ << ": clip sample rate " << clipSampleRate + << " does not match stream sample rate " << mState.sampleRate; + } + } else { + frameCount = 0; + } + } + { + std::lock_guard l(mState.lock); + mState.bufferFramesLeft = *actualFrameCount; + mState.bufferNotifyFrames = mBufferNotifyFrames; + } + mDspWorker.resume(); + return ::android::OK; +} + +void DriverOffloadStubImpl::shutdown() { + LOG(DEBUG) << __func__ << ": stopping the DSP simulator worker"; + mDspWorker.stop(); + DriverStubImpl::shutdown(); +} + +::android::status_t DriverOffloadStubImpl::startWorkerIfNeeded() { + if (!mDspWorkerStarted) { + // This is an "audio service thread," must have elevated priority. 
+ if (!mDspWorker.start("dsp_sim", ANDROID_PRIORITY_URGENT_AUDIO)) { + return ::android::NO_INIT; + } + mDspWorkerStarted = true; + } + return ::android::OK; +} + +// static +const std::set& StreamOffloadStub::getSupportedEncodings() { + static const std::set kSupportedEncodings = { + "audio/x-ape", + }; + return kSupportedEncodings; +} + +StreamOffloadStub::StreamOffloadStub(StreamContext* context, const Metadata& metadata) + : StreamCommonImpl(context, metadata), DriverOffloadStubImpl(getContext()) {} + +StreamOffloadStub::~StreamOffloadStub() { + cleanupWorker(); +} + +StreamOutOffloadStub::StreamOutOffloadStub(StreamContext&& context, + const SourceMetadata& sourceMetadata, + const std::optional& offloadInfo) + : StreamOut(std::move(context), offloadInfo), + StreamOffloadStub(&mContextInstance, sourceMetadata) {} + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/stub/StreamStub.cpp b/audio/stub/StreamStub.cpp new file mode 100644 index 0000000..2278880 --- /dev/null +++ b/audio/stub/StreamStub.cpp @@ -0,0 +1,46 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "AHAL_Stream" +#include + +#include "core-impl/Module.h" +#include "core-impl/StreamStub.h" + +using aidl::android::hardware::audio::common::SinkMetadata; +using aidl::android::hardware::audio::common::SourceMetadata; +using aidl::android::media::audio::common::AudioDevice; +using aidl::android::media::audio::common::AudioOffloadInfo; +using aidl::android::media::audio::common::MicrophoneInfo; + +namespace aidl::android::hardware::audio::core { + +StreamStub::StreamStub(StreamContext* context, const Metadata& metadata) + : StreamCommonImpl(context, metadata), DriverStubImpl(getContext()) {} + +StreamStub::~StreamStub() { + cleanupWorker(); +} + +StreamInStub::StreamInStub(StreamContext&& context, const SinkMetadata& sinkMetadata, + const std::vector& microphones) + : StreamIn(std::move(context), microphones), StreamStub(&mContextInstance, sinkMetadata) {} + +StreamOutStub::StreamOutStub(StreamContext&& context, const SourceMetadata& sourceMetadata, + const std::optional& offloadInfo) + : StreamOut(std::move(context), offloadInfo), StreamStub(&mContextInstance, sourceMetadata) {} + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/tests/AlsaUtilsTest.cpp b/audio/tests/AlsaUtilsTest.cpp new file mode 100644 index 0000000..226eea0 --- /dev/null +++ b/audio/tests/AlsaUtilsTest.cpp @@ -0,0 +1,253 @@ +/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "AlsaUtilsTest" + +#include +#include +#include +#include +#include + +extern "C" { +#include +} + +namespace alsa = ::aidl::android::hardware::audio::core::alsa; + +namespace { + +const static constexpr float kInt16tTolerance = 4; +const static constexpr float kIntTolerance = 1; +const static constexpr float kFloatTolerance = 1e-4; +const static constexpr float kUnityGain = 1; +const static constexpr int32_t kInt24Min = -(1 << 23); +const static constexpr int32_t kInt24Max = (1 << 23) - 1; +const static constexpr float kFloatMin = -1; +const static constexpr float kFloatMax = 1; +const static int32_t kQ8_23Min = 0x80000000; +const static int32_t kQ8_23Max = 0x7FFFFFFF; +const static std::vector kInt16Buffer = {10000, 100, 0, INT16_MAX, + INT16_MIN, -2500, 1000, -5800}; +const static std::vector kFloatBuffer = {0.5, -0.6, kFloatMin, 0.01, kFloatMax, 0.0}; +const static std::vector kInt32Buffer = {100, 0, 8000, INT32_MAX, INT32_MIN, -300}; +const static std::vector kQ8_23Buffer = { + kQ8_23Min, kQ8_23Max, 0x00000000, 0x00000001, 0x00400000, static_cast(0xFFD33333)}; +const static std::vector kInt24Buffer = {200, 10, -100, 0, kInt24Min, kInt24Max}; + +template +void* CopyToBuffer(int& bytesToTransfer, std::vector& destBuffer, + const std::vector& srcBuffer) { + bytesToTransfer = srcBuffer.size() * sizeof(T); + destBuffer = srcBuffer; + return destBuffer.data(); +} + +template +void VerifyTypedBufferResults(const std::vector& bufferWithGain, const std::vector& srcBuffer, + float gain, float tolerance) { + for (size_t i = 0; i < srcBuffer.size(); i++) { + EXPECT_NEAR(srcBuffer[i] * gain, static_cast(bufferWithGain[i]), tolerance); + } +} + +template +void VerifyTypedBufferResultsWithClamp(const std::vector& bufferWithGain, + const std::vector& srcBuffer, float gain, float tolerance, + T minValue, T maxValue) { + for (size_t i = 0; i < srcBuffer.size(); i++) { + float expectedResult = std::clamp(srcBuffer[i] * gain, static_cast(minValue), + 
static_cast(maxValue)); + EXPECT_NEAR(expectedResult, static_cast(bufferWithGain[i]), tolerance); + } +} + +} // namespace + +using ApplyGainTestParameters = std::tuple; +enum { INDEX_PCM_FORMAT, INDEX_CHANNEL_COUNT, INDEX_GAIN }; + +class ApplyGainTest : public ::testing::TestWithParam { + protected: + void SetUp() override; + void VerifyBufferResult(const pcm_format pcmFormat, const float gain); + void VerifyBufferResultWithClamp(const pcm_format pcmFormat, const float gain); + + pcm_format mPcmFormat; + int mBufferSizeBytes; + void* mBuffer; + + private: + std::vector mInt16BufferToConvert; + std::vector mFloatBufferToConvert; + std::vector mInt32BufferToConvert; + std::vector mQ8_23BufferToConvert; + std::vector mInt24BufferToConvert; +}; + +void ApplyGainTest::SetUp() { + mPcmFormat = std::get(GetParam()); + switch (mPcmFormat) { + case PCM_FORMAT_S16_LE: + mBuffer = CopyToBuffer(mBufferSizeBytes, mInt16BufferToConvert, kInt16Buffer); + break; + case PCM_FORMAT_FLOAT_LE: + mBuffer = CopyToBuffer(mBufferSizeBytes, mFloatBufferToConvert, kFloatBuffer); + break; + case PCM_FORMAT_S32_LE: + mBuffer = CopyToBuffer(mBufferSizeBytes, mInt32BufferToConvert, kInt32Buffer); + break; + case PCM_FORMAT_S24_LE: + mBuffer = CopyToBuffer(mBufferSizeBytes, mQ8_23BufferToConvert, kQ8_23Buffer); + break; + case PCM_FORMAT_S24_3LE: { + std::vector original32BitBuffer(kInt24Buffer.begin(), kInt24Buffer.end()); + for (auto& val : original32BitBuffer) { + val <<= 8; + } + mInt24BufferToConvert = std::vector(kInt24Buffer.size()); + mBufferSizeBytes = kInt24Buffer.size() * 3 * sizeof(uint8_t); + memcpy_to_p24_from_i32(reinterpret_cast(mInt24BufferToConvert.data()), + original32BitBuffer.data(), kInt24Buffer.size()); + mBuffer = mInt24BufferToConvert.data(); + } break; + default: + FAIL() << "Unsupported pcm format: " << mPcmFormat; + return; + } +} + +void ApplyGainTest::VerifyBufferResult(const pcm_format pcmFormat, const float gain) { + switch (pcmFormat) { + case 
PCM_FORMAT_S16_LE: + VerifyTypedBufferResults(mInt16BufferToConvert, kInt16Buffer, gain, kInt16tTolerance); + break; + case PCM_FORMAT_FLOAT_LE: + VerifyTypedBufferResults(mFloatBufferToConvert, kFloatBuffer, gain, kFloatTolerance); + break; + case PCM_FORMAT_S32_LE: + VerifyTypedBufferResults(mInt32BufferToConvert, kInt32Buffer, gain, kIntTolerance); + break; + case PCM_FORMAT_S24_LE: { + for (size_t i = 0; i < kQ8_23Buffer.size(); i++) { + EXPECT_NEAR(float_from_q8_23(kQ8_23Buffer[i]) * gain, + static_cast(float_from_q8_23(mQ8_23BufferToConvert[i])), + kFloatTolerance); + } + } break; + case PCM_FORMAT_S24_3LE: { + size_t bufferSize = kInt24Buffer.size(); + std::vector result32BitBuffer(bufferSize); + memcpy_to_i32_from_p24(result32BitBuffer.data(), + reinterpret_cast(mInt24BufferToConvert.data()), + bufferSize); + for (size_t i = 0; i < bufferSize; i++) { + EXPECT_NEAR(kInt24Buffer[i] * gain, result32BitBuffer[i] >> 8, kIntTolerance); + } + } break; + default: + return; + } +} + +void ApplyGainTest::VerifyBufferResultWithClamp(const pcm_format pcmFormat, const float gain) { + switch (pcmFormat) { + case PCM_FORMAT_S16_LE: + VerifyTypedBufferResultsWithClamp(mInt16BufferToConvert, kInt16Buffer, gain, + kInt16tTolerance, static_cast(INT16_MIN), + static_cast(INT16_MAX)); + break; + case PCM_FORMAT_FLOAT_LE: + VerifyTypedBufferResultsWithClamp(mFloatBufferToConvert, kFloatBuffer, gain, + kFloatTolerance, kFloatMin, kFloatMax); + break; + case PCM_FORMAT_S32_LE: + VerifyTypedBufferResultsWithClamp(mInt32BufferToConvert, kInt32Buffer, gain, + kIntTolerance, INT32_MIN, INT32_MAX); + break; + case PCM_FORMAT_S24_LE: { + for (size_t i = 0; i < kQ8_23Buffer.size(); i++) { + float expectedResult = + std::clamp(float_from_q8_23(kQ8_23Buffer[i]) * gain, + float_from_q8_23(kQ8_23Min), float_from_q8_23(kQ8_23Max)); + EXPECT_NEAR(expectedResult, + static_cast(float_from_q8_23(mQ8_23BufferToConvert[i])), + kFloatTolerance); + } + } break; + case PCM_FORMAT_S24_3LE: { + size_t 
bufferSize = kInt24Buffer.size(); + std::vector result32BitBuffer(bufferSize); + memcpy_to_i32_from_p24(result32BitBuffer.data(), + reinterpret_cast(mInt24BufferToConvert.data()), + bufferSize); + for (size_t i = 0; i < bufferSize; i++) { + result32BitBuffer[i] >>= 8; + } + VerifyTypedBufferResultsWithClamp(result32BitBuffer, kInt24Buffer, gain, kIntTolerance, + kInt24Min, kInt24Max); + } break; + default: + return; + } +} + +TEST_P(ApplyGainTest, ApplyGain) { + float gain = std::get(GetParam()); + int channelCount = std::get(GetParam()); + + alsa::applyGain(mBuffer, gain, mBufferSizeBytes, mPcmFormat, channelCount); + + if (gain <= kUnityGain) { + VerifyBufferResult(mPcmFormat, gain); + } else { + VerifyBufferResultWithClamp(mPcmFormat, gain); + } +} + +std::string GetApplyGainTestName(const testing::TestParamInfo& info) { + std::string testNameStr; + switch (std::get(info.param)) { + case PCM_FORMAT_S16_LE: + testNameStr = "S16_LE"; + break; + case PCM_FORMAT_FLOAT_LE: + testNameStr = "Float_LE"; + break; + case PCM_FORMAT_S32_LE: + testNameStr = "S32_LE"; + break; + case PCM_FORMAT_S24_LE: + testNameStr = "S24_LE"; + break; + case PCM_FORMAT_S24_3LE: + testNameStr = "S24_3LE"; + break; + default: + testNameStr = "UnsupportedPcmFormat"; + break; + } + testNameStr += std::get(info.param) == 1 ? "_Mono_" : "_Stereo_"; + testNameStr += std::get(info.param) <= kUnityGain ? 
"WithoutClamp" : "WithClamp"; + return testNameStr; +} + +INSTANTIATE_TEST_SUITE_P(PerPcmFormat, ApplyGainTest, + testing::Combine(testing::Values(PCM_FORMAT_S16_LE, PCM_FORMAT_FLOAT_LE, + PCM_FORMAT_S32_LE, PCM_FORMAT_S24_LE, + PCM_FORMAT_S24_3LE), + testing::Values(1, 2), testing::Values(0.6f, 1.5f)), + GetApplyGainTestName); diff --git a/audio/tests/AudioPolicyConfigXmlConverterTest.cpp b/audio/tests/AudioPolicyConfigXmlConverterTest.cpp new file mode 100644 index 0000000..572bc5a --- /dev/null +++ b/audio/tests/AudioPolicyConfigXmlConverterTest.cpp @@ -0,0 +1,50 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +// #include +// #include +// #include + +#include +#include +#define LOG_TAG "AudioPolicyConfigXmlConverterTest" +#include + +#include +#include + +using aidl::android::hardware::audio::core::internal::AudioPolicyConfigXmlConverter; +using aidl::android::media::audio::common::AudioFormatDescription; + +namespace { + +void ValidateAudioFormatDescription(const AudioFormatDescription& format) { + auto conv = ::aidl::android::aidl2legacy_AudioFormatDescription_audio_format_t(format); + ASSERT_TRUE(conv.ok()) << format.toString(); +} + +} // namespace + +TEST(AudioPolicyConfigXmlConverterTest, DefaultSurroundSoundConfigIsValid) { + auto config = AudioPolicyConfigXmlConverter::getDefaultSurroundSoundConfig(); + for (const auto& family : config.formatFamilies) { + EXPECT_NO_FATAL_FAILURE(ValidateAudioFormatDescription(family.primaryFormat)); + SCOPED_TRACE(family.primaryFormat.toString()); + for (const auto& sub : family.subFormats) { + EXPECT_NO_FATAL_FAILURE(ValidateAudioFormatDescription(sub)); + } + } +} diff --git a/audio/usb/ModuleUsb.cpp b/audio/usb/ModuleUsb.cpp new file mode 100644 index 0000000..1d97bc4 --- /dev/null +++ b/audio/usb/ModuleUsb.cpp @@ -0,0 +1,136 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#define LOG_TAG "AHAL_ModuleUsb" +#include +#include + +#include "UsbAlsaMixerControl.h" +#include "alsa/Utils.h" +#include "core-impl/ModuleUsb.h" +#include "core-impl/StreamUsb.h" + +using aidl::android::hardware::audio::common::SinkMetadata; +using aidl::android::hardware::audio::common::SourceMetadata; +using aidl::android::media::audio::common::AudioDeviceDescription; +using aidl::android::media::audio::common::AudioOffloadInfo; +using aidl::android::media::audio::common::AudioPort; +using aidl::android::media::audio::common::AudioPortConfig; +using aidl::android::media::audio::common::AudioPortExt; +using aidl::android::media::audio::common::MicrophoneInfo; + +namespace aidl::android::hardware::audio::core { + +namespace { + +bool isUsbDevicePort(const AudioPort& audioPort) { + return audioPort.ext.getTag() == AudioPortExt::Tag::device && + audioPort.ext.get().device.type.connection == + AudioDeviceDescription::CONNECTION_USB; +} + +} // namespace + +ndk::ScopedAStatus ModuleUsb::getTelephony(std::shared_ptr* _aidl_return) { + *_aidl_return = nullptr; + LOG(DEBUG) << __func__ << ": returning null"; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus ModuleUsb::getBluetooth(std::shared_ptr* _aidl_return) { + *_aidl_return = nullptr; + LOG(DEBUG) << __func__ << ": returning null"; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus ModuleUsb::getMicMute(bool* _aidl_return __unused) { + LOG(DEBUG) << __func__ << ": is not supported"; + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +ndk::ScopedAStatus ModuleUsb::setMicMute(bool in_mute __unused) { + LOG(DEBUG) << __func__ << ": is not supported"; + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +ndk::ScopedAStatus ModuleUsb::createInputStream(StreamContext&& context, + const SinkMetadata& sinkMetadata, + const std::vector& microphones, + std::shared_ptr* result) { + return createStreamInstance(result, 
std::move(context), sinkMetadata, microphones); +} + +ndk::ScopedAStatus ModuleUsb::createOutputStream(StreamContext&& context, + const SourceMetadata& sourceMetadata, + const std::optional& offloadInfo, + std::shared_ptr* result) { + if (offloadInfo.has_value()) { + LOG(ERROR) << __func__ << ": offload is not supported"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + return createStreamInstance(result, std::move(context), sourceMetadata, + offloadInfo); +} + +ndk::ScopedAStatus ModuleUsb::populateConnectedDevicePort(AudioPort* audioPort, + int32_t nextPortId) { + if (!isUsbDevicePort(*audioPort)) { + LOG(ERROR) << __func__ << ": port id " << audioPort->id << " is not a usb device port"; + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + return ModuleAlsa::populateConnectedDevicePort(audioPort, nextPortId); +} + +ndk::ScopedAStatus ModuleUsb::checkAudioPatchEndpointsMatch( + const std::vector& sources, const std::vector& sinks) { + for (const auto& source : sources) { + for (const auto& sink : sinks) { + if (source->sampleRate != sink->sampleRate || + source->channelMask != sink->channelMask || source->format != sink->format) { + LOG(ERROR) << __func__ + << ": mismatch port configuration, source=" << source->toString() + << ", sink=" << sink->toString(); + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); + } + } + } + return ndk::ScopedAStatus::ok(); +} + +void ModuleUsb::onExternalDeviceConnectionChanged( + const ::aidl::android::media::audio::common::AudioPort& audioPort, bool connected) { + if (!isUsbDevicePort(audioPort)) { + return; + } + auto profile = alsa::getDeviceProfile(audioPort); + if (!profile.has_value()) { + return; + } + usb::UsbAlsaMixerControl::getInstance().setDeviceConnectionState(profile->card, getMasterMute(), + getMasterVolume(), connected); +} + +ndk::ScopedAStatus ModuleUsb::onMasterMuteChanged(bool mute) { + return 
usb::UsbAlsaMixerControl::getInstance().setMasterMute(mute); +} + +ndk::ScopedAStatus ModuleUsb::onMasterVolumeChanged(float volume) { + return usb::UsbAlsaMixerControl::getInstance().setMasterVolume(volume); +} + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/usb/StreamUsb.cpp b/audio/usb/StreamUsb.cpp new file mode 100644 index 0000000..9b10432 --- /dev/null +++ b/audio/usb/StreamUsb.cpp @@ -0,0 +1,123 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#define LOG_TAG "AHAL_StreamUsb" +#include +#include + +#include "UsbAlsaMixerControl.h" +#include "core-impl/StreamUsb.h" + +using aidl::android::hardware::audio::common::SinkMetadata; +using aidl::android::hardware::audio::common::SourceMetadata; +using aidl::android::media::audio::common::AudioDevice; +using aidl::android::media::audio::common::AudioOffloadInfo; +using aidl::android::media::audio::common::MicrophoneDynamicInfo; +using aidl::android::media::audio::common::MicrophoneInfo; + +namespace aidl::android::hardware::audio::core { + +StreamUsb::StreamUsb(StreamContext* context, const Metadata& metadata) + : StreamAlsa(context, metadata, 1 /*readWriteRetries*/) {} + +ndk::ScopedAStatus StreamUsb::setConnectedDevices( + const std::vector& connectedDevices) { + if (mIsInput && connectedDevices.size() > 1) { + LOG(ERROR) << __func__ << ": wrong device size(" << connectedDevices.size() + << ") for input stream"; + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); + } + std::vector connectedDeviceProfiles; + for (const auto& connectedDevice : connectedDevices) { + auto profile = alsa::getDeviceProfile(connectedDevice, mIsInput); + if (!profile.has_value()) { + LOG(ERROR) << __func__ + << ": unsupported device address=" << connectedDevice.address.toString(); + return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT); + } + connectedDeviceProfiles.push_back(*profile); + } + RETURN_STATUS_IF_ERROR(StreamCommonImpl::setConnectedDevices(connectedDevices)); + std::lock_guard guard(mLock); + mConnectedDeviceProfiles = std::move(connectedDeviceProfiles); + mConnectedDevicesUpdated.store(true, std::memory_order_release); + return ndk::ScopedAStatus::ok(); +} + +::android::status_t StreamUsb::transfer(void* buffer, size_t frameCount, size_t* actualFrameCount, + int32_t* latencyMs) { + if (mConnectedDevicesUpdated.load(std::memory_order_acquire)) { + // 'setConnectedDevices' was called. I/O will be restarted. 
+ *actualFrameCount = 0; + *latencyMs = StreamDescriptor::LATENCY_UNKNOWN; + return ::android::OK; + } + return StreamAlsa::transfer(buffer, frameCount, actualFrameCount, latencyMs); +} + +std::vector StreamUsb::getDeviceProfiles() { + std::vector connectedDevices; + { + std::lock_guard guard(mLock); + connectedDevices = mConnectedDeviceProfiles; + mConnectedDevicesUpdated.store(false, std::memory_order_release); + } + return connectedDevices; +} + +StreamInUsb::StreamInUsb(StreamContext&& context, const SinkMetadata& sinkMetadata, + const std::vector& microphones) + : StreamIn(std::move(context), microphones), StreamUsb(&mContextInstance, sinkMetadata) {} + +ndk::ScopedAStatus StreamInUsb::getActiveMicrophones( + std::vector* _aidl_return __unused) { + LOG(DEBUG) << __func__ << ": not supported"; + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); +} + +StreamOutUsb::StreamOutUsb(StreamContext&& context, const SourceMetadata& sourceMetadata, + const std::optional& offloadInfo) + : StreamOut(std::move(context), offloadInfo), + StreamUsb(&mContextInstance, sourceMetadata), + StreamOutHwVolumeHelper(&mContextInstance) {} + +ndk::ScopedAStatus StreamOutUsb::getHwVolume(std::vector* _aidl_return) { + return getHwVolumeImpl(_aidl_return); +} + +ndk::ScopedAStatus StreamOutUsb::setHwVolume(const std::vector& in_channelVolumes) { + auto currentVolumes = mHwVolumes; + RETURN_STATUS_IF_ERROR(setHwVolumeImpl(in_channelVolumes)); + // Avoid using mConnectedDeviceProfiles because it requires a lock. 
+ for (const auto& device : getConnectedDevices()) { + if (auto deviceProfile = alsa::getDeviceProfile(device, mIsInput); + deviceProfile.has_value()) { + if (auto result = usb::UsbAlsaMixerControl::getInstance().setVolumes( + deviceProfile->card, in_channelVolumes); + !result.isOk()) { + LOG(ERROR) << __func__ + << ": failed to set volume for device address=" << *deviceProfile; + mHwVolumes = currentVolumes; + return result; + } + } + } + return ndk::ScopedAStatus::ok(); +} + +} // namespace aidl::android::hardware::audio::core diff --git a/audio/usb/UsbAlsaMixerControl.cpp b/audio/usb/UsbAlsaMixerControl.cpp new file mode 100644 index 0000000..0a49446 --- /dev/null +++ b/audio/usb/UsbAlsaMixerControl.cpp @@ -0,0 +1,98 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "AHAL_UsbAlsaMixerControl" +#include + +#include + +#include "UsbAlsaMixerControl.h" + +namespace aidl::android::hardware::audio::core::usb { + +// static +UsbAlsaMixerControl& UsbAlsaMixerControl::getInstance() { + static UsbAlsaMixerControl gInstance; + return gInstance; +} + +void UsbAlsaMixerControl::setDeviceConnectionState(int card, bool masterMuted, float masterVolume, + bool connected) { + LOG(DEBUG) << __func__ << ": card=" << card << ", connected=" << connected; + if (connected) { + auto alsaMixer = std::make_shared(card); + if (!alsaMixer->isValid()) { + return; + } + alsaMixer->setMasterMute(masterMuted); + alsaMixer->setMasterVolume(masterVolume); + const std::lock_guard guard(mLock); + mMixerControls.emplace(card, alsaMixer); + } else { + const std::lock_guard guard(mLock); + mMixerControls.erase(card); + } +} + +ndk::ScopedAStatus UsbAlsaMixerControl::setMasterMute(bool mute) { + auto alsaMixers = getAlsaMixers(); + for (auto it = alsaMixers.begin(); it != alsaMixers.end(); ++it) { + if (auto result = it->second->setMasterMute(mute); !result.isOk()) { + // Return illegal state if there are multiple devices connected and one of them fails + // to set master mute. Otherwise, return the error from calling `setMasterMute`. + LOG(ERROR) << __func__ << ": failed to set master mute for card=" << it->first; + return alsaMixers.size() > 1 ? ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE) + : std::move(result); + } + } + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus UsbAlsaMixerControl::setMasterVolume(float volume) { + auto alsaMixers = getAlsaMixers(); + for (auto it = alsaMixers.begin(); it != alsaMixers.end(); ++it) { + if (auto result = it->second->setMasterVolume(volume); !result.isOk()) { + // Return illegal state if there are multiple devices connected and one of them fails + // to set master volume. Otherwise, return the error from calling `setMasterVolume`. 
+ LOG(ERROR) << __func__ << ": failed to set master volume for card=" << it->first; + return alsaMixers.size() > 1 ? ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE) + : std::move(result); + } + } + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus UsbAlsaMixerControl::setVolumes(int card, const std::vector& volumes) { + auto alsaMixer = getAlsaMixer(card); + if (alsaMixer == nullptr) { + LOG(ERROR) << __func__ << ": no mixer control found for card=" << card; + return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION); + } + return alsaMixer->setVolumes(volumes); +} + +std::shared_ptr UsbAlsaMixerControl::getAlsaMixer(int card) { + const std::lock_guard guard(mLock); + const auto it = mMixerControls.find(card); + return it == mMixerControls.end() ? nullptr : it->second; +} + +std::map> UsbAlsaMixerControl::getAlsaMixers() { + const std::lock_guard guard(mLock); + return mMixerControls; +} + +} // namespace aidl::android::hardware::audio::core::usb diff --git a/audio/usb/UsbAlsaMixerControl.h b/audio/usb/UsbAlsaMixerControl.h new file mode 100644 index 0000000..c3265f8 --- /dev/null +++ b/audio/usb/UsbAlsaMixerControl.h @@ -0,0 +1,54 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +#include +#include +#include +#include + +#include +#include + +#include "alsa/Mixer.h" + +namespace aidl::android::hardware::audio::core::usb { + +class UsbAlsaMixerControl { + public: + static UsbAlsaMixerControl& getInstance(); + + void setDeviceConnectionState(int card, bool masterMuted, float masterVolume, bool connected); + + // Master volume settings will be applied to all sound cards, it is only set by the + // USB module. + ndk::ScopedAStatus setMasterMute(bool muted); + ndk::ScopedAStatus setMasterVolume(float volume); + // The volume settings can be different on sound cards. It is controlled by streams. + ndk::ScopedAStatus setVolumes(int card, const std::vector& volumes); + + private: + std::shared_ptr getAlsaMixer(int card); + std::map> getAlsaMixers(); + + std::mutex mLock; + // A map whose key is the card number and value is a shared pointer to corresponding + // AlsaMixer object. + std::map> mMixerControls GUARDED_BY(mLock); +}; + +} // namespace aidl::android::hardware::audio::core::usb diff --git a/audio/virtualizer/Android.bp b/audio/virtualizer/Android.bp new file mode 100644 index 0000000..91d2abb --- /dev/null +++ b/audio/virtualizer/Android.bp @@ -0,0 +1,40 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package { + default_team: "trendy_team_android_media_audio_framework", + // See: http://go/android-license-faq + // A large-scale-change added 'default_applicable_licenses' to import + // all of the 'license_kinds' from "hardware_interfaces_license" + // to get the below license kinds: + // SPDX-license-identifier-Apache-2.0 + default_applicable_licenses: ["hardware_interfaces_license"], +} + +cc_library_shared { + name: "libvirtualizersw", + defaults: [ + "aidlaudioeffectservice_defaults", + ], + srcs: [ + "VirtualizerSw.cpp", + ":effectCommonFile", + ], + relative_install_path: "soundfx", + visibility: [ + "//hardware/interfaces/audio/aidl/default:__subpackages__", + ], +} diff --git a/audio/virtualizer/VirtualizerSw.cpp b/audio/virtualizer/VirtualizerSw.cpp new file mode 100644 index 0000000..091b0b7 --- /dev/null +++ b/audio/virtualizer/VirtualizerSw.cpp @@ -0,0 +1,228 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include + +#define LOG_TAG "AHAL_VirtualizerSw" +#include +#include +#include +#include + +#include "VirtualizerSw.h" + +using aidl::android::hardware::audio::effect::Descriptor; +using aidl::android::hardware::audio::effect::getEffectImplUuidVirtualizerSw; +using aidl::android::hardware::audio::effect::getEffectTypeUuidVirtualizer; +using aidl::android::hardware::audio::effect::IEffect; +using aidl::android::hardware::audio::effect::State; +using aidl::android::hardware::audio::effect::VirtualizerSw; +using aidl::android::media::audio::common::AudioChannelLayout; +using aidl::android::media::audio::common::AudioDeviceDescription; +using aidl::android::media::audio::common::AudioDeviceType; +using aidl::android::media::audio::common::AudioUuid; + +extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid, + std::shared_ptr* instanceSpp) { + if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidVirtualizerSw()) { + LOG(ERROR) << __func__ << "uuid not supported"; + return EX_ILLEGAL_ARGUMENT; + } + if (instanceSpp) { + *instanceSpp = ndk::SharedRefBase::make(); + LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created"; + return EX_NONE; + } else { + LOG(ERROR) << __func__ << " invalid input parameter!"; + return EX_ILLEGAL_ARGUMENT; + } +} + +extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) { + if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidVirtualizerSw()) { + LOG(ERROR) << __func__ << "uuid not supported"; + return EX_ILLEGAL_ARGUMENT; + } + *_aidl_return = VirtualizerSw::kDescriptor; + return EX_NONE; +} + +namespace aidl::android::hardware::audio::effect { + +const std::string VirtualizerSw::kEffectName = "VirtualizerSw"; + +const std::vector VirtualizerSw::kRanges = { + MAKE_RANGE(Virtualizer, strengthPm, 0, 1000), + /* speakerAngle is get-only, set min > max */ + MAKE_RANGE(Virtualizer, speakerAngles, {Virtualizer::ChannelAngle({.channel = 
1})}, + {Virtualizer::ChannelAngle({.channel = 0})})}; + +const Capability VirtualizerSw::kCapability = { + .range = Range::make(VirtualizerSw::kRanges)}; + +const Descriptor VirtualizerSw::kDescriptor = { + .common = {.id = {.type = getEffectTypeUuidVirtualizer(), + .uuid = getEffectImplUuidVirtualizerSw()}, + .flags = {.type = Flags::Type::INSERT, + .insert = Flags::Insert::FIRST, + .volume = Flags::Volume::CTRL}, + .name = VirtualizerSw::kEffectName, + .implementor = "The Android Open Source Project"}, + .capability = VirtualizerSw::kCapability}; + +ndk::ScopedAStatus VirtualizerSw::getDescriptor(Descriptor* _aidl_return) { + LOG(DEBUG) << __func__ << kDescriptor.toString(); + *_aidl_return = kDescriptor; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus VirtualizerSw::setParameterSpecific(const Parameter::Specific& specific) { + RETURN_IF(Parameter::Specific::virtualizer != specific.getTag(), EX_ILLEGAL_ARGUMENT, + "EffectNotSupported"); + + auto& vrParam = specific.get(); + RETURN_IF(!inRange(vrParam, kRanges), EX_ILLEGAL_ARGUMENT, "outOfRange"); + auto tag = vrParam.getTag(); + + switch (tag) { + case Virtualizer::strengthPm: { + RETURN_IF(mContext->setVrStrength(vrParam.get()) != + RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "setStrengthPmFailed"); + return ndk::ScopedAStatus::ok(); + } + case Virtualizer::device: { + RETURN_IF(mContext->setForcedDevice(vrParam.get()) != + RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "setDeviceFailed"); + return ndk::ScopedAStatus::ok(); + } + case Virtualizer::speakerAngles: + FALLTHROUGH_INTENDED; + case Virtualizer::vendor: { + LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT, + "VirtualizerTagNotSupported"); + } + } +} + +ndk::ScopedAStatus VirtualizerSw::getParameterSpecific(const Parameter::Id& id, + Parameter::Specific* specific) { + auto tag = id.getTag(); + RETURN_IF(Parameter::Id::virtualizerTag != tag, 
EX_ILLEGAL_ARGUMENT, "wrongIdTag"); + auto vrId = id.get(); + auto vrIdTag = vrId.getTag(); + switch (vrIdTag) { + case Virtualizer::Id::commonTag: + return getParameterVirtualizer(vrId.get(), specific); + case Virtualizer::Id::speakerAnglesPayload: + return getSpeakerAngles(vrId.get(), specific); + case Virtualizer::Id::vendorExtensionTag: { + LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT, + "VirtualizerTagNotSupported"); + } + } +} + +ndk::ScopedAStatus VirtualizerSw::getParameterVirtualizer(const Virtualizer::Tag& tag, + Parameter::Specific* specific) { + RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext"); + + Virtualizer vrParam; + switch (tag) { + case Virtualizer::strengthPm: { + vrParam.set(mContext->getVrStrength()); + break; + } + case Virtualizer::device: { + vrParam.set(mContext->getForcedDevice()); + break; + } + case Virtualizer::speakerAngles: + FALLTHROUGH_INTENDED; + case Virtualizer::vendor: { + LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT, + "VirtualizerTagNotSupported"); + } + } + + specific->set(vrParam); + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus VirtualizerSw::getSpeakerAngles(const Virtualizer::SpeakerAnglesPayload payload, + Parameter::Specific* specific) { + std::vector angles; + const auto chNum = ::aidl::android::hardware::audio::common::getChannelCount(payload.layout); + if (chNum == 1) { + angles = {{.channel = (int32_t)AudioChannelLayout::CHANNEL_FRONT_LEFT, + .azimuthDegree = 0, + .elevationDegree = 0}}; + } else if (chNum == 2) { + angles = {{.channel = (int32_t)AudioChannelLayout::CHANNEL_FRONT_LEFT, + .azimuthDegree = -90, + .elevationDegree = 0}, + {.channel = (int32_t)AudioChannelLayout::CHANNEL_FRONT_RIGHT, + .azimuthDegree = 90, + .elevationDegree = 0}}; + } else { + return 
ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT, + "supportUpTo2Ch"); + } + + Virtualizer param = Virtualizer::make(angles); + specific->set(param); + return ndk::ScopedAStatus::ok(); +} + +std::shared_ptr VirtualizerSw::createContext(const Parameter::Common& common) { + if (mContext) { + LOG(DEBUG) << __func__ << " context already exist"; + } else { + mContext = std::make_shared(1 /* statusFmqDepth */, common); + } + + return mContext; +} + +RetCode VirtualizerSw::releaseContext() { + if (mContext) { + mContext.reset(); + } + return RetCode::SUCCESS; +} + +// Processing method running in EffectWorker thread. +IEffect::Status VirtualizerSw::effectProcessImpl(float* in, float* out, int samples) { + // TODO: get data buffer and process. + LOG(DEBUG) << __func__ << " in " << in << " out " << out << " samples " << samples; + for (int i = 0; i < samples; i++) { + *out++ = *in++; + } + return {STATUS_OK, samples, samples}; +} + +RetCode VirtualizerSwContext::setVrStrength(int strength) { + mStrength = strength; + return RetCode::SUCCESS; +} + +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/virtualizer/VirtualizerSw.h b/audio/virtualizer/VirtualizerSw.h new file mode 100644 index 0000000..9287838 --- /dev/null +++ b/audio/virtualizer/VirtualizerSw.h @@ -0,0 +1,83 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +#include +#include +#include +#include + +#include "effect-impl/EffectImpl.h" + +namespace aidl::android::hardware::audio::effect { + +class VirtualizerSwContext final : public EffectContext { + public: + VirtualizerSwContext(int statusDepth, const Parameter::Common& common) + : EffectContext(statusDepth, common) { + LOG(DEBUG) << __func__; + } + RetCode setVrStrength(int strength); + int getVrStrength() const { return mStrength; } + RetCode setForcedDevice( + const ::aidl::android::media::audio::common::AudioDeviceDescription& device) { + mForceDevice = device; + return RetCode::SUCCESS; + } + aidl::android::media::audio::common::AudioDeviceDescription getForcedDevice() const { + return mForceDevice; + } + + private: + int mStrength = 0; + ::aidl::android::media::audio::common::AudioDeviceDescription mForceDevice; +}; + +class VirtualizerSw final : public EffectImpl { + public: + static const std::string kEffectName; + static const Capability kCapability; + static const Descriptor kDescriptor; + VirtualizerSw() { LOG(DEBUG) << __func__; } + ~VirtualizerSw() { + cleanUp(); + LOG(DEBUG) << __func__; + } + + ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override; + ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) + REQUIRES(mImplMutex) override; + ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific) + REQUIRES(mImplMutex) override; + + std::shared_ptr createContext(const Parameter::Common& common) + REQUIRES(mImplMutex) override; + RetCode releaseContext() REQUIRES(mImplMutex) override; + + IEffect::Status effectProcessImpl(float* in, float* out, int samples) override; + std::string getEffectName() override { return kEffectName; } + + private: + static const std::vector kRanges; + std::shared_ptr mContext GUARDED_BY(mImplMutex); + + ndk::ScopedAStatus getParameterVirtualizer(const Virtualizer::Tag& tag, + Parameter::Specific* specific) REQUIRES(mImplMutex); + 
ndk::ScopedAStatus getSpeakerAngles(const Virtualizer::SpeakerAnglesPayload payload, + Parameter::Specific* specific) REQUIRES(mImplMutex); +}; +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/visualizer/Android.bp b/audio/visualizer/Android.bp new file mode 100644 index 0000000..af8f574 --- /dev/null +++ b/audio/visualizer/Android.bp @@ -0,0 +1,40 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package { + default_team: "trendy_team_android_media_audio_framework", + // See: http://go/android-license-faq + // A large-scale-change added 'default_applicable_licenses' to import + // all of the 'license_kinds' from "hardware_interfaces_license" + // to get the below license kinds: + // SPDX-license-identifier-Apache-2.0 + default_applicable_licenses: ["hardware_interfaces_license"], +} + +cc_library_shared { + name: "libvisualizersw", + defaults: [ + "aidlaudioeffectservice_defaults", + ], + srcs: [ + "VisualizerSw.cpp", + ":effectCommonFile", + ], + relative_install_path: "soundfx", + visibility: [ + "//hardware/interfaces/audio/aidl/default", + ], +} diff --git a/audio/visualizer/VisualizerSw.cpp b/audio/visualizer/VisualizerSw.cpp new file mode 100644 index 0000000..54f7f1c --- /dev/null +++ b/audio/visualizer/VisualizerSw.cpp @@ -0,0 +1,230 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "AHAL_VisualizerSw" + +#include +#include + +#include "VisualizerSw.h" + +using aidl::android::hardware::audio::effect::Descriptor; +using aidl::android::hardware::audio::effect::getEffectImplUuidVisualizerSw; +using aidl::android::hardware::audio::effect::getEffectTypeUuidVisualizer; +using aidl::android::hardware::audio::effect::IEffect; +using aidl::android::hardware::audio::effect::State; +using aidl::android::hardware::audio::effect::VisualizerSw; +using aidl::android::media::audio::common::AudioUuid; + +extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid, + std::shared_ptr* instanceSpp) { + if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidVisualizerSw()) { + LOG(ERROR) << __func__ << "uuid not supported"; + return EX_ILLEGAL_ARGUMENT; + } + if (instanceSpp) { + *instanceSpp = ndk::SharedRefBase::make(); + LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created"; + return EX_NONE; + } else { + LOG(ERROR) << __func__ << " invalid input parameter!"; + return EX_ILLEGAL_ARGUMENT; + } +} + +extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) { + if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidVisualizerSw()) { + LOG(ERROR) << __func__ << "uuid not supported"; + return EX_ILLEGAL_ARGUMENT; + } + *_aidl_return = VisualizerSw::kDescriptor; + return EX_NONE; +} + +namespace aidl::android::hardware::audio::effect { + +const std::string VisualizerSw::kEffectName = "VisualizerSw"; + +/* capabilities */ +const std::vector VisualizerSw::kRanges = { + MAKE_RANGE(Visualizer, latencyMs, 0, VisualizerSwContext::kMaxLatencyMs), + MAKE_RANGE(Visualizer, captureSamples, VisualizerSwContext::kMinCaptureSize, + VisualizerSwContext::kMaxCaptureSize)}; + +const Capability VisualizerSw::kCapability = { + .range = Range::make(VisualizerSw::kRanges)}; + +const Descriptor VisualizerSw::kDescriptor = { + .common = {.id = {.type = getEffectTypeUuidVisualizer(), + .uuid 
= getEffectImplUuidVisualizerSw(), + .proxy = std::nullopt}, + .flags = {.type = Flags::Type::INSERT, + .insert = Flags::Insert::FIRST, + .volume = Flags::Volume::NONE}, + .name = VisualizerSw::kEffectName, + .implementor = "The Android Open Source Project"}, + .capability = VisualizerSw::kCapability}; + +ndk::ScopedAStatus VisualizerSw::getDescriptor(Descriptor* _aidl_return) { + LOG(DEBUG) << __func__ << kDescriptor.toString(); + *_aidl_return = kDescriptor; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus VisualizerSw::setParameterSpecific(const Parameter::Specific& specific) { + RETURN_IF(Parameter::Specific::visualizer != specific.getTag(), EX_ILLEGAL_ARGUMENT, + "EffectNotSupported"); + + auto& vsParam = specific.get(); + RETURN_IF(!inRange(vsParam, kRanges), EX_ILLEGAL_ARGUMENT, "outOfRange"); + auto tag = vsParam.getTag(); + + switch (tag) { + case Visualizer::captureSamples: { + RETURN_IF(mContext->setVsCaptureSize(vsParam.get()) != + RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "setCaptureSizeFailed"); + return ndk::ScopedAStatus::ok(); + } + case Visualizer::scalingMode: { + RETURN_IF(mContext->setVsScalingMode(vsParam.get()) != + RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "setScalingModeFailed"); + return ndk::ScopedAStatus::ok(); + } + case Visualizer::measurementMode: { + RETURN_IF(mContext->setVsMeasurementMode(vsParam.get()) != + RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "setMeasurementModeFailed"); + return ndk::ScopedAStatus::ok(); + } + case Visualizer::latencyMs: { + RETURN_IF(mContext->setVsLatency(vsParam.get()) != + RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "setLatencyFailed"); + return ndk::ScopedAStatus::ok(); + } + default: { + LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT, + "VisualizerTagNotSupported"); + } + } +} + +ndk::ScopedAStatus VisualizerSw::getParameterSpecific(const Parameter::Id& id, + Parameter::Specific* specific) { + auto tag 
= id.getTag(); + RETURN_IF(Parameter::Id::visualizerTag != tag, EX_ILLEGAL_ARGUMENT, "wrongIdTag"); + auto vsId = id.get(); + auto vsIdTag = vsId.getTag(); + switch (vsIdTag) { + case Visualizer::Id::commonTag: + return getParameterVisualizer(vsId.get(), specific); + default: + LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT, + "VisualizerTagNotSupported"); + } +} +ndk::ScopedAStatus VisualizerSw::getParameterVisualizer(const Visualizer::Tag& tag, + Parameter::Specific* specific) { + RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext"); + + Visualizer vsParam; + switch (tag) { + case Visualizer::captureSamples: { + vsParam.set(mContext->getVsCaptureSize()); + break; + } + case Visualizer::scalingMode: { + vsParam.set(mContext->getVsScalingMode()); + break; + } + case Visualizer::measurementMode: { + vsParam.set(mContext->getVsMeasurementMode()); + break; + } + case Visualizer::measurement: { + vsParam.set(mContext->getVsMeasurement()); + break; + } + case Visualizer::captureSampleBuffer: { + vsParam.set(mContext->getVsCaptureSampleBuffer()); + break; + } + case Visualizer::latencyMs: { + vsParam.set(mContext->getVsLatency()); + break; + } + default: { + LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT, + "VisualizerTagNotSupported"); + } + } + specific->set(vsParam); + return ndk::ScopedAStatus::ok(); +} + +std::shared_ptr VisualizerSw::createContext(const Parameter::Common& common) { + if (mContext) { + LOG(DEBUG) << __func__ << " context already exist"; + } else { + mContext = std::make_shared(1 /* statusFmqDepth */, common); + } + + return mContext; +} + +RetCode VisualizerSw::releaseContext() { + if (mContext) { + mContext.reset(); + } + return RetCode::SUCCESS; +} + +// Processing method running in EffectWorker thread. 
+IEffect::Status VisualizerSw::effectProcessImpl(float* in, float* out, int samples) { + // TODO: get data buffer and process. + LOG(DEBUG) << __func__ << " in " << in << " out " << out << " samples " << samples; + for (int i = 0; i < samples; i++) { + *out++ = *in++; + } + return {STATUS_OK, samples, samples}; +} + +RetCode VisualizerSwContext::setVsCaptureSize(int captureSize) { + mCaptureSize = captureSize; + return RetCode::SUCCESS; +} + +RetCode VisualizerSwContext::setVsScalingMode(Visualizer::ScalingMode scalingMode) { + mScalingMode = scalingMode; + return RetCode::SUCCESS; +} + +RetCode VisualizerSwContext::setVsMeasurementMode(Visualizer::MeasurementMode measurementMode) { + mMeasurementMode = measurementMode; + return RetCode::SUCCESS; +} + +RetCode VisualizerSwContext::setVsLatency(int latency) { + mLatency = latency; + return RetCode::SUCCESS; +} + +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/visualizer/VisualizerSw.h b/audio/visualizer/VisualizerSw.h new file mode 100644 index 0000000..819351a --- /dev/null +++ b/audio/visualizer/VisualizerSw.h @@ -0,0 +1,96 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +#include + +#include +#include +#include "effect-impl/EffectImpl.h" + +namespace aidl::android::hardware::audio::effect { + +class VisualizerSwContext final : public EffectContext { + public: + // need align the min/max capture size to VISUALIZER_CAPTURE_SIZE_MIN and + // VISUALIZER_CAPTURE_SIZE_MAX because of limitation in audio_utils fixedfft. + static constexpr int32_t kMinCaptureSize = VISUALIZER_CAPTURE_SIZE_MIN; + static constexpr int32_t kMaxCaptureSize = VISUALIZER_CAPTURE_SIZE_MAX; + static constexpr int32_t kMaxLatencyMs = 3000; + VisualizerSwContext(int statusDepth, const Parameter::Common& common) + : EffectContext(statusDepth, common) { + LOG(DEBUG) << __func__; + mCaptureSampleBuffer.resize(kMaxCaptureSize); + fill(mCaptureSampleBuffer.begin(), mCaptureSampleBuffer.end(), 0x80); + } + + RetCode setVsCaptureSize(int captureSize); + int getVsCaptureSize() const { return mCaptureSize; } + + RetCode setVsScalingMode(Visualizer::ScalingMode scalingMode); + Visualizer::ScalingMode getVsScalingMode() const { return mScalingMode; } + + RetCode setVsMeasurementMode(Visualizer::MeasurementMode measurementMode); + Visualizer::MeasurementMode getVsMeasurementMode() const { return mMeasurementMode; } + + RetCode setVsLatency(int latency); + int getVsLatency() const { return mLatency; } + + Visualizer::Measurement getVsMeasurement() const { return mMeasurement; } + std::vector getVsCaptureSampleBuffer() const { return mCaptureSampleBuffer; } + + private: + int mCaptureSize = kMaxCaptureSize; + Visualizer::ScalingMode mScalingMode = Visualizer::ScalingMode::NORMALIZED; + Visualizer::MeasurementMode mMeasurementMode = Visualizer::MeasurementMode::NONE; + int mLatency = 0; + const Visualizer::Measurement mMeasurement = {0, 0}; + std::vector mCaptureSampleBuffer; +}; + +class VisualizerSw final : public EffectImpl { + public: + static const std::string kEffectName; + static const Capability kCapability; + static const Descriptor kDescriptor; + 
VisualizerSw() { LOG(DEBUG) << __func__; } + ~VisualizerSw() { + cleanUp(); + LOG(DEBUG) << __func__; + } + + ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override; + ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) + REQUIRES(mImplMutex) override; + ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific) + REQUIRES(mImplMutex) override; + + std::shared_ptr createContext(const Parameter::Common& common) + REQUIRES(mImplMutex) override; + RetCode releaseContext() REQUIRES(mImplMutex) override; + + IEffect::Status effectProcessImpl(float* in, float* out, int samples) + REQUIRES(mImplMutex) override; + std::string getEffectName() override { return kEffectName; } + + private: + static const std::vector kRanges; + std::shared_ptr mContext GUARDED_BY(mImplMutex); + ndk::ScopedAStatus getParameterVisualizer(const Visualizer::Tag& tag, + Parameter::Specific* specific) REQUIRES(mImplMutex); +}; +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/volume/Android.bp b/audio/volume/Android.bp new file mode 100644 index 0000000..a424f7e --- /dev/null +++ b/audio/volume/Android.bp @@ -0,0 +1,40 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package { + default_team: "trendy_team_android_media_audio_framework", + // See: http://go/android-license-faq + // A large-scale-change added 'default_applicable_licenses' to import + // all of the 'license_kinds' from "hardware_interfaces_license" + // to get the below license kinds: + // SPDX-license-identifier-Apache-2.0 + default_applicable_licenses: ["hardware_interfaces_license"], +} + +cc_library_shared { + name: "libvolumesw", + defaults: [ + "aidlaudioeffectservice_defaults", + ], + srcs: [ + "VolumeSw.cpp", + ":effectCommonFile", + ], + relative_install_path: "soundfx", + visibility: [ + "//hardware/interfaces/audio/aidl/default:__subpackages__", + ], +} diff --git a/audio/volume/VolumeSw.cpp b/audio/volume/VolumeSw.cpp new file mode 100644 index 0000000..dd019f6 --- /dev/null +++ b/audio/volume/VolumeSw.cpp @@ -0,0 +1,191 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include + +#define LOG_TAG "AHAL_VolumeSw" +#include +#include +#include + +#include "VolumeSw.h" + +using aidl::android::hardware::audio::effect::Descriptor; +using aidl::android::hardware::audio::effect::getEffectImplUuidVolumeSw; +using aidl::android::hardware::audio::effect::getEffectTypeUuidVolume; +using aidl::android::hardware::audio::effect::IEffect; +using aidl::android::hardware::audio::effect::State; +using aidl::android::hardware::audio::effect::VolumeSw; +using aidl::android::media::audio::common::AudioUuid; + +extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid, + std::shared_ptr* instanceSpp) { + if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidVolumeSw()) { + LOG(ERROR) << __func__ << "uuid not supported"; + return EX_ILLEGAL_ARGUMENT; + } + if (instanceSpp) { + *instanceSpp = ndk::SharedRefBase::make(); + LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created"; + return EX_NONE; + } else { + LOG(ERROR) << __func__ << " invalid input parameter!"; + return EX_ILLEGAL_ARGUMENT; + } +} + +extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) { + if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidVolumeSw()) { + LOG(ERROR) << __func__ << "uuid not supported"; + return EX_ILLEGAL_ARGUMENT; + } + *_aidl_return = VolumeSw::kDescriptor; + return EX_NONE; +} + +namespace aidl::android::hardware::audio::effect { + +const std::string VolumeSw::kEffectName = "VolumeSw"; + +const std::vector VolumeSw::kRanges = {MAKE_RANGE(Volume, levelDb, -9600, 0)}; + +const Capability VolumeSw::kCapability = {.range = Range::make(VolumeSw::kRanges)}; + +const Descriptor VolumeSw::kDescriptor = { + .common = {.id = {.type = getEffectTypeUuidVolume(), + .uuid = getEffectImplUuidVolumeSw(), + .proxy = std::nullopt}, + .flags = {.type = Flags::Type::INSERT, + .insert = Flags::Insert::FIRST, + .volume = Flags::Volume::CTRL}, + .name = VolumeSw::kEffectName, + 
.implementor = "The Android Open Source Project"}, + .capability = VolumeSw::kCapability}; + +ndk::ScopedAStatus VolumeSw::getDescriptor(Descriptor* _aidl_return) { + LOG(DEBUG) << __func__ << kDescriptor.toString(); + *_aidl_return = kDescriptor; + return ndk::ScopedAStatus::ok(); +} + +ndk::ScopedAStatus VolumeSw::setParameterSpecific(const Parameter::Specific& specific) { + RETURN_IF(Parameter::Specific::volume != specific.getTag(), EX_ILLEGAL_ARGUMENT, + "EffectNotSupported"); + + auto& volParam = specific.get(); + RETURN_IF(!inRange(volParam, kRanges), EX_ILLEGAL_ARGUMENT, "outOfRange"); + auto tag = volParam.getTag(); + + switch (tag) { + case Volume::levelDb: { + RETURN_IF(mContext->setVolLevel(volParam.get()) != RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "LevelNotSupported"); + return ndk::ScopedAStatus::ok(); + } + case Volume::mute: { + RETURN_IF(mContext->setVolMute(volParam.get()) != RetCode::SUCCESS, + EX_ILLEGAL_ARGUMENT, "MuteNotSupported"); + return ndk::ScopedAStatus::ok(); + } + default: { + LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT, + "VolumeTagNotSupported"); + } + } +} + +ndk::ScopedAStatus VolumeSw::getParameterSpecific(const Parameter::Id& id, + Parameter::Specific* specific) { + auto tag = id.getTag(); + RETURN_IF(Parameter::Id::volumeTag != tag, EX_ILLEGAL_ARGUMENT, "wrongIdTag"); + auto volId = id.get(); + auto volIdTag = volId.getTag(); + switch (volIdTag) { + case Volume::Id::commonTag: + return getParameterVolume(volId.get(), specific); + default: + LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT, + "VolumeTagNotSupported"); + } +} + +ndk::ScopedAStatus VolumeSw::getParameterVolume(const Volume::Tag& tag, + Parameter::Specific* specific) { + RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext"); + + Volume volParam; + switch (tag) { + case 
Volume::levelDb: { + volParam.set(mContext->getVolLevel()); + break; + } + case Volume::mute: { + volParam.set(mContext->getVolMute()); + break; + } + default: { + LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag); + return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT, + "VolumeTagNotSupported"); + } + } + + specific->set(volParam); + return ndk::ScopedAStatus::ok(); +} + +std::shared_ptr VolumeSw::createContext(const Parameter::Common& common) { + if (mContext) { + LOG(DEBUG) << __func__ << " context already exist"; + } else { + mContext = std::make_shared(1 /* statusFmqDepth */, common); + } + + return mContext; +} + +RetCode VolumeSw::releaseContext() { + if (mContext) { + mContext.reset(); + } + return RetCode::SUCCESS; +} + +// Processing method running in EffectWorker thread. +IEffect::Status VolumeSw::effectProcessImpl(float* in, float* out, int samples) { + // TODO: get data buffer and process. + LOG(DEBUG) << __func__ << " in " << in << " out " << out << " samples " << samples; + for (int i = 0; i < samples; i++) { + *out++ = *in++; + } + return {STATUS_OK, samples, samples}; +} + +RetCode VolumeSwContext::setVolLevel(int level) { + mLevel = level; + return RetCode::SUCCESS; +} + +RetCode VolumeSwContext::setVolMute(bool mute) { + // TODO : Add implementation to modify mute + mMute = mute; + return RetCode::SUCCESS; +} + +} // namespace aidl::android::hardware::audio::effect diff --git a/audio/volume/VolumeSw.h b/audio/volume/VolumeSw.h new file mode 100644 index 0000000..3fc0d97 --- /dev/null +++ b/audio/volume/VolumeSw.h @@ -0,0 +1,80 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include +#include +#include +#include + +#include "effect-impl/EffectImpl.h" + +namespace aidl::android::hardware::audio::effect { + +class VolumeSwContext final : public EffectContext { + public: + VolumeSwContext(int statusDepth, const Parameter::Common& common) + : EffectContext(statusDepth, common) { + LOG(DEBUG) << __func__; + } + + RetCode setVolLevel(int level); + + int getVolLevel() const { return mLevel; } + + RetCode setVolMute(bool mute); + + bool getVolMute() const { return mMute; } + + private: + int mLevel = 0; + bool mMute = false; +}; + +class VolumeSw final : public EffectImpl { + public: + static const std::string kEffectName; + static const Capability kCapability; + static const Descriptor kDescriptor; + VolumeSw() { LOG(DEBUG) << __func__; } + ~VolumeSw() { + cleanUp(); + LOG(DEBUG) << __func__; + } + + ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override; + ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) + REQUIRES(mImplMutex) override; + ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific) + REQUIRES(mImplMutex) override; + + std::shared_ptr createContext(const Parameter::Common& common) + REQUIRES(mImplMutex) override; + RetCode releaseContext() REQUIRES(mImplMutex) override; + + IEffect::Status effectProcessImpl(float* in, float* out, int samples) + REQUIRES(mImplMutex) override; + std::string getEffectName() override { return kEffectName; } + + private: + static const std::vector kRanges; + std::shared_ptr 
mContext GUARDED_BY(mImplMutex); + + ndk::ScopedAStatus getParameterVolume(const Volume::Tag& tag, Parameter::Specific* specific) + REQUIRES(mImplMutex); +}; +} // namespace aidl::android::hardware::audio::effect