audio: import aidl hal

* hardware/interfaces/audio/aidl/default/ at android-16.0.0_r1.
This commit is contained in:
Konsta
2025-10-22 17:31:20 +03:00
parent 3292156c2a
commit 7d6df38051
168 changed files with 27991 additions and 0 deletions

View File

@@ -0,0 +1,56 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define LOG_TAG "AHAL_AidlXsdc"
#include <android-base/logging.h>
#include <error/expected_utils.h>
#include <media/AidlConversionCppNdk.h>
#include <media/TypeConverter.h>
#include "core-impl/AidlConversionXsdc.h"
using aidl::android::media::audio::common::AudioFormatDescription;
namespace xsd = android::audio::policy::configuration;
namespace aidl::android::hardware::audio::core::internal {
// Translates an XSD audio format name string into the AIDL
// AudioFormatDescription by round-tripping through the legacy audio_format_t.
ConversionResult<AudioFormatDescription> xsdc2aidl_AudioFormatDescription(const std::string& xsdc) {
    const audio_format_t legacyFormat = ::android::formatFromString(xsdc);
    return legacy2aidl_audio_format_t_AudioFormatDescription(legacyFormat);
}
// Builds an AIDL SurroundFormatFamily from an XSD <format> element:
// the mandatory primary format plus any optional subformats.
ConversionResult<SurroundSoundConfig::SurroundFormatFamily> xsdc2aidl_SurroundFormatFamily(
        const ::xsd::SurroundFormats::Format& xsdc) {
    SurroundSoundConfig::SurroundFormatFamily family;
    family.primaryFormat = VALUE_OR_RETURN(xsdc2aidl_AudioFormatDescription(xsdc.getName()));
    if (xsdc.hasSubformats()) {
        family.subFormats = VALUE_OR_RETURN(convertContainer<std::vector<AudioFormatDescription>>(
                xsdc.getSubformats(), xsdc2aidl_AudioFormatDescription));
    }
    return family;
}
// Converts the XSD <surroundSound> section into the AIDL SurroundSoundConfig.
// A missing or empty <formats> list yields a default-initialized config.
ConversionResult<SurroundSoundConfig> xsdc2aidl_SurroundSoundConfig(
        const ::xsd::SurroundSound& xsdc) {
    SurroundSoundConfig result;
    if (xsdc.hasFormats() && xsdc.getFirstFormats()->hasFormat()) {
        result.formatFamilies = VALUE_OR_RETURN(
                convertContainer<std::vector<SurroundSoundConfig::SurroundFormatFamily>>(
                        xsdc.getFirstFormats()->getFormat(), xsdc2aidl_SurroundFormatFamily));
    }
    return result;
}
} // namespace aidl::android::hardware::audio::core::internal

369
audio/Android.bp Normal file
View File

@@ -0,0 +1,369 @@
package {
    default_team: "trendy_team_android_media_audio_framework",
    // See: http://go/android-license-faq
    // A large-scale-change added 'default_applicable_licenses' to import
    // all of the 'license_kinds' from "hardware_interfaces_license"
    // to get the below license kinds:
    //   SPDX-license-identifier-Apache-2.0
    default_applicable_licenses: ["hardware_interfaces_license"],
}

// Common build settings shared by the example audio HAL service targets:
// vendor-only, the shared libraries for audio/FMQ/binder plumbing, and the
// flag selecting the NDK backend of the AIDL audio type conversions.
cc_defaults {
    name: "aidlaudioservice_defaults",
    vendor: true,
    shared_libs: [
        "libalsautilsv2",
        "libaudio_aidl_conversion_common_ndk",
        "libaudioaidlcommon",
        "libaudioutils",
        "libbase",
        "libbinder_ndk",
        "libcutils",
        "libfmq",
        "libnbaio_mono",
        "liblog",
        "libmedia_helper",
        "libstagefright_foundation",
        "libtinyalsav2",
        "libutils",
        "libxml2",
        "android.hardware.common-V2-ndk",
        "android.hardware.common.fmq-V1-ndk",
    ],
    header_libs: [
        "libaudio_system_headers",
        "libaudioaidl_headers",
        "libxsdc-utils",
    ],
    cflags: [
        "-DBACKEND_NDK",
    ],
}
// Implementation library for the example audio HAL service. Contains the
// core Module/Stream implementations, the XML config converters, and the
// ALSA / Bluetooth / remote-submix / stub / USB module backends.
cc_library {
    name: "libaudioserviceexampleimpl",
    defaults: [
        "aidlaudioservice_defaults",
        "latest_android_media_audio_common_types_ndk_shared",
        "latest_android_hardware_audio_core_ndk_shared",
        "latest_android_hardware_audio_core_sounddose_ndk_shared",
        "latest_android_hardware_bluetooth_audio_ndk_shared",
    ],
    export_include_dirs: ["include"],
    srcs: [
        "AidlConversionXsdc.cpp",
        "AudioPolicyConfigXmlConverter.cpp",
        "Bluetooth.cpp",
        "CapEngineConfigXmlConverter.cpp",
        "Config.cpp",
        "Configuration.cpp",
        "EngineConfigXmlConverter.cpp",
        "Module.cpp",
        "ModulePrimary.cpp",
        "SoundDose.cpp",
        "Stream.cpp",
        "Telephony.cpp",
        "XsdcConversion.cpp",
        "alsa/Mixer.cpp",
        "alsa/ModuleAlsa.cpp",
        "alsa/StreamAlsa.cpp",
        "alsa/Utils.cpp",
        "bluetooth/DevicePortProxy.cpp",
        "bluetooth/ModuleBluetooth.cpp",
        "bluetooth/StreamBluetooth.cpp",
        "deprecated/StreamSwitcher.cpp",
        "primary/PrimaryMixer.cpp",
        "primary/StreamPrimary.cpp",
        "r_submix/ModuleRemoteSubmix.cpp",
        "r_submix/SubmixRoute.cpp",
        "r_submix/StreamRemoteSubmix.cpp",
        "stub/ApeHeader.cpp",
        "stub/DriverStubImpl.cpp",
        "stub/ModuleStub.cpp",
        "stub/StreamMmapStub.cpp",
        "stub/StreamOffloadStub.cpp",
        "stub/StreamStub.cpp",
        "usb/ModuleUsb.cpp",
        "usb/StreamUsb.cpp",
        "usb/UsbAlsaMixerControl.cpp",
    ],
    // Sources/headers generated by xsdc from the audio policy XSD schemas.
    generated_sources: [
        "audio_policy_capengine_configuration_aidl_default",
        "audio_policy_configuration_aidl_default",
        "audio_policy_engine_configuration_aidl_default",
    ],
    generated_headers: [
        "audio_policy_capengine_configuration_aidl_default",
        "audio_policy_configuration_aidl_default",
        "audio_policy_engine_configuration_aidl_default",
    ],
    export_generated_headers: [
        "audio_policy_capengine_configuration_aidl_default",
        "audio_policy_configuration_aidl_default",
        "audio_policy_engine_configuration_aidl_default",
    ],
    shared_libs: [
        "android.hardware.bluetooth.audio-impl",
        "libaudio_aidl_conversion_common_ndk",
        "libaudioutils",
        "libbluetooth_audio_session_aidl",
        "liblog",
        "libmedia_helper",
        "libmediautils_vendor",
        "libstagefright_foundation",
    ],
    export_shared_lib_headers: [
        "libaudio_aidl_conversion_common_ndk",
    ],
    cflags: [
        "-Wall",
        "-Wextra",
        "-Werror",
        "-Wthread-safety",
        "-DBACKEND_NDK",
    ],
}
// Parser library for the audio effects XML configuration
// (audio_effects_config.xml). Host-supported so it can be used in tests.
cc_library {
    name: "libeffectconfig",
    srcs: [
        "EffectConfig.cpp",
    ],
    defaults: [
        "latest_android_hardware_audio_effect_ndk_shared",
        "latest_android_media_audio_common_types_ndk_shared",
    ],
    shared_libs: [
        "libaudioutils",
        "libaudio_aidl_conversion_common_ndk",
        "libbase",
        "libbinder_ndk",
        "liblog",
        "libmedia_helper",
        "libtinyxml2",
        "libutils",
    ],
    header_libs: [
        "libaudio_system_headers",
        "libaudioaidl_headers",
    ],
    export_shared_lib_headers: [
        "libtinyxml2",
    ],
    export_include_dirs: [
        "include",
    ],
    cflags: [
        "-Wall",
        "-Wextra",
        "-Werror",
        "-DBACKEND_NDK",
    ],
    vendor: true,
    host_supported: true,
    target: {
        android: {
            // APEX support is only relevant for on-device builds.
            shared_libs: [
                "libapexsupport",
            ],
        },
    },
}
// The example audio HAL service binary. Statically links the implementation
// library and is packaged inside the com.android.hardware.audio APEX.
cc_binary {
    name: "android.hardware.audio.service-aidl.example",
    relative_install_path: "hw",
    defaults: [
        "aidlaudioservice_defaults",
        "latest_android_hardware_audio_core_sounddose_ndk_shared",
        "latest_android_hardware_audio_core_ndk_shared",
        "latest_android_hardware_bluetooth_audio_ndk_shared",
        "latest_android_media_audio_common_types_ndk_shared",
    ],
    static_libs: [
        "libaudioserviceexampleimpl",
    ],
    shared_libs: [
        "android.hardware.bluetooth.audio-impl",
        "libaudio_aidl_conversion_common_ndk",
        "libbluetooth_audio_session_aidl",
        "liblog",
        "libmedia_helper",
        "libstagefright_foundation",
    ],
    srcs: ["main.cpp"],
    cflags: [
        "-Wall",
        "-Wextra",
        "-Werror",
        "-Wthread-safety",
        "-DBACKEND_NDK",
    ],
    installable: false, //installed in apex com.android.hardware.audio
}
// Unit tests for AudioPolicyConfigXmlConverter (XML -> AIDL conversion).
cc_test {
    name: "audio_policy_config_xml_converter_tests",
    vendor_available: true,
    defaults: [
        "latest_android_media_audio_common_types_ndk_static",
        "latest_android_hardware_audio_core_ndk_static",
    ],
    shared_libs: [
        "libaudio_aidl_conversion_common_ndk",
        "libaudioaidlcommon",
        "libaudioutils",
        "libbase",
        "libbinder_ndk",
        "libcutils",
        "libfmq",
        "libmedia_helper",
        "libstagefright_foundation",
        "libutils",
        "libxml2",
    ],
    header_libs: [
        "libaudio_system_headers",
        "libaudioaidl_headers",
        "libxsdc-utils",
    ],
    generated_sources: [
        "audio_policy_configuration_aidl_default",
        "audio_policy_engine_configuration_aidl_default",
    ],
    generated_headers: [
        "audio_policy_configuration_aidl_default",
        "audio_policy_engine_configuration_aidl_default",
    ],
    srcs: [
        "AudioPolicyConfigXmlConverter.cpp",
        "tests/AudioPolicyConfigXmlConverterTest.cpp",
    ],
    cflags: [
        "-Wall",
        "-Wextra",
        "-Werror",
        "-Wthread-safety",
        "-DBACKEND_NDK",
    ],
    test_suites: ["general-tests"],
}
// Unit tests for the ALSA utility helpers (alsa/Utils.cpp).
cc_test {
    name: "audio_alsa_utils_tests",
    vendor_available: true,
    defaults: [
        "latest_android_media_audio_common_types_ndk_static",
        "latest_android_hardware_audio_core_ndk_static",
    ],
    static_libs: [
        "libalsautilsv2",
        "libtinyalsav2",
    ],
    shared_libs: [
        "libaudio_aidl_conversion_common_ndk",
        "libaudioaidlcommon",
        "libaudioutils",
        "libbase",
        "libbinder_ndk",
        "libcutils",
        "libfmq",
        "libmedia_helper",
        "libstagefright_foundation",
        "libutils",
    ],
    header_libs: [
        "libaudio_system_headers",
        "libaudioaidl_headers",
    ],
    srcs: [
        "alsa/Utils.cpp",
        "tests/AlsaUtilsTest.cpp",
    ],
    cflags: [
        "-Wall",
        "-Wextra",
        "-Werror",
        "-Wthread-safety",
        "-DBACKEND_NDK",
    ],
    test_suites: ["general-tests"],
}
// Common build settings for the example audio effects HAL service targets.
cc_defaults {
    name: "aidlaudioeffectservice_defaults",
    defaults: [
        "latest_android_media_audio_common_types_ndk_shared",
        "latest_android_hardware_audio_effect_ndk_shared",
    ],
    vendor: true,
    shared_libs: [
        "libaudio_aidl_conversion_common_ndk",
        "libaudioaidlcommon",
        "libaudioutils",
        "libbase",
        "libbinder_ndk",
        "libcutils",
        "libfmq",
        "liblog",
        "libutils",
        "android.hardware.common-V2-ndk",
        "android.hardware.common.fmq-V1-ndk",
    ],
    header_libs: [
        "libaudioaidl_headers",
        "libaudio_system_headers",
        "libsystem_headers",
    ],
    cflags: [
        "-Wall",
        "-Wextra",
        "-Werror",
        "-Wthread-safety",
        "-DBACKEND_NDK",
    ],
}

// Sources shared by every effect implementation library (context, worker
// thread, and the common IEffect implementation).
filegroup {
    name: "effectCommonFile",
    srcs: [
        "EffectContext.cpp",
        "EffectThread.cpp",
        "EffectImpl.cpp",
    ],
}
// The example audio effects HAL service binary; packaged in the
// com.android.hardware.audio APEX.
cc_binary {
    name: "android.hardware.audio.effect.service-aidl.example",
    relative_install_path: "hw",
    defaults: ["aidlaudioeffectservice_defaults"],
    shared_libs: [
        "libapexsupport",
        "libeffectconfig",
    ],
    srcs: [
        "EffectFactory.cpp",
        "EffectMain.cpp",
    ],
    installable: false, //installed in apex com.android.hardware.audio
}

// Public headers of the audio AIDL implementations (include/ directory).
cc_library_headers {
    name: "libaudioaidl_headers",
    export_include_dirs: ["include"],
    vendor_available: true,
    host_supported: true,
}

// Init .rc file for the example service; installed by the APEX, not directly.
prebuilt_etc {
    name: "android.hardware.audio.service-aidl.example.rc",
    src: "android.hardware.audio.service-aidl.example.rc",
    installable: false,
}

// VINTF manifest fragment declaring the HAL interfaces the service provides.
prebuilt_etc {
    name: "android.hardware.audio.service-aidl.xml",
    src: "android.hardware.audio.service-aidl.xml",
    sub_dir: "vintf",
    installable: false,
}

View File

@@ -0,0 +1,198 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <fcntl.h>
#include <inttypes.h>
#include <unistd.h>
#include <functional>
#include <unordered_map>
#define LOG_TAG "AHAL_ApmXmlConverter"
#include <android-base/logging.h>
#include <aidl/android/media/audio/common/AudioHalEngineConfig.h>
#include <media/stagefright/foundation/MediaDefs.h>
#include <system/audio-base-utils.h>
#include "core-impl/AidlConversionXsdc.h"
#include "core-impl/AudioPolicyConfigXmlConverter.h"
#include "core-impl/XsdcConversion.h"
using aidl::android::media::audio::common::AudioFormatDescription;
using aidl::android::media::audio::common::AudioHalEngineConfig;
using aidl::android::media::audio::common::AudioHalVolumeCurve;
using aidl::android::media::audio::common::AudioHalVolumeGroup;
using aidl::android::media::audio::common::AudioStreamType;
namespace ap_xsd = android::audio::policy::configuration;
namespace aidl::android::hardware::audio::core::internal {
static const int kDefaultVolumeIndexMin = 0;
static const int kDefaultVolumeIndexMax = 100;
static const int KVolumeIndexDeferredToAudioService = -1;
// Converts one XSD <volume> element into an AIDL AudioHalVolumeCurve.
// A curve either lists its points inline or references a shared <reference>
// element by name; referenced curves are resolved through a lazily built map.
ConversionResult<AudioHalVolumeCurve> AudioPolicyConfigXmlConverter::convertVolumeCurveToAidl(
        const ap_xsd::Volume& xsdcVolumeCurve) {
    AudioHalVolumeCurve curve;
    curve.deviceCategory =
            static_cast<AudioHalVolumeCurve::DeviceCategory>(xsdcVolumeCurve.getDeviceCategory());
    if (!xsdcVolumeCurve.hasRef()) {
        // Inline points.
        curve.curvePoints = VALUE_OR_FATAL(
                (convertCollectionToAidl<std::string, AudioHalVolumeCurve::CurvePoint>(
                        xsdcVolumeCurve.getPoint(), &convertCurvePointToAidl)));
        return curve;
    }
    // Referenced points: build the name -> <reference> map on first use.
    if (mVolumesReferenceMap.empty()) {
        mVolumesReferenceMap = generateReferenceMap<ap_xsd::Volumes, ap_xsd::Reference>(
                getXsdcConfig()->getVolumes());
    }
    curve.curvePoints = VALUE_OR_FATAL(
            (convertCollectionToAidl<std::string, AudioHalVolumeCurve::CurvePoint>(
                    mVolumesReferenceMap.at(xsdcVolumeCurve.getRef()).getPoint(),
                    &convertCurvePointToAidl)));
    return curve;
}
// Converts a single <volume> element and files it under its stream type.
void AudioPolicyConfigXmlConverter::mapStreamToVolumeCurve(const ap_xsd::Volume& xsdcVolumeCurve) {
    auto aidlCurve = VALUE_OR_FATAL(convertVolumeCurveToAidl(xsdcVolumeCurve));
    mStreamToVolumeCurvesMap[xsdcVolumeCurve.getStream()].push_back(aidlCurve);
}
// Returns the surround sound configuration: converted from the policy XML's
// <surroundSound> section when present and convertible, otherwise a built-in
// default (see getDefaultSurroundSoundConfig).
// NOTE(review): the result is cached in a function-local static whose
// initializer captures 'this' — it is computed once, from the first converter
// instance that calls this method, and shared by all later callers. Confirm
// the single-config-per-process assumption holds.
const SurroundSoundConfig& AudioPolicyConfigXmlConverter::getSurroundSoundConfig() {
    static const SurroundSoundConfig aidlSurroundSoundConfig = [this]() {
        if (auto xsdcConfig = getXsdcConfig(); xsdcConfig && xsdcConfig->hasSurroundSound()) {
            auto configConv = xsdc2aidl_SurroundSoundConfig(*xsdcConfig->getFirstSurroundSound());
            if (configConv.ok()) {
                return configConv.value();
            }
            // Fall through to the default config on conversion failure.
            LOG(ERROR) << "There was an error converting surround formats to AIDL: "
                       << configConv.error();
        }
        LOG(WARNING) << "Audio policy config does not have <surroundSound> section, using default";
        return getDefaultSurroundSoundConfig();
    }();
    return aidlSurroundSoundConfig;
}
// Transfers ownership of the parsed module configurations to the caller.
// After this call the converter's own pointer is left moved-from (null).
std::unique_ptr<AudioPolicyConfigXmlConverter::ModuleConfigs>
AudioPolicyConfigXmlConverter::releaseModuleConfigs() {
    return std::move(mModuleConfigurations);
}
// Returns the AIDL engine config, parsing the <volumes> section of the
// policy XML lazily on first access.
const AudioHalEngineConfig& AudioPolicyConfigXmlConverter::getAidlEngineConfig() {
    const bool alreadyParsed = !mAidlEngineConfig.volumeGroups.empty();
    if (!alreadyParsed && getXsdcConfig() && getXsdcConfig()->hasVolumes()) {
        parseVolumes();
    }
    return mAidlEngineConfig;
}
// static
// Returns the built-in surround sound configuration used when the policy XML
// has no usable <surroundSound> section. The table is built once via the
// local ENCODED_FORMAT/SIMPLE_FORMAT helper macros and lives for the process.
const SurroundSoundConfig& AudioPolicyConfigXmlConverter::getDefaultSurroundSoundConfig() {
    // Provide a config similar to the one used by the framework by default
    // (see AudioPolicyConfig::setDefaultSurroundFormats).
#define ENCODED_FORMAT(format)           \
    AudioFormatDescription {             \
        .encoding = ::android::format    \
    }
#define SIMPLE_FORMAT(format)                          \
    SurroundSoundConfig::SurroundFormatFamily {        \
        .primaryFormat = ENCODED_FORMAT(format)        \
    }
    static const SurroundSoundConfig defaultConfig = {
            .formatFamilies = {
                    SIMPLE_FORMAT(MEDIA_MIMETYPE_AUDIO_AC3),
                    SIMPLE_FORMAT(MEDIA_MIMETYPE_AUDIO_EAC3),
                    SIMPLE_FORMAT(MEDIA_MIMETYPE_AUDIO_DTS),
                    SIMPLE_FORMAT(MEDIA_MIMETYPE_AUDIO_DTS_HD),
                    SIMPLE_FORMAT(MEDIA_MIMETYPE_AUDIO_DTS_HD_MA),
                    SIMPLE_FORMAT(MEDIA_MIMETYPE_AUDIO_DTS_UHD_P1),
                    SIMPLE_FORMAT(MEDIA_MIMETYPE_AUDIO_DTS_UHD_P2),
                    SIMPLE_FORMAT(MEDIA_MIMETYPE_AUDIO_DOLBY_TRUEHD),
                    SIMPLE_FORMAT(MEDIA_MIMETYPE_AUDIO_EAC3_JOC),
                    // AAC is the only family with explicit subformats.
                    SurroundSoundConfig::SurroundFormatFamily{
                            .primaryFormat = ENCODED_FORMAT(MEDIA_MIMETYPE_AUDIO_AAC_LC),
                            .subFormats =
                                    {
                                            ENCODED_FORMAT(MEDIA_MIMETYPE_AUDIO_AAC_HE_V1),
                                            ENCODED_FORMAT(MEDIA_MIMETYPE_AUDIO_AAC_HE_V2),
                                            ENCODED_FORMAT(MEDIA_MIMETYPE_AUDIO_AAC_ELD),
                                            ENCODED_FORMAT(MEDIA_MIMETYPE_AUDIO_AAC_XHE),
                                    }},
                    SIMPLE_FORMAT(MEDIA_MIMETYPE_AUDIO_AC4),
            }};
#undef SIMPLE_FORMAT
#undef ENCODED_FORMAT
    return defaultConfig;
}
// Walks every <volumes>/<volume> element of the policy XML and registers
// each curve under its stream type.
void AudioPolicyConfigXmlConverter::mapStreamsToVolumeCurves() {
    if (!getXsdcConfig()->hasVolumes()) return;
    for (const ap_xsd::Volumes& volumesWrapper : getXsdcConfig()->getVolumes()) {
        for (const ap_xsd::Volume& volume : volumesWrapper.getVolume()) {
            mapStreamToVolumeCurve(volume);
        }
    }
}
void AudioPolicyConfigXmlConverter::addVolumeGroupstoEngineConfig() {
for (const auto& [xsdcStream, volumeCurves] : mStreamToVolumeCurvesMap) {
AudioHalVolumeGroup volumeGroup;
volumeGroup.name = ap_xsd::toString(xsdcStream);
if (static_cast<int>(xsdcStream) >= AUDIO_STREAM_PUBLIC_CNT) {
volumeGroup.minIndex = kDefaultVolumeIndexMin;
volumeGroup.maxIndex = kDefaultVolumeIndexMax;
} else {
volumeGroup.minIndex = KVolumeIndexDeferredToAudioService;
volumeGroup.maxIndex = KVolumeIndexDeferredToAudioService;
}
volumeGroup.volumeCurves = volumeCurves;
mAidlEngineConfig.volumeGroups.push_back(std::move(volumeGroup));
}
}
// Parses the <volumes> section exactly once; subsequent calls are no-ops
// because the stream-to-curves map is already populated.
void AudioPolicyConfigXmlConverter::parseVolumes() {
    const bool notYetParsed = mStreamToVolumeCurvesMap.empty();
    if (notYetParsed && getXsdcConfig()->hasVolumes()) {
        mapStreamsToVolumeCurves();
        addVolumeGroupstoEngineConfig();
    }
}
// Populates mModuleConfigurations from the <modules> section of the policy
// XML. The legacy "primary" module name maps to "default"; "r_submix" gets a
// null config placeholder.
void AudioPolicyConfigXmlConverter::init() {
    if (!getXsdcConfig()->hasModules()) return;
    for (const ap_xsd::Modules& modulesWrapper : getXsdcConfig()->getModules()) {
        if (!modulesWrapper.has_module()) continue;
        for (const ap_xsd::Modules::Module& xsdcModule : modulesWrapper.get_module()) {
            // 'primary' in the XML schema used by HIDL is equivalent to 'default' module.
            std::string name = xsdcModule.getName();
            if (name == "primary") name = "default";
            if (name == "r_submix") {
                // See the note on the 'getRSubmixConfiguration' function.
                mModuleConfigurations->emplace_back(name, nullptr);
            } else {
                mModuleConfigurations->emplace_back(
                        name, VALUE_OR_FATAL(convertModuleConfigToAidl(xsdcModule)));
            }
        }
    }
}
} // namespace aidl::android::hardware::audio::core::internal

130
audio/Bluetooth.cpp Normal file
View File

@@ -0,0 +1,130 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define LOG_TAG "AHAL_Bluetooth"
#include <android-base/logging.h>
#include "core-impl/Bluetooth.h"
using aidl::android::hardware::audio::core::VendorParameter;
using aidl::android::media::audio::common::Boolean;
using aidl::android::media::audio::common::Float;
using aidl::android::media::audio::common::Int;
namespace aidl::android::hardware::audio::core {
// Sets up the default state for both profiles: HFP disabled at 8 kHz with
// maximum volume, SCO disabled with NREC off in plain SCO mode.
Bluetooth::Bluetooth() {
    mHfpConfig.isEnabled = Boolean{false};
    mHfpConfig.sampleRate = Int{8000};
    mHfpConfig.volume = Float{HfpConfig::VOLUME_MAX};
    mScoConfig.isEnabled = Boolean{false};
    mScoConfig.isNrecEnabled = Boolean{false};
    mScoConfig.mode = ScoConfig::Mode::SCO;
}
// Merges the provided (partial) SCO config into the stored one — only fields
// the caller set are applied — and returns the resulting full config.
ndk::ScopedAStatus Bluetooth::setScoConfig(const ScoConfig& in_config, ScoConfig* _aidl_return) {
    if (in_config.isEnabled.has_value()) mScoConfig.isEnabled = in_config.isEnabled;
    if (in_config.isNrecEnabled.has_value()) mScoConfig.isNrecEnabled = in_config.isNrecEnabled;
    if (in_config.mode != ScoConfig::Mode::UNSPECIFIED) mScoConfig.mode = in_config.mode;
    if (in_config.debugName.has_value()) mScoConfig.debugName = in_config.debugName;
    *_aidl_return = mScoConfig;
    LOG(DEBUG) << __func__ << ": received " << in_config.toString() << ", returning "
               << _aidl_return->toString();
    return ndk::ScopedAStatus::ok();
}
// Merges the provided (partial) HFP config into the stored one. All provided
// values are validated up-front so a bad argument leaves the stored config
// untouched; on failure EX_ILLEGAL_ARGUMENT is returned.
ndk::ScopedAStatus Bluetooth::setHfpConfig(const HfpConfig& in_config, HfpConfig* _aidl_return) {
    if (in_config.sampleRate.has_value()) {
        const auto rate = in_config.sampleRate.value().value;
        if (rate <= 0) {
            LOG(ERROR) << __func__ << ": invalid sample rate: " << rate;
            return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
        }
    }
    if (in_config.volume.has_value()) {
        const auto vol = in_config.volume.value().value;
        if (vol < HfpConfig::VOLUME_MIN || vol > HfpConfig::VOLUME_MAX) {
            LOG(ERROR) << __func__ << ": invalid volume: " << vol;
            return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
        }
    }
    if (in_config.isEnabled.has_value()) mHfpConfig.isEnabled = in_config.isEnabled;
    if (in_config.sampleRate.has_value()) mHfpConfig.sampleRate = in_config.sampleRate;
    if (in_config.volume.has_value()) mHfpConfig.volume = in_config.volume;
    *_aidl_return = mHfpConfig;
    LOG(DEBUG) << __func__ << ": received " << in_config.toString() << ", returning "
               << _aidl_return->toString();
    return ndk::ScopedAStatus::ok();
}
// Reports whether the A2DP profile is currently enabled.
ndk::ScopedAStatus BluetoothA2dp::isEnabled(bool* _aidl_return) {
    *_aidl_return = mEnabled;
    return ndk::ScopedAStatus::ok();
}
// Updates the enabled state and notifies the registered handler, if any;
// the handler's status is propagated to the caller.
ndk::ScopedAStatus BluetoothA2dp::setEnabled(bool in_enabled) {
    mEnabled = in_enabled;
    LOG(DEBUG) << __func__ << ": " << mEnabled;
    if (mHandler) return mHandler();
    return ndk::ScopedAStatus::ok();
}
// This example implementation does not support offload reconfiguration.
ndk::ScopedAStatus BluetoothA2dp::supportsOffloadReconfiguration(bool* _aidl_return) {
    *_aidl_return = false;
    return ndk::ScopedAStatus::ok();
}
// Always rejects reconfiguration (see supportsOffloadReconfiguration).
ndk::ScopedAStatus BluetoothA2dp::reconfigureOffload(
        const std::vector<::aidl::android::hardware::audio::core::VendorParameter>& in_parameters
        __unused) {
    LOG(DEBUG) << __func__ << ": " << ::android::internal::ToString(in_parameters);
    return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
}
// Reports whether the LE audio profile is currently enabled.
ndk::ScopedAStatus BluetoothLe::isEnabled(bool* _aidl_return) {
    *_aidl_return = mEnabled;
    return ndk::ScopedAStatus::ok();
}
// Updates the enabled state and notifies the registered handler, if any;
// the handler's status is propagated to the caller.
ndk::ScopedAStatus BluetoothLe::setEnabled(bool in_enabled) {
    mEnabled = in_enabled;
    // Log the state change for parity with BluetoothA2dp::setEnabled.
    LOG(DEBUG) << __func__ << ": " << mEnabled;
    if (mHandler) return mHandler();
    return ndk::ScopedAStatus::ok();
}
// This example implementation does not support offload reconfiguration.
ndk::ScopedAStatus BluetoothLe::supportsOffloadReconfiguration(bool* _aidl_return) {
    *_aidl_return = false;
    return ndk::ScopedAStatus::ok();
}
// Always rejects reconfiguration (see supportsOffloadReconfiguration).
ndk::ScopedAStatus BluetoothLe::reconfigureOffload(
        const std::vector<::aidl::android::hardware::audio::core::VendorParameter>& in_parameters
        __unused) {
    LOG(DEBUG) << __func__ << ": " << ::android::internal::ToString(in_parameters);
    return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
}
} // namespace aidl::android::hardware::audio::core

View File

@@ -0,0 +1,405 @@
/*
* Copyright (C) 2024 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define LOG_TAG "AHAL_Config"
#include <aidl/android/media/audio/common/AudioProductStrategyType.h>
#include <android-base/logging.h>
#include <media/AidlConversionCppNdk.h>
#include <media/TypeConverter.h>
#include <media/convert.h>
#include <utils/FastStrcmp.h>
#include "core-impl/CapEngineConfigXmlConverter.h"
#include "core-impl/XsdcConversion.h"
using aidl::android::hardware::audio::common::iequals;
using aidl::android::media::audio::common::AudioDeviceAddress;
using aidl::android::media::audio::common::AudioDeviceDescription;
using aidl::android::media::audio::common::AudioHalCapConfiguration;
using aidl::android::media::audio::common::AudioHalCapCriterionV2;
using aidl::android::media::audio::common::AudioHalCapDomain;
using aidl::android::media::audio::common::AudioHalCapParameter;
using aidl::android::media::audio::common::AudioHalCapRule;
using aidl::android::media::audio::common::AudioPolicyForceUse;
using aidl::android::media::audio::common::AudioSource;
using aidl::android::media::audio::common::AudioStreamType;
using ::android::BAD_VALUE;
using ::android::base::unexpected;
using ::android::utilities::convertTo;
namespace eng_xsd = android::audio::policy::capengine::configuration;
namespace aidl::android::hardware::audio::core::internal {
static constexpr const char* gStrategiesParameter = "product_strategies";
static constexpr const char* gInputSourcesParameter = "input_sources";
static constexpr const char* gStreamsParameter = "streams";
static constexpr const char* gOutputDevicesParameter = "selected_output_devices";
static constexpr const char* gOutputDeviceAddressParameter = "device_address";
static constexpr const char* gStrategyPrefix = "vx_";
static constexpr const char* gLegacyStrategyPrefix = "STRATEGY_";
static constexpr const char* gLegacyOutputDevicePrefix = "AUDIO_DEVICE_OUT_";
static constexpr const char* gLegacyInputDevicePrefix = "AUDIO_DEVICE_IN_";
static constexpr const char* gLegacyStreamPrefix = "AUDIO_STREAM_";
static constexpr const char* gLegacySourcePrefix = "AUDIO_SOURCE_";
// Returns a mutable reference to the converted CAP engine domains; holds
// std::nullopt until a successful conversion populates it.
std::optional<std::vector<std::optional<AudioHalCapDomain>>>&
CapEngineConfigXmlConverter::getAidlCapEngineConfig() {
    return mAidlCapDomains;
}
// Converts a single XSD <selectionCriterionRule> into an AIDL CriterionRule.
// The criterion name selects the AudioHalCapCriterionV2 variant; the value is
// converted accordingly (device type, address, telephony mode, or force-use).
// Returns BAD_VALUE for unknown criterion names or match modes.
ConversionResult<AudioHalCapRule::CriterionRule> convertCriterionRuleToAidl(
        const eng_xsd::SelectionCriterionRuleType& xsdcRule) {
    using Tag = AudioHalCapCriterionV2::Tag;
    AudioHalCapRule::CriterionRule rule{};
    std::string criterionName = xsdcRule.getSelectionCriterion();
    std::string criterionValue = xsdcRule.getValue();
    if (iequals(criterionName, toString(Tag::availableInputDevices))) {
        AudioHalCapCriterionV2::AvailableDevices value;
        // Device values are written without the legacy prefix in the XML.
        value.values.emplace_back(VALUE_OR_RETURN(
                convertDeviceTypeToAidl(gLegacyInputDevicePrefix + criterionValue)));
        rule.criterionAndValue = AudioHalCapCriterionV2::make<Tag::availableInputDevices>(value);
    } else if (iequals(criterionName, toString(Tag::availableOutputDevices))) {
        AudioHalCapCriterionV2::AvailableDevices value;
        value.values.emplace_back(VALUE_OR_RETURN(
                convertDeviceTypeToAidl(gLegacyOutputDevicePrefix + criterionValue)));
        rule.criterionAndValue = AudioHalCapCriterionV2::make<Tag::availableOutputDevices>(value);
    } else if (iequals(criterionName, toString(Tag::availableInputDevicesAddresses))) {
        AudioHalCapCriterionV2::AvailableDevicesAddresses value;
        value.values.emplace_back(criterionValue);
        rule.criterionAndValue =
                AudioHalCapCriterionV2::make<Tag::availableInputDevicesAddresses>(value);
    } else if (iequals(criterionName, toString(Tag::availableOutputDevicesAddresses))) {
        AudioHalCapCriterionV2::AvailableDevicesAddresses value;
        value.values.emplace_back(criterionValue);
        rule.criterionAndValue =
                AudioHalCapCriterionV2::make<Tag::availableOutputDevicesAddresses>(value);
    } else if (iequals(criterionName, toString(Tag::telephonyMode))) {
        AudioHalCapCriterionV2::TelephonyMode value;
        value.values.emplace_back(VALUE_OR_RETURN(convertTelephonyModeToAidl(criterionValue)));
        rule.criterionAndValue = AudioHalCapCriterionV2::make<Tag::telephonyMode>(value);
    } else if (!fastcmp<strncmp>(criterionName.c_str(), kXsdcForceConfigForUse,
            strlen(kXsdcForceConfigForUse))) {
        // Force-use criteria are matched by prefix; the criterion name itself
        // identifies which usage the forced config applies to.
        AudioHalCapCriterionV2::ForceConfigForUse value;
        value.values.emplace_back(
                VALUE_OR_RETURN(convertForceUseToAidl(criterionName, criterionValue)));
        rule.criterionAndValue = AudioHalCapCriterionV2::make<Tag::forceConfigForUse>(value);
    } else {
        LOG(ERROR) << __func__ << " unrecognized criterion " << criterionName;
        return unexpected(BAD_VALUE);
    }
    // Map the XSD match mode onto the AIDL matching rule.
    if (xsdcRule.getMatchesWhen() == eng_xsd::MatchesWhenEnum::Excludes) {
        rule.matchingRule = AudioHalCapRule::MatchingRule::EXCLUDES;
    } else if (xsdcRule.getMatchesWhen() == eng_xsd::MatchesWhenEnum::Includes) {
        rule.matchingRule = AudioHalCapRule::MatchingRule::INCLUDES;
    } else if (xsdcRule.getMatchesWhen() == eng_xsd::MatchesWhenEnum::Is) {
        rule.matchingRule = AudioHalCapRule::MatchingRule::IS;
    } else if (xsdcRule.getMatchesWhen() == eng_xsd::MatchesWhenEnum::IsNot) {
        rule.matchingRule = AudioHalCapRule::MatchingRule::IS_NOT;
    } else {
        LOG(ERROR) << "Unsupported match when rule.";
        return unexpected(BAD_VALUE);
    }
    return rule;
}
// Recursively converts an XSD <compoundRule> into an AIDL AudioHalCapRule:
// the ANY/ALL type, nested compound rules, and leaf criterion rules.
// Returns BAD_VALUE for an unsupported compound type.
ConversionResult<AudioHalCapRule> convertRule(const eng_xsd::CompoundRuleType& xsdcCompoundRule) {
    AudioHalCapRule rule{};
    if (xsdcCompoundRule.getType() == eng_xsd::TypeEnum::Any) {
        rule.compoundRule = AudioHalCapRule::CompoundRule::ANY;
    } else if (xsdcCompoundRule.getType() == eng_xsd::TypeEnum::All) {
        rule.compoundRule = AudioHalCapRule::CompoundRule::ALL;
    } else {
        LOG(ERROR) << "Unsupported compound rule type.";
        return unexpected(BAD_VALUE);
    }
    // Convert nested compound rules recursively. Both child kinds (nested
    // compound rules and nested criterion rules) were handled identically, so
    // the two original branches are merged; the unused bookkeeping flag
    // 'isPreviousCompoundRule' has been removed.
    for (const auto& childXsdcCompoundRule : xsdcCompoundRule.getCompoundRule_optional()) {
        if (childXsdcCompoundRule.hasCompoundRule_optional() ||
                childXsdcCompoundRule.hasSelectionCriterionRule_optional()) {
            rule.nestedRules.push_back(VALUE_OR_FATAL(convertRule(childXsdcCompoundRule)));
        }
    }
    // Convert this level's own leaf criterion rules.
    if (xsdcCompoundRule.hasSelectionCriterionRule_optional()) {
        for (const auto& xsdcRule : xsdcCompoundRule.getSelectionCriterionRule_optional()) {
            rule.criterionRules.push_back(VALUE_OR_FATAL(convertCriterionRuleToAidl(xsdcRule)));
        }
    }
    return rule;
}
// Derives the audio product strategy id from a configurable-element path by
// scanning its '/'-separated segments for either form:
//   "vx_<id>"         — numeric id parsed from the suffix
//   "STRATEGY_<NAME>" — looked up in the legacy strategy map
// Returns BAD_VALUE when no segment matches or the value cannot be resolved.
// (Removed an unused local std::vector<std::string>.)
ConversionResult<int> getAudioProductStrategyId(const std::string& path) {
    std::istringstream pathStream(path);
    std::string segment;
    while (getline(pathStream, segment, '/')) {
        std::size_t pos = segment.find(gStrategyPrefix);
        if (pos != std::string::npos) {
            std::string strategyIdLiteral = segment.substr(pos + std::strlen(gStrategyPrefix));
            int strategyId;
            if (!convertTo(strategyIdLiteral, strategyId)) {
                LOG(ERROR) << "Invalid strategy " << segment << " from path " << path;
                return unexpected(BAD_VALUE);
            }
            return strategyId;
        }
        pos = segment.find(gLegacyStrategyPrefix);
        if (pos != std::string::npos) {
            std::string legacyStrategyIdLiteral = segment.substr(pos);
            const auto legacyStrategies = getLegacyProductStrategyMap();
            if (const auto& it = legacyStrategies.find(legacyStrategyIdLiteral);
                it != legacyStrategies.end()) {
                return it->second;
            }
            LOG(ERROR) << "Invalid legacy strategy " << segment << " from path " << path;
            return unexpected(BAD_VALUE);
        }
    }
    return unexpected(BAD_VALUE);
}
// Extracts the AudioSource from a parameter path of the form
// ".../input_sources/<source>/...": the segment after "input_sources" is
// upper-cased, prefixed with "AUDIO_SOURCE_", and mapped through the legacy
// converter. Returns BAD_VALUE when no such segment exists or it is unknown.
// (Removed an unused local; std::toupper now receives unsigned char — passing
// a plain char with a negative value is undefined behavior.)
ConversionResult<AudioSource> getAudioSource(const std::string& path) {
    std::istringstream pathStream(path);
    std::string segment;
    while (getline(pathStream, segment, '/')) {
        if (segment.find(gInputSourcesParameter) != std::string::npos) {
            // The next path element names the source.
            getline(pathStream, segment, '/');
            std::transform(segment.begin(), segment.end(), segment.begin(),
                           [](unsigned char c) { return static_cast<char>(std::toupper(c)); });
            std::string legacySourceLiteral = "AUDIO_SOURCE_" + segment;
            audio_source_t legacySource;
            if (!::android::SourceTypeConverter::fromString(legacySourceLiteral, legacySource)) {
                LOG(ERROR) << "Invalid source " << segment << " from path " << path;
                return unexpected(BAD_VALUE);
            }
            return legacy2aidl_audio_source_t_AudioSource(legacySource);
        }
    }
    return unexpected(BAD_VALUE);
}
// Extracts the AudioStreamType from a parameter path of the form
// ".../streams/<stream>/...": the segment after "streams" is upper-cased,
// prefixed with "AUDIO_STREAM_", and mapped through the legacy converter.
// Returns BAD_VALUE when no such segment exists or it is unknown.
// (Removed an unused local; std::toupper now receives unsigned char — passing
// a plain char with a negative value is undefined behavior.)
ConversionResult<AudioStreamType> getAudioStreamType(const std::string& path) {
    std::istringstream pathStream(path);
    std::string segment;
    while (getline(pathStream, segment, '/')) {
        if (segment.find(gStreamsParameter) != std::string::npos) {
            // The next path element names the stream.
            getline(pathStream, segment, '/');
            std::transform(segment.begin(), segment.end(), segment.begin(),
                           [](unsigned char c) { return static_cast<char>(std::toupper(c)); });
            std::string legacyStreamLiteral = std::string(gLegacyStreamPrefix) + segment;
            audio_stream_type_t legacyStream;
            if (!::android::StreamTypeConverter::fromString(legacyStreamLiteral, legacyStream)) {
                LOG(ERROR) << "Invalid stream " << segment << " from path " << path;
                return unexpected(BAD_VALUE);
            }
            return legacy2aidl_audio_stream_type_t_AudioStreamType(legacyStream);
        }
    }
    return unexpected(BAD_VALUE);
}
// Upper-cases 'capName' and prepends 'legacyPrefix', e.g.
// ("speaker", "AUDIO_DEVICE_OUT_") -> "AUDIO_DEVICE_OUT_SPEAKER".
// std::toupper is called through unsigned char: passing a plain char with a
// negative value is undefined behavior.
ConversionResult<std::string> toUpperAndAppendPrefix(const std::string& capName,
                                                     const std::string& legacyPrefix) {
    std::string legacyName = capName;
    std::transform(legacyName.begin(), legacyName.end(), legacyName.begin(),
                   [](unsigned char c) { return static_cast<char>(std::toupper(c)); });
    return legacyPrefix + legacyName;
}
/**
 * Converts one XSD configurable element setting into an AIDL CAP parameter.
 *
 * The parameter kind is deduced from markers found in the element's path:
 *  - strategies + output devices: per-strategy device selection (bit param, 1/0);
 *  - strategies + device address: strategy device address (string param);
 *  - input sources: per-source input device selection (bit param, 1/0);
 *  - streams: volume profile (enum param naming another stream).
 *
 * @param element XSD configurable element carrying its path and typed value.
 * @return The converted parameter, or BAD_VALUE when the element's value type
 *         does not match what the path requires. Nested conversion failures
 *         abort the process via VALUE_OR_FATAL.
 */
ConversionResult<AudioHalCapParameter> CapEngineConfigXmlConverter::convertParamToAidl(
        const eng_xsd::ConfigurableElementSettingsType& element) {
    const auto& path = element.getPath();
    AudioHalCapParameter parameterSetting;
    if (path.find(gStrategiesParameter) != std::string::npos) {
        int strategyId = VALUE_OR_FATAL(getAudioProductStrategyId(path));
        if (path.find(gOutputDevicesParameter) != std::string::npos) {
            // Value is 1 or 0
            if (!element.hasBitParameter_optional()) {
                LOG(ERROR) << "Invalid strategy value type";
                return unexpected(BAD_VALUE);
            }
            // Convert name to output device type
            const auto* xsdcParam = element.getFirstBitParameter_optional();
            std::string outputDevice = VALUE_OR_FATAL(toUpperAndAppendPrefix(
                    eng_xsd::toString(xsdcParam->getName()), gLegacyOutputDevicePrefix));
            audio_devices_t legacyType;
            if (!::android::OutputDeviceConverter::fromString(outputDevice, legacyType)) {
                LOG(ERROR) << "Invalid strategy device type " << outputDevice;
                return unexpected(BAD_VALUE);
            }
            AudioDeviceDescription aidlDevice =
                    VALUE_OR_FATAL(legacy2aidl_audio_devices_t_AudioDeviceDescription(legacyType));
            bool isSelected;
            if (!convertTo(xsdcParam->getValue(), isSelected)) {
                LOG(ERROR) << "Invalid strategy device selection value " << xsdcParam->getValue();
                return unexpected(BAD_VALUE);
            }
            parameterSetting =
                    AudioHalCapParameter::StrategyDevice(aidlDevice, strategyId, isSelected);
        } else if (path.find(gOutputDeviceAddressParameter) != std::string::npos) {
            // Value is the address
            if (!element.hasStringParameter_optional()) {
                return unexpected(BAD_VALUE);
            }
            std::string address = element.getFirstStringParameter_optional()->getValue();
            parameterSetting = AudioHalCapParameter::StrategyDeviceAddress(
                    AudioDeviceAddress(address), strategyId);
        }
    } else if (path.find(gInputSourcesParameter) != std::string::npos) {
        // Value is 1 or 0
        if (!element.hasBitParameter_optional()) {
            LOG(ERROR) << "Invalid source value type";
            return unexpected(BAD_VALUE);
        }
        AudioSource audioSourceAidl = VALUE_OR_FATAL(getAudioSource(path));
        // The bit parameter's name encodes the input device the source maps to.
        const auto* xsdcParam = element.getFirstBitParameter_optional();
        std::string inputDeviceLiteral = VALUE_OR_FATAL(toUpperAndAppendPrefix(
                eng_xsd::toString(xsdcParam->getName()), gLegacyInputDevicePrefix));
        audio_devices_t inputDeviceType;
        if (!::android::InputDeviceConverter::fromString(inputDeviceLiteral, inputDeviceType)) {
            LOG(ERROR) << "Invalid source device type " << inputDeviceLiteral;
            return unexpected(BAD_VALUE);
        }
        AudioDeviceDescription aidlDevice =
                VALUE_OR_FATAL(legacy2aidl_audio_devices_t_AudioDeviceDescription(inputDeviceType));
        bool isSelected;
        if (!convertTo(xsdcParam->getValue(), isSelected)) {
            LOG(ERROR) << "Invalid source value type " << xsdcParam->getValue();
            return unexpected(BAD_VALUE);
        }
        parameterSetting =
                AudioHalCapParameter::InputSourceDevice(aidlDevice, audioSourceAidl, isSelected);
    } else if (path.find(gStreamsParameter) != std::string::npos) {
        AudioStreamType audioStreamAidl = VALUE_OR_FATAL(getAudioStreamType(path));
        if (!element.hasEnumParameter_optional()) {
            LOG(ERROR) << "Invalid stream value type";
            return unexpected(BAD_VALUE);
        }
        // The enum value names the stream whose volume curves are followed.
        const auto* xsdcParam = element.getFirstEnumParameter_optional();
        std::string profileLiteral =
                VALUE_OR_FATAL(toUpperAndAppendPrefix(xsdcParam->getValue(), gLegacyStreamPrefix));
        audio_stream_type_t profileLegacyStream;
        if (!::android::StreamTypeConverter::fromString(profileLiteral, profileLegacyStream)) {
            LOG(ERROR) << "Invalid stream value " << profileLiteral;
            return unexpected(BAD_VALUE);
        }
        AudioStreamType profileStreamAidl = VALUE_OR_FATAL(
                legacy2aidl_audio_stream_type_t_AudioStreamType(profileLegacyStream));
        parameterSetting =
                AudioHalCapParameter::StreamVolumeProfile(audioStreamAidl, profileStreamAidl);
    }
    // NOTE(review): a path matching none of the markers (and a strategy path
    // with neither device nor address marker) falls through and returns a
    // default-initialized parameter — confirm this is intended.
    return parameterSetting;
}
// Converts every configurable element of one XSD settings entry into its AIDL
// CAP parameter representation. Individual conversion failures abort the
// process via VALUE_OR_FATAL.
ConversionResult<std::vector<AudioHalCapParameter>>
CapEngineConfigXmlConverter::convertSettingToAidl(
        const eng_xsd::SettingsType::Configuration& xsdcSetting) {
    const auto& xsdcElements = xsdcSetting.getConfigurableElement();
    std::vector<AudioHalCapParameter> parameters;
    parameters.reserve(xsdcElements.size());
    for (const auto& xsdcElement : xsdcElements) {
        parameters.push_back(VALUE_OR_FATAL(convertParamToAidl(xsdcElement)));
    }
    return parameters;
}
/**
 * Converts one XSD configuration (its rule) plus its matching settings entry
 * into an AIDL CAP configuration.
 *
 * @param xsdcConfiguration Configuration element providing name and rule.
 * @param xsdcSettingConfiguration Settings entry providing parameter values.
 * @return The AIDL configuration, or BAD_VALUE if the configuration does not
 *         carry exactly one compound rule.
 */
ConversionResult<AudioHalCapConfiguration> CapEngineConfigXmlConverter::convertConfigurationToAidl(
        const eng_xsd::ConfigurationsType::Configuration& xsdcConfiguration,
        const eng_xsd::SettingsType::Configuration& xsdcSettingConfiguration) {
    AudioHalCapConfiguration aidlCapConfiguration;
    aidlCapConfiguration.name = xsdcConfiguration.getName();
    if (xsdcConfiguration.hasCompoundRule()) {
        // Exactly one top-level compound rule per configuration is supported.
        if (xsdcConfiguration.getCompoundRule().size() != 1) {
            return unexpected(BAD_VALUE);
        }
        aidlCapConfiguration.rule =
                VALUE_OR_FATAL(convertRule(xsdcConfiguration.getCompoundRule()[0]));
        // NOTE(review): parameter settings are only converted when a compound
        // rule is present — confirm rule-less configurations never carry any.
        aidlCapConfiguration.parameterSettings =
                VALUE_OR_FATAL(convertSettingToAidl(xsdcSettingConfiguration));
    }
    return aidlCapConfiguration;
}
// Looks up the settings entry named `name` across all provided settings
// sections. Returns BAD_VALUE (with an error log) when no section contains a
// configuration with that name.
ConversionResult<eng_xsd::SettingsType::Configuration> getConfigurationByName(
        const std::string& name, const std::vector<eng_xsd::SettingsType>& xsdcSettingsVec) {
    for (const auto& settings : xsdcSettingsVec) {
        const auto& configurations = settings.getConfiguration();
        auto match = std::find_if(configurations.begin(), configurations.end(),
                                  [&name](const auto& configuration) {
                                      return configuration.getName() == name;
                                  });
        if (match != configurations.end()) {
            return *match;
        }
    }
    LOG(ERROR) << __func__ << " failed to find configuration " << name;
    return unexpected(BAD_VALUE);
}
/**
 * Converts all XSD configurations into AIDL CAP configurations, pairing each
 * configuration (rule) with the settings entry of the same name.
 *
 * @param xsdcConfigurationsVec Configuration (rule) sections.
 * @param xsdcSettingsVec Settings sections providing the parameter values.
 * @return The converted configurations, or BAD_VALUE if either input is
 *         empty. A configuration without a same-named settings entry aborts
 *         the process via VALUE_OR_FATAL.
 */
ConversionResult<std::vector<AudioHalCapConfiguration>>
CapEngineConfigXmlConverter::convertConfigurationsToAidl(
        const std::vector<eng_xsd::ConfigurationsType>& xsdcConfigurationsVec,
        const std::vector<eng_xsd::SettingsType>& xsdcSettingsVec) {
    if (xsdcConfigurationsVec.empty() || xsdcSettingsVec.empty()) {
        LOG(ERROR) << __func__ << " empty configurations/settings";
        return unexpected(BAD_VALUE);
    }
    std::vector<AudioHalCapConfiguration> aidlConfigurations;
    for (const auto& xsdcConfigurations : xsdcConfigurationsVec) {
        for (const auto& xsdcConfiguration : xsdcConfigurations.getConfiguration()) {
            // Rules and settings are stored in separate XML sections; join by name.
            auto xsdcSettingConfiguration = VALUE_OR_FATAL(
                    getConfigurationByName(xsdcConfiguration.getName(), xsdcSettingsVec));
            aidlConfigurations.push_back(VALUE_OR_FATAL(
                    convertConfigurationToAidl(xsdcConfiguration, xsdcSettingConfiguration)));
        }
    }
    return aidlConfigurations;
}
/**
 * Converts one XSD configurable domain into an AIDL CAP domain.
 *
 * @param xsdcConfigurableDomain Domain with its configurations and settings.
 * @return The AIDL domain, or BAD_VALUE if the domain is sequence-aware,
 *         which this implementation does not support.
 */
ConversionResult<AudioHalCapDomain> CapEngineConfigXmlConverter::convertConfigurableDomainToAidl(
        const eng_xsd::ConfigurableDomainType& xsdcConfigurableDomain) {
    AudioHalCapDomain aidlConfigurableDomain;
    aidlConfigurableDomain.name = xsdcConfigurableDomain.getName();
    if (xsdcConfigurableDomain.hasSequenceAware() && xsdcConfigurableDomain.getSequenceAware()) {
        LOG(ERROR) << "sequence aware not supported.";
        return unexpected(BAD_VALUE);
    }
    // Configurations can only be converted when both the rule definitions and
    // their settings are present; otherwise the domain is returned empty.
    if (xsdcConfigurableDomain.hasConfigurations() && xsdcConfigurableDomain.hasSettings()) {
        aidlConfigurableDomain.configurations = VALUE_OR_FATAL(convertConfigurationsToAidl(
                xsdcConfigurableDomain.getConfigurations(), xsdcConfigurableDomain.getSettings()));
    }
    return aidlConfigurableDomain;
}
// Converts all configurable domains of the parsed XSD engine configuration
// into mAidlCapDomains. Left as std::nullopt when the config declares no
// domain; individual domain conversion failures abort via VALUE_OR_FATAL.
void CapEngineConfigXmlConverter::init() {
    if (getXsdcConfig()->hasConfigurableDomain()) {
        mAidlCapDomains = std::make_optional<>(VALUE_OR_FATAL(
                (convertCollectionToAidlOptionalValues<eng_xsd::ConfigurableDomainType,
                                                       AudioHalCapDomain>(
                        getXsdcConfig()->getConfigurableDomain(),
                        std::bind(&CapEngineConfigXmlConverter::convertConfigurableDomainToAidl,
                                  this, std::placeholders::_1)))));
    } else {
        mAidlCapDomains = std::nullopt;
    }
}
} // namespace aidl::android::hardware::audio::core::internal

67
audio/Config.cpp Normal file
View File

@@ -0,0 +1,67 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define LOG_TAG "AHAL_Config"
#include <android-base/logging.h>
#include <system/audio_config.h>
#include "core-impl/AudioPolicyConfigXmlConverter.h"
#include "core-impl/Config.h"
#include "core-impl/EngineConfigXmlConverter.h"
using aidl::android::media::audio::common::AudioHalEngineConfig;
namespace aidl::android::hardware::audio::core {
// Returns the surround sound configuration parsed from the audio policy XML.
ndk::ScopedAStatus Config::getSurroundSoundConfig(SurroundSoundConfig* _aidl_return) {
    // __func__ is scoped to this function; keep a static alias so the
    // init-once lambda below can reference it when logging.
    static const auto& func = __func__;
    // Parsed once, on the first call, using that caller's converter instance;
    // every later call (on any instance) returns the cached copy.
    static const SurroundSoundConfig surroundSoundConfig = [this]() {
        SurroundSoundConfig surroundCfg = mAudioPolicyConverter.getSurroundSoundConfig();
        if (mAudioPolicyConverter.getStatus() != ::android::OK) {
            // Parse problems are not fatal; whatever the converter produced is used.
            LOG(WARNING) << func << ": " << mAudioPolicyConverter.getError();
        }
        return surroundCfg;
    }();
    *_aidl_return = surroundSoundConfig;
    LOG(DEBUG) << __func__ << ": returning " << _aidl_return->toString();
    return ndk::ScopedAStatus::ok();
}
// Returns the engine configuration, preferring the engine XML converter and
// falling back to the configuration derived from the audio policy XML.
ndk::ScopedAStatus Config::getEngineConfig(AudioHalEngineConfig* _aidl_return) {
    // __func__ is scoped to this function; keep a static alias so the
    // init-once lambda below can reference it when logging.
    static const auto& func = __func__;
    // Computed once, on the first call, using that caller's converters;
    // every later call (on any instance) returns the cached copy.
    static const AudioHalEngineConfig returnEngCfg = [this]() {
        AudioHalEngineConfig engConfig;
        if (mEngConfigConverter.getStatus() == ::android::OK) {
            engConfig = mEngConfigConverter.getAidlEngineConfig();
        } else {
            LOG(INFO) << func << ": " << mEngConfigConverter.getError();
            if (mAudioPolicyConverter.getStatus() == ::android::OK) {
                engConfig = mAudioPolicyConverter.getAidlEngineConfig();
            } else {
                // Both parsers failed: a default-constructed config is returned.
                LOG(WARNING) << func << ": " << mAudioPolicyConverter.getError();
            }
        }
        // Logging full contents of the config is an overkill, just provide statistics.
        LOG(DEBUG) << func
                   << ": number of strategies parsed: " << engConfig.productStrategies.size()
                   << ", default strategy: " << engConfig.defaultProductStrategyId
                   << ", number of volume groups parsed: " << engConfig.volumeGroups.size();
        return engConfig;
    }();
    *_aidl_return = returnEngCfg;
    return ndk::ScopedAStatus::ok();
}
} // namespace aidl::android::hardware::audio::core

707
audio/Configuration.cpp Normal file
View File

@@ -0,0 +1,707 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <Utils.h>
#include <aidl/android/media/audio/common/AudioChannelLayout.h>
#include <aidl/android/media/audio/common/AudioDeviceType.h>
#include <aidl/android/media/audio/common/AudioFormatDescription.h>
#include <aidl/android/media/audio/common/AudioFormatType.h>
#include <aidl/android/media/audio/common/AudioIoFlags.h>
#include <aidl/android/media/audio/common/AudioOutputFlags.h>
#include <media/stagefright/foundation/MediaDefs.h>
#include "core-impl/Configuration.h"
using aidl::android::hardware::audio::common::makeBitPositionFlagMask;
using aidl::android::media::audio::common::AudioChannelLayout;
using aidl::android::media::audio::common::AudioDeviceDescription;
using aidl::android::media::audio::common::AudioDeviceType;
using aidl::android::media::audio::common::AudioFormatDescription;
using aidl::android::media::audio::common::AudioFormatType;
using aidl::android::media::audio::common::AudioGainConfig;
using aidl::android::media::audio::common::AudioIoFlags;
using aidl::android::media::audio::common::AudioOutputFlags;
using aidl::android::media::audio::common::AudioPort;
using aidl::android::media::audio::common::AudioPortConfig;
using aidl::android::media::audio::common::AudioPortDeviceExt;
using aidl::android::media::audio::common::AudioPortExt;
using aidl::android::media::audio::common::AudioPortMixExt;
using aidl::android::media::audio::common::AudioProfile;
using aidl::android::media::audio::common::Int;
using aidl::android::media::audio::common::PcmType;
using Configuration = aidl::android::hardware::audio::core::Module::Configuration;
namespace aidl::android::hardware::audio::core::internal {
// Appends the given channel layouts (wrapped as 'layoutMask' channel masks)
// and sample rates to an existing audio profile.
static void fillProfile(AudioProfile* profile, const std::vector<int32_t>& channelLayouts,
                        const std::vector<int32_t>& sampleRates) {
    std::transform(channelLayouts.begin(), channelLayouts.end(),
                   std::back_inserter(profile->channelMasks), [](int32_t layout) {
                       return AudioChannelLayout::make<AudioChannelLayout::layoutMask>(layout);
                   });
    for (int32_t rate : sampleRates) {
        profile->sampleRates.push_back(rate);
    }
}
// Builds a PCM profile of the given sample type with the specified channel
// layouts and sample rates.
static AudioProfile createProfile(PcmType pcmType, const std::vector<int32_t>& channelLayouts,
                                  const std::vector<int32_t>& sampleRates) {
    AudioProfile pcmProfile;
    pcmProfile.format = AudioFormatDescription{.type = AudioFormatType::PCM, .pcm = pcmType};
    fillProfile(&pcmProfile, channelLayouts, sampleRates);
    return pcmProfile;
}
// Builds a non-PCM (encoded) profile identified by its MIME/encoding string,
// with the specified channel layouts and sample rates.
static AudioProfile createProfile(const std::string& encodingType,
                                  const std::vector<int32_t>& channelLayouts,
                                  const std::vector<int32_t>& sampleRates) {
    AudioProfile encodedProfile;
    encodedProfile.format.encoding = encodingType;
    fillProfile(&encodedProfile, channelLayouts, sampleRates);
    return encodedProfile;
}
// Wraps a device description into an AudioPortExt. Built-in microphones (no
// connection string) receive canonical addresses so they can be distinguished.
static AudioPortExt createDeviceExt(AudioDeviceType devType, int32_t flags,
                                    std::string connection = "") {
    AudioPortDeviceExt deviceExt;
    deviceExt.device.type.type = devType;
    if (connection.empty()) {
        if (devType == AudioDeviceType::IN_MICROPHONE) {
            deviceExt.device.address = "bottom";
        } else if (devType == AudioDeviceType::IN_MICROPHONE_BACK) {
            deviceExt.device.address = "back";
        }
    }
    deviceExt.device.type.connection = std::move(connection);
    deviceExt.flags = flags;
    return AudioPortExt::make<AudioPortExt::Tag::device>(deviceExt);
}
// Wraps mix port stream-count limits into an AudioPortExt.
static AudioPortExt createPortMixExt(int32_t maxOpenStreamCount, int32_t maxActiveStreamCount) {
    AudioPortMixExt mix;
    mix.maxOpenStreamCount = maxOpenStreamCount;
    mix.maxActiveStreamCount = maxActiveStreamCount;
    return AudioPortExt::make<AudioPortExt::Tag::mix>(mix);
}
// Builds an audio port with the given identity, direction-tagged I/O flags,
// and extension (device or mix specifics).
static AudioPort createPort(int32_t id, const std::string& name, int32_t flags, bool isInput,
                            const AudioPortExt& ext) {
    AudioPort audioPort;
    audioPort.id = id;
    audioPort.name = name;
    if (isInput) {
        audioPort.flags = AudioIoFlags::make<AudioIoFlags::Tag::input>(flags);
    } else {
        audioPort.flags = AudioIoFlags::make<AudioIoFlags::Tag::output>(flags);
    }
    audioPort.ext = ext;
    return audioPort;
}
// Builds a "dynamic" port config: format, channel mask, sample rate and gain
// are set to engaged-but-default values (presumably meaning "to be negotiated
// later", matching the function name — confirm against the config framework).
static AudioPortConfig createDynamicPortConfig(int32_t id, int32_t portId, int32_t flags,
                                               bool isInput, const AudioPortExt& ext) {
    AudioPortConfig portConfig;
    portConfig.id = id;
    portConfig.portId = portId;
    portConfig.format = AudioFormatDescription{};
    portConfig.channelMask = AudioChannelLayout{};
    portConfig.sampleRate = Int{.value = 0};
    portConfig.gain = AudioGainConfig();
    if (isInput) {
        portConfig.flags = AudioIoFlags::make<AudioIoFlags::Tag::input>(flags);
    } else {
        portConfig.flags = AudioIoFlags::make<AudioIoFlags::Tag::output>(flags);
    }
    portConfig.ext = ext;
    return portConfig;
}
// Builds a fully-specified PCM port config by overriding the dynamic defaults
// with a concrete format, channel layout and sample rate.
static AudioPortConfig createPortConfig(int32_t id, int32_t portId, PcmType pcmType, int32_t layout,
                                        int32_t sampleRate, int32_t flags, bool isInput,
                                        const AudioPortExt& ext) {
    AudioPortConfig portConfig = createDynamicPortConfig(id, portId, flags, isInput, ext);
    portConfig.format = AudioFormatDescription{.type = AudioFormatType::PCM, .pcm = pcmType};
    portConfig.channelMask = AudioChannelLayout::make<AudioChannelLayout::layoutMask>(layout);
    portConfig.sampleRate = Int{.value = sampleRate};
    return portConfig;
}
// Builds a route connecting every source port to the single sink port.
static AudioRoute createRoute(const std::vector<AudioPort>& sources, const AudioPort& sink) {
    AudioRoute route;
    route.sinkPortId = sink.id;
    route.sourcePortIds.reserve(sources.size());
    for (const auto& source : sources) {
        route.sourcePortIds.push_back(source.id);
    }
    return route;
}
std::vector<AudioProfile> getStandard16And24BitPcmAudioProfiles() {
auto createStdPcmAudioProfile = [](const PcmType& pcmType) {
return AudioProfile{
.format = AudioFormatDescription{.type = AudioFormatType::PCM, .pcm = pcmType},
.channelMasks = {AudioChannelLayout::make<AudioChannelLayout::layoutMask>(
AudioChannelLayout::LAYOUT_MONO),
AudioChannelLayout::make<AudioChannelLayout::layoutMask>(
AudioChannelLayout::LAYOUT_STEREO)},
.sampleRates = {8000, 11025, 16000, 32000, 44100, 48000}};
};
return {
createStdPcmAudioProfile(PcmType::INT_16_BIT),
createStdPcmAudioProfile(PcmType::INT_24_BIT),
};
}
// Primary (default) configuration:
//
// Device ports:
// * "Speaker", OUT_SPEAKER, default
// - no profiles specified
// * "Built-In Mic", IN_MICROPHONE, default
// - no profiles specified
// * "Telephony Tx", OUT_TELEPHONY_TX
// - no profiles specified
// * "Telephony Rx", IN_TELEPHONY_RX
// - no profiles specified
// * "FM Tuner", IN_FM_TUNER
// - no profiles specified
//
// Mix ports:
// * "primary output", PRIMARY, 1 max open, 1 max active stream
// - profile PCM 16-bit; MONO, STEREO; 8000, 11025, 16000, 32000, 44100, 48000
// * "primary input", 1 max open, 1 max active stream
// - profile PCM 16-bit; MONO, STEREO; 8000, 11025, 16000, 32000, 44100, 48000
// * "telephony_tx", 1 max open, 1 max active stream
// - profile PCM 16-bit; MONO, STEREO; 8000, 11025, 16000, 32000, 44100, 48000
// * "telephony_rx", 1 max open, 1 max active stream
// - profile PCM 16-bit; MONO, STEREO; 8000, 11025, 16000, 32000, 44100, 48000
// * "fm_tuner", 1 max open, 1 max active stream
// - profile PCM 16-bit; MONO, STEREO; 8000, 11025, 16000, 32000, 44100, 48000
//
// Routes:
// "primary out" -> "Speaker"
// "Built-In Mic" -> "primary input"
// "Telephony Rx" -> "telephony_rx"
// "telephony_tx" -> "Telephony Tx"
// "FM Tuner" -> "fm_tuner"
//
// Initial port configs:
// * "Speaker" device port: dynamic configuration
// * "Built-In Mic" device port: dynamic configuration
// * "Telephony Tx" device port: dynamic configuration
// * "Telephony Rx" device port: dynamic configuration
// * "FM Tuner" device port: dynamic configuration
//
// Builds the static "primary" module configuration described in the comment
// block above: speaker / built-in mic / telephony Tx & Rx / FM tuner device
// ports, the matching mix ports carrying the standard PCM profile, and the
// routes between them. Port IDs are assigned sequentially via c.nextPortId++,
// so the creation order below is significant. The result is computed once and
// a fresh copy is returned to each caller.
std::unique_ptr<Configuration> getPrimaryConfiguration() {
    static const Configuration configuration = []() {
        const std::vector<AudioProfile> standardPcmAudioProfiles = {
                createProfile(PcmType::INT_16_BIT,
                              {AudioChannelLayout::LAYOUT_MONO, AudioChannelLayout::LAYOUT_STEREO},
                              {8000, 11025, 16000, 32000, 44100, 48000})};
        Configuration c;
        // Device ports
        AudioPort speakerOutDevice =
                createPort(c.nextPortId++, "Speaker", 0, false,
                           createDeviceExt(AudioDeviceType::OUT_SPEAKER,
                                           1 << AudioPortDeviceExt::FLAG_INDEX_DEFAULT_DEVICE));
        c.ports.push_back(speakerOutDevice);
        // Each device port starts with a dynamic (unspecified) configuration.
        c.initialConfigs.push_back(
                createDynamicPortConfig(speakerOutDevice.id, speakerOutDevice.id, 0, false,
                                        createDeviceExt(AudioDeviceType::OUT_SPEAKER, 0)));
        AudioPort micInDevice =
                createPort(c.nextPortId++, "Built-In Mic", 0, true,
                           createDeviceExt(AudioDeviceType::IN_MICROPHONE,
                                           1 << AudioPortDeviceExt::FLAG_INDEX_DEFAULT_DEVICE));
        c.ports.push_back(micInDevice);
        c.initialConfigs.push_back(
                createDynamicPortConfig(micInDevice.id, micInDevice.id, 0, true,
                                        createDeviceExt(AudioDeviceType::IN_MICROPHONE, 0)));
        AudioPort telephonyTxOutDevice =
                createPort(c.nextPortId++, "Telephony Tx", 0, false,
                           createDeviceExt(AudioDeviceType::OUT_TELEPHONY_TX, 0));
        c.ports.push_back(telephonyTxOutDevice);
        c.initialConfigs.push_back(
                createDynamicPortConfig(telephonyTxOutDevice.id, telephonyTxOutDevice.id, 0, false,
                                        createDeviceExt(AudioDeviceType::OUT_TELEPHONY_TX, 0)));
        AudioPort telephonyRxInDevice =
                createPort(c.nextPortId++, "Telephony Rx", 0, true,
                           createDeviceExt(AudioDeviceType::IN_TELEPHONY_RX, 0));
        c.ports.push_back(telephonyRxInDevice);
        c.initialConfigs.push_back(
                createDynamicPortConfig(telephonyRxInDevice.id, telephonyRxInDevice.id, 0, true,
                                        createDeviceExt(AudioDeviceType::IN_TELEPHONY_RX, 0)));
        AudioPort fmTunerInDevice = createPort(c.nextPortId++, "FM Tuner", 0, true,
                                               createDeviceExt(AudioDeviceType::IN_FM_TUNER, 0));
        c.ports.push_back(fmTunerInDevice);
        c.initialConfigs.push_back(
                createDynamicPortConfig(fmTunerInDevice.id, fmTunerInDevice.id, 0, true,
                                        createDeviceExt(AudioDeviceType::IN_FM_TUNER, 0)));
        // Mix ports
        // NOTE(review): stream-count limits (0, 0) differ from the "1 max
        // open, 1 max active" stated in the comment block above — confirm
        // which is authoritative.
        AudioPort primaryOutMix = createPort(c.nextPortId++, "primary output",
                                             makeBitPositionFlagMask(AudioOutputFlags::PRIMARY),
                                             false, createPortMixExt(0, 0));
        primaryOutMix.profiles.insert(primaryOutMix.profiles.begin(),
                                      standardPcmAudioProfiles.begin(),
                                      standardPcmAudioProfiles.end());
        c.ports.push_back(primaryOutMix);
        AudioPort primaryInMix =
                createPort(c.nextPortId++, "primary input", 0, true, createPortMixExt(0, 1));
        primaryInMix.profiles.push_back(
                createProfile(PcmType::INT_16_BIT,
                              {AudioChannelLayout::LAYOUT_MONO, AudioChannelLayout::LAYOUT_STEREO},
                              {8000, 11025, 16000, 32000, 44100, 48000}));
        c.ports.push_back(primaryInMix);
        AudioPort telephonyTxOutMix =
                createPort(c.nextPortId++, "telephony_tx", 0, false, createPortMixExt(1, 1));
        telephonyTxOutMix.profiles.insert(telephonyTxOutMix.profiles.begin(),
                                          standardPcmAudioProfiles.begin(),
                                          standardPcmAudioProfiles.end());
        c.ports.push_back(telephonyTxOutMix);
        AudioPort telephonyRxInMix =
                createPort(c.nextPortId++, "telephony_rx", 0, true, createPortMixExt(0, 1));
        telephonyRxInMix.profiles.insert(telephonyRxInMix.profiles.begin(),
                                         standardPcmAudioProfiles.begin(),
                                         standardPcmAudioProfiles.end());
        c.ports.push_back(telephonyRxInMix);
        AudioPort fmTunerInMix =
                createPort(c.nextPortId++, "fm_tuner", 0, true, createPortMixExt(0, 1));
        fmTunerInMix.profiles.insert(fmTunerInMix.profiles.begin(),
                                     standardPcmAudioProfiles.begin(),
                                     standardPcmAudioProfiles.end());
        c.ports.push_back(fmTunerInMix);
        // Routes
        c.routes.push_back(createRoute({primaryOutMix}, speakerOutDevice));
        c.routes.push_back(createRoute({micInDevice}, primaryInMix));
        c.routes.push_back(createRoute({telephonyRxInDevice}, telephonyRxInMix));
        c.routes.push_back(createRoute({telephonyTxOutMix}, telephonyTxOutDevice));
        c.routes.push_back(createRoute({fmTunerInDevice}, fmTunerInMix));
        // The initial device configs are also the module's active port configs.
        c.portConfigs.insert(c.portConfigs.end(), c.initialConfigs.begin(), c.initialConfigs.end());
        return c;
    }();
    return std::make_unique<Configuration>(configuration);
}
// Note: When transitioning to loading of XML configs, either keep the configuration
// of the remote submix sources from this static configuration, or update the XML
// config to match it. There are several reasons for that:
// 1. The "Remote Submix In" device is listed in the XML config as "attached",
// however in the AIDL scheme its device type has a "virtual" connection.
// 2. The canonical r_submix configuration only lists 'STEREO' and '48000',
// however the framework attempts to open streams for other sample rates
// as well. The legacy r_submix implementation allowed that, but libaudiohal@aidl
// will not find a mix port to use. Because of that, list all sample rates that
// the legacy implementation allowed (note that mono was not allowed, the framework
// is expected to upmix mono tracks into stereo if needed).
// 3. The legacy implementation had a hard limit on the number of routes (10),
// and this is checked indirectly by AudioPlaybackCaptureTest#testPlaybackCaptureDoS
// CTS test. Instead of hardcoding the number of routes, we can use
// "maxOpen/ActiveStreamCount" to enforce a similar limit. However, the canonical
// XML file lacks this specification.
//
// Remote Submix configuration:
//
// Device ports:
// * "Remote Submix Out", OUT_SUBMIX
// - no profiles specified
// * "Remote Submix In", IN_SUBMIX
// - no profiles specified
//
// Mix ports:
// * "r_submix output", maximum 10 opened streams, maximum 10 active streams
// - profile PCM 16-bit; STEREO; 8000, 11025, 16000, 32000, 44100, 48000, 192000
// * "r_submix input", maximum 10 opened streams, maximum 10 active streams
// - profile PCM 16-bit; STEREO; 8000, 11025, 16000, 32000, 44100, 48000, 192000
//
// Routes:
// "r_submix output" -> "Remote Submix Out"
// "Remote Submix In" -> "r_submix input"
//
// Builds the static remote submix module configuration described in the
// comment block above: virtual-connection submix device ports and the
// corresponding r_submix mix ports (10 open / 10 active streams each), all
// sharing the same 16-bit stereo PCM profile set. Port IDs are assigned
// sequentially via c.nextPortId++, so creation order matters. Computed once;
// each caller gets a fresh copy.
std::unique_ptr<Configuration> getRSubmixConfiguration() {
    static const Configuration configuration = []() {
        Configuration c;
        const std::vector<AudioProfile> remoteSubmixPcmAudioProfiles{
                createProfile(PcmType::INT_16_BIT, {AudioChannelLayout::LAYOUT_STEREO},
                              {8000, 11025, 16000, 32000, 44100, 48000, 192000})};
        // Device ports
        AudioPort rsubmixOutDevice =
                createPort(c.nextPortId++, "Remote Submix Out", 0, false,
                           createDeviceExt(AudioDeviceType::OUT_SUBMIX, 0,
                                           AudioDeviceDescription::CONNECTION_VIRTUAL));
        c.ports.push_back(rsubmixOutDevice);
        // Profiles advertised once the (virtual) device is connected.
        c.connectedProfiles[rsubmixOutDevice.id] = remoteSubmixPcmAudioProfiles;
        AudioPort rsubmixInDevice =
                createPort(c.nextPortId++, "Remote Submix In", 0, true,
                           createDeviceExt(AudioDeviceType::IN_SUBMIX, 0,
                                           AudioDeviceDescription::CONNECTION_VIRTUAL));
        c.ports.push_back(rsubmixInDevice);
        c.connectedProfiles[rsubmixInDevice.id] = remoteSubmixPcmAudioProfiles;
        // Mix ports
        AudioPort rsubmixOutMix =
                createPort(c.nextPortId++, "r_submix output", 0, false, createPortMixExt(10, 10));
        rsubmixOutMix.profiles = remoteSubmixPcmAudioProfiles;
        c.ports.push_back(rsubmixOutMix);
        AudioPort rsubmixInMix =
                createPort(c.nextPortId++, "r_submix input", 0, true, createPortMixExt(10, 10));
        rsubmixInMix.profiles = remoteSubmixPcmAudioProfiles;
        c.ports.push_back(rsubmixInMix);
        // Routes
        c.routes.push_back(createRoute({rsubmixOutMix}, rsubmixOutDevice));
        c.routes.push_back(createRoute({rsubmixInDevice}, rsubmixInMix));
        return c;
    }();
    return std::make_unique<Configuration>(configuration);
}
// Usb configuration:
//
// Device ports:
// * "USB Device Out", OUT_DEVICE, CONNECTION_USB
// - no profiles specified
// * "USB Headset Out", OUT_HEADSET, CONNECTION_USB
// - no profiles specified
// * "USB Device In", IN_DEVICE, CONNECTION_USB
// - no profiles specified
// * "USB Headset In", IN_HEADSET, CONNECTION_USB
// - no profiles specified
//
// Mix ports:
// * "usb_device output", 1 max open, 1 max active stream
// - no profiles specified
// * "usb_device input", 1 max open, 1 max active stream
// - no profiles specified
//
// Routes:
// * "usb_device output" -> "USB Device Out"
// * "usb_device output" -> "USB Headset Out"
// * "USB Device In", "USB Headset In" -> "usb_device input"
//
// Profiles for device port connected state (when simulating connections):
// * "USB Device Out", "USB Headset Out":
// - profile PCM 16-bit; MONO, STEREO, INDEX_MASK_1, INDEX_MASK_2; 44100, 48000
// - profile PCM 24-bit; MONO, STEREO, INDEX_MASK_1, INDEX_MASK_2; 44100, 48000
// * "USB Device In", "USB Headset In":
// - profile PCM 16-bit; MONO, STEREO, INDEX_MASK_1, INDEX_MASK_2; 44100, 48000
// - profile PCM 24-bit; MONO, STEREO, INDEX_MASK_1, INDEX_MASK_2; 44100, 48000
//
// Builds the static USB module configuration described in the comment block
// above: USB device/headset ports in both directions (profiles only appear
// when a connection is simulated) and profile-less usb_device mix ports.
// Port IDs are assigned sequentially via c.nextPortId++, so creation order
// matters. Computed once; each caller gets a fresh copy.
std::unique_ptr<Configuration> getUsbConfiguration() {
    static const Configuration configuration = []() {
        const std::vector<AudioProfile> standardPcmAudioProfiles = {
                createProfile(PcmType::INT_16_BIT,
                              {AudioChannelLayout::LAYOUT_MONO, AudioChannelLayout::LAYOUT_STEREO,
                               AudioChannelLayout::INDEX_MASK_1, AudioChannelLayout::INDEX_MASK_2},
                              {44100, 48000}),
                createProfile(PcmType::INT_24_BIT,
                              {AudioChannelLayout::LAYOUT_MONO, AudioChannelLayout::LAYOUT_STEREO,
                               AudioChannelLayout::INDEX_MASK_1, AudioChannelLayout::INDEX_MASK_2},
                              {44100, 48000})};
        Configuration c;
        // Device ports
        AudioPort usbOutDevice =
                createPort(c.nextPortId++, "USB Device Out", 0, false,
                           createDeviceExt(AudioDeviceType::OUT_DEVICE, 0,
                                           AudioDeviceDescription::CONNECTION_USB));
        c.ports.push_back(usbOutDevice);
        // Profiles advertised once the device is (simulated) connected.
        c.connectedProfiles[usbOutDevice.id] = standardPcmAudioProfiles;
        AudioPort usbOutHeadset =
                createPort(c.nextPortId++, "USB Headset Out", 0, false,
                           createDeviceExt(AudioDeviceType::OUT_HEADSET, 0,
                                           AudioDeviceDescription::CONNECTION_USB));
        c.ports.push_back(usbOutHeadset);
        c.connectedProfiles[usbOutHeadset.id] = standardPcmAudioProfiles;
        AudioPort usbInDevice = createPort(c.nextPortId++, "USB Device In", 0, true,
                                           createDeviceExt(AudioDeviceType::IN_DEVICE, 0,
                                                           AudioDeviceDescription::CONNECTION_USB));
        c.ports.push_back(usbInDevice);
        c.connectedProfiles[usbInDevice.id] = standardPcmAudioProfiles;
        AudioPort usbInHeadset =
                createPort(c.nextPortId++, "USB Headset In", 0, true,
                           createDeviceExt(AudioDeviceType::IN_HEADSET, 0,
                                           AudioDeviceDescription::CONNECTION_USB));
        c.ports.push_back(usbInHeadset);
        c.connectedProfiles[usbInHeadset.id] = standardPcmAudioProfiles;
        // Mix ports
        AudioPort usbDeviceOutMix =
                createPort(c.nextPortId++, "usb_device output", 0, false, createPortMixExt(1, 1));
        c.ports.push_back(usbDeviceOutMix);
        AudioPort usbDeviceInMix =
                createPort(c.nextPortId++, "usb_device input", 0, true, createPortMixExt(0, 1));
        c.ports.push_back(usbDeviceInMix);
        // Routes
        c.routes.push_back(createRoute({usbDeviceOutMix}, usbOutDevice));
        c.routes.push_back(createRoute({usbDeviceOutMix}, usbOutHeadset));
        c.routes.push_back(createRoute({usbInDevice, usbInHeadset}, usbDeviceInMix));
        return c;
    }();
    return std::make_unique<Configuration>(configuration);
}
// Stub configuration:
//
// Device ports:
// * "Test Out", OUT_AFE_PROXY
// - no profiles specified
// * "Test In", IN_AFE_PROXY
// - no profiles specified
// * "Wired Headset", OUT_HEADSET
// - no profiles specified
// * "Wired Headset Mic", IN_HEADSET
// - no profiles specified
//
// Mix ports:
// * "test output", 1 max open, 1 max active stream
// - profile PCM 24-bit; MONO, STEREO; 8000, 11025, 16000, 32000, 44100, 48000
// * "test fast output", 1 max open, 1 max active stream
// - profile PCM 24-bit; STEREO; 44100, 48000
// * "test compressed offload", DIRECT|COMPRESS_OFFLOAD|NON_BLOCKING, 1 max open, 1 max active
// stream
// - profile MP3; MONO, STEREO; 44100, 48000
// * "test input", 2 max open, 2 max active streams
// - profile PCM 24-bit; MONO, STEREO, FRONT_BACK;
// 8000, 11025, 16000, 22050, 32000, 44100, 48000
//
// Routes:
// "test output", "test fast output", "test compressed offload" -> "Test Out"
// "test output" -> "Wired Headset"
// "Test In", "Wired Headset Mic" -> "test input"
//
// Initial port configs:
// * "Test Out" device port: PCM 24-bit; STEREO; 48000
// * "Test In" device port: PCM 24-bit; MONO; 48000
//
// Profiles for device port connected state (when simulating connections):
// * "Wired Headset": dynamic profiles
// * "Wired Headset Mic": dynamic profiles
//
// Builds the static "stub" (test) module configuration described in the
// comment block above: AFE proxy test ports with fixed 24-bit initial
// configs, analog wired headset ports, and test mix ports including a fast
// output and an MP3 compressed-offload output. Port IDs are assigned
// sequentially via c.nextPortId++, so creation order matters. Computed once;
// each caller gets a fresh copy.
std::unique_ptr<Configuration> getStubConfiguration() {
    static const Configuration configuration = []() {
        Configuration c;
        // Device ports
        AudioPort testOutDevice = createPort(c.nextPortId++, "Test Out", 0, false,
                                             createDeviceExt(AudioDeviceType::OUT_AFE_PROXY, 0));
        c.ports.push_back(testOutDevice);
        // Unlike the other configurations, the test devices get fully
        // specified (non-dynamic) initial configs: 24-bit PCM at 48 kHz.
        c.initialConfigs.push_back(
                createPortConfig(testOutDevice.id, testOutDevice.id, PcmType::INT_24_BIT,
                                 AudioChannelLayout::LAYOUT_STEREO, 48000, 0, false,
                                 createDeviceExt(AudioDeviceType::OUT_AFE_PROXY, 0)));
        AudioPort headsetOutDevice =
                createPort(c.nextPortId++, "Wired Headset", 0, false,
                           createDeviceExt(AudioDeviceType::OUT_HEADSET, 0,
                                           AudioDeviceDescription::CONNECTION_ANALOG));
        c.ports.push_back(headsetOutDevice);
        AudioPort testInDevice = createPort(c.nextPortId++, "Test In", 0, true,
                                            createDeviceExt(AudioDeviceType::IN_AFE_PROXY, 0));
        c.ports.push_back(testInDevice);
        c.initialConfigs.push_back(
                createPortConfig(testInDevice.id, testInDevice.id, PcmType::INT_24_BIT,
                                 AudioChannelLayout::LAYOUT_MONO, 48000, 0, true,
                                 createDeviceExt(AudioDeviceType::IN_AFE_PROXY, 0)));
        AudioPort headsetInDevice =
                createPort(c.nextPortId++, "Wired Headset Mic", 0, true,
                           createDeviceExt(AudioDeviceType::IN_HEADSET, 0,
                                           AudioDeviceDescription::CONNECTION_ANALOG));
        c.ports.push_back(headsetInDevice);
        // Mix ports
        AudioPort testOutMix =
                createPort(c.nextPortId++, "test output", 0, false, createPortMixExt(1, 1));
        testOutMix.profiles.push_back(
                createProfile(PcmType::INT_24_BIT,
                              {AudioChannelLayout::LAYOUT_MONO, AudioChannelLayout::LAYOUT_STEREO},
                              {8000, 11025, 16000, 32000, 44100, 48000}));
        c.ports.push_back(testOutMix);
        AudioPort testFastOutMix = createPort(c.nextPortId++, "test fast output",
                                              makeBitPositionFlagMask({AudioOutputFlags::FAST}),
                                              false, createPortMixExt(1, 1));
        testFastOutMix.profiles.push_back(createProfile(
                PcmType::INT_24_BIT, {AudioChannelLayout::LAYOUT_STEREO}, {44100, 48000}));
        c.ports.push_back(testFastOutMix);
        AudioPort compressedOffloadOutMix =
                createPort(c.nextPortId++, "test compressed offload",
                           makeBitPositionFlagMask({AudioOutputFlags::DIRECT,
                                                    AudioOutputFlags::COMPRESS_OFFLOAD,
                                                    AudioOutputFlags::NON_BLOCKING}),
                           false, createPortMixExt(1, 1));
        // Encoded (MP3) profile — exercises the non-PCM createProfile overload.
        compressedOffloadOutMix.profiles.push_back(
                createProfile(::android::MEDIA_MIMETYPE_AUDIO_MPEG,
                              {AudioChannelLayout::LAYOUT_MONO, AudioChannelLayout::LAYOUT_STEREO},
                              {44100, 48000}));
        c.ports.push_back(compressedOffloadOutMix);
        AudioPort testInMix =
                createPort(c.nextPortId++, "test input", 0, true, createPortMixExt(2, 2));
        testInMix.profiles.push_back(
                createProfile(PcmType::INT_16_BIT,
                              {AudioChannelLayout::LAYOUT_MONO, AudioChannelLayout::LAYOUT_STEREO,
                               AudioChannelLayout::LAYOUT_FRONT_BACK},
                              {8000, 11025, 16000, 22050, 32000, 44100, 48000}));
        testInMix.profiles.push_back(
                createProfile(PcmType::INT_24_BIT,
                              {AudioChannelLayout::LAYOUT_MONO, AudioChannelLayout::LAYOUT_STEREO,
                               AudioChannelLayout::LAYOUT_FRONT_BACK},
                              {8000, 11025, 16000, 22050, 32000, 44100, 48000}));
        c.ports.push_back(testInMix);
        // Routes
        c.routes.push_back(
                createRoute({testOutMix, testFastOutMix, compressedOffloadOutMix}, testOutDevice));
        c.routes.push_back(createRoute({testOutMix}, headsetOutDevice));
        c.routes.push_back(createRoute({testInDevice, headsetInDevice}, testInMix));
        // The initial device configs are also the module's active port configs.
        c.portConfigs.insert(c.portConfigs.end(), c.initialConfigs.begin(), c.initialConfigs.end());
        return c;
    }();
    return std::make_unique<Configuration>(configuration);
}
// Bluetooth configuration:
//
// Device ports:
// * "BT A2DP Out", OUT_DEVICE, CONNECTION_BT_A2DP
// - profile PCM 16-bit; STEREO; 44100, 48000, 88200, 96000
// * "BT A2DP Headphones", OUT_HEADPHONE, CONNECTION_BT_A2DP
// - profile PCM 16-bit; STEREO; 44100, 48000, 88200, 96000
// * "BT A2DP Speaker", OUT_SPEAKER, CONNECTION_BT_A2DP
// - profile PCM 16-bit; STEREO; 44100, 48000, 88200, 96000
// * "BT Hearing Aid Out", OUT_HEARING_AID, CONNECTION_WIRELESS
// - no profiles specified
//
// Mix ports:
// * "a2dp output", 1 max open, 1 max active stream
// - no profiles specified
// * "hearing aid output", 1 max open, 1 max active stream
// - profile PCM 16-bit; STEREO; 16000, 24000
//
// Routes:
// "a2dp output" -> "BT A2DP Out"
// "a2dp output" -> "BT A2DP Headphones"
// "a2dp output" -> "BT A2DP Speaker"
// "hearing aid output" -> "BT Hearing Aid Out"
//
// Profiles for device port connected state (when simulating connections):
// * "BT A2DP Out", "BT A2DP Headphones", "BT A2DP Speaker":
// - profile PCM 16-bit; STEREO; 44100, 48000, 88200, 96000
// * "BT Hearing Aid Out":
// - profile PCM 16-bit; STEREO; 16000, 24000
//
std::unique_ptr<Configuration> getBluetoothConfiguration() {
    static const Configuration configuration = []() {
        const std::vector<AudioProfile> standardPcmAudioProfiles = {
                createProfile(PcmType::INT_16_BIT, {AudioChannelLayout::LAYOUT_STEREO},
                              {44100, 48000, 88200, 96000})};
        const std::vector<AudioProfile> hearingAidAudioProfiles = {createProfile(
                PcmType::INT_16_BIT, {AudioChannelLayout::LAYOUT_STEREO}, {16000, 24000})};
        Configuration c;
        // Device ports. Helper creates a BT device port, optionally populates its
        // static profile list, registers it with the configuration, and records the
        // profiles to report for the port's connected state.
        auto makeBtDevicePort = [&c](const char* portName, AudioDeviceType deviceType,
                                     const auto& connection,
                                     const std::vector<AudioProfile>& profiles,
                                     bool withStaticProfiles) {
            AudioPort port = createPort(c.nextPortId++, portName, 0, false,
                                        createDeviceExt(deviceType, 0, connection));
            if (withStaticProfiles) {
                port.profiles.insert(port.profiles.begin(), profiles.begin(), profiles.end());
            }
            c.ports.push_back(port);
            c.connectedProfiles[port.id] = profiles;
            return port;
        };
        const AudioPort btOutDevice =
                makeBtDevicePort("BT A2DP Out", AudioDeviceType::OUT_DEVICE,
                                 AudioDeviceDescription::CONNECTION_BT_A2DP,
                                 standardPcmAudioProfiles, true /*withStaticProfiles*/);
        const AudioPort btOutHeadphone =
                makeBtDevicePort("BT A2DP Headphones", AudioDeviceType::OUT_HEADPHONE,
                                 AudioDeviceDescription::CONNECTION_BT_A2DP,
                                 standardPcmAudioProfiles, true /*withStaticProfiles*/);
        const AudioPort btOutSpeaker =
                makeBtDevicePort("BT A2DP Speaker", AudioDeviceType::OUT_SPEAKER,
                                 AudioDeviceDescription::CONNECTION_BT_A2DP,
                                 standardPcmAudioProfiles, true /*withStaticProfiles*/);
        // The hearing aid device port carries no static profiles; its profiles are
        // only exposed via 'connectedProfiles' once the device gets connected.
        const AudioPort btOutHearingAid =
                makeBtDevicePort("BT Hearing Aid Out", AudioDeviceType::OUT_HEARING_AID,
                                 AudioDeviceDescription::CONNECTION_WIRELESS,
                                 hearingAidAudioProfiles, false /*withStaticProfiles*/);
        // Mix ports
        AudioPort btOutMix =
                createPort(c.nextPortId++, "a2dp output", 0, false, createPortMixExt(1, 1));
        c.ports.push_back(btOutMix);
        AudioPort btHearingOutMix =
                createPort(c.nextPortId++, "hearing aid output", 0, false, createPortMixExt(1, 1));
        btHearingOutMix.profiles = hearingAidAudioProfiles;
        c.ports.push_back(btHearingOutMix);
        // Routes
        c.routes.push_back(createRoute({btOutMix}, btOutDevice));
        c.routes.push_back(createRoute({btOutMix}, btOutHeadphone));
        c.routes.push_back(createRoute({btOutMix}, btOutSpeaker));
        c.routes.push_back(createRoute({btHearingOutMix}, btOutHearingAid));
        return c;
    }();
    return std::make_unique<Configuration>(configuration);
}
// Returns a fresh copy of the static configuration for the requested module type.
std::unique_ptr<Module::Configuration> getConfiguration(Module::Type moduleType) {
    switch (moduleType) {
        case Module::Type::DEFAULT:
            return getPrimaryConfiguration();
        case Module::Type::R_SUBMIX:
            return getRSubmixConfiguration();
        case Module::Type::STUB:
            return getStubConfiguration();
        case Module::Type::USB:
            return getUsbConfiguration();
        case Module::Type::BLUETOOTH:
            return getBluetoothConfiguration();
    }
    // The switch covers every declared enumerator; reaching this point means
    // 'moduleType' holds an out-of-range value. Previously control flowed off the
    // end of a non-void function (undefined behavior) — fail safely instead.
    return nullptr;
}
} // namespace aidl::android::hardware::audio::core::internal

357
audio/EffectConfig.cpp Normal file
View File

@@ -0,0 +1,357 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <optional>
#include <string>
#define LOG_TAG "AHAL_EffectConfig"
#include <android-base/logging.h>
#include <media/AidlConversionCppNdk.h>
#include <system/audio.h>
#include <system/audio_aidl_utils.h>
#include <system/audio_effects/audio_effects_conf.h>
#include <system/audio_effects/effect_uuid.h>
#include "effectFactory-impl/EffectConfig.h"
#ifdef __ANDROID_APEX__
#include <android/apexsupport.h>
#endif
using aidl::android::media::audio::common::AudioDevice;
using aidl::android::media::audio::common::AudioDeviceAddress;
using aidl::android::media::audio::common::AudioDeviceDescription;
using aidl::android::media::audio::common::AudioDeviceType;
using aidl::android::media::audio::common::AudioSource;
using aidl::android::media::audio::common::AudioStreamType;
using aidl::android::media::audio::common::AudioUuid;
namespace aidl::android::hardware::audio::effect {
// Loads and parses an audio effects configuration XML file. Parse failures of
// individual elements are tallied in mSkippedElements and do not abort the parse.
EffectConfig::EffectConfig(const std::string& file) {
    tinyxml2::XMLDocument doc;
    doc.LoadFile(file.c_str());
    // parse the xml file into maps
    if (doc.Error()) {
        LOG(ERROR) << __func__ << " tinyxml2 failed to load " << file
                   << " error: " << doc.ErrorStr();
        return;
    }
    auto countFailure = [this](bool success) {
        if (!success) ++mSkippedElements;
    };
    for (auto& rootElement : getChildren(doc, "audio_effects_conf")) {
        // Parse library
        for (auto& librariesElement : getChildren(rootElement, "libraries")) {
            for (auto& libraryElement : getChildren(librariesElement, "library")) {
                countFailure(parseLibrary(libraryElement));
            }
        }
        // Parse effects
        for (auto& effectsElement : getChildren(rootElement, "effects")) {
            for (auto& effectElement : getChildren(effectsElement)) {
                countFailure(parseEffect(effectElement));
            }
        }
        // Parse pre processing chains
        for (auto& preprocessElement : getChildren(rootElement, "preprocess")) {
            for (auto& streamElement : getChildren(preprocessElement, "stream")) {
                // AudioSource
                countFailure(parseProcessing(Processing::Type::source, streamElement));
            }
        }
        // Parse post processing chains
        for (auto& postprocessElement : getChildren(rootElement, "postprocess")) {
            for (auto& streamElement : getChildren(postprocessElement, "stream")) {
                // AudioStreamType
                countFailure(parseProcessing(Processing::Type::streamType, streamElement));
            }
        }
        // Parse device effect chains
        for (auto& deviceEffectsElement : getChildren(rootElement, "deviceEffects")) {
            for (auto& deviceElement : getChildren(deviceEffectsElement, "device")) {
                // AudioDevice
                countFailure(parseProcessing(Processing::Type::device, deviceElement));
            }
        }
    }
    LOG(DEBUG) << __func__ << " successfully parsed " << file << ", skipping " << mSkippedElements
               << " element(s)";
}
// Collects references to all direct child elements of 'node' matching 'childTag'
// (all children when the tag is null — the default declared in the header).
std::vector<std::reference_wrapper<const tinyxml2::XMLElement>> EffectConfig::getChildren(
        const tinyxml2::XMLNode& node, const char* childTag) {
    std::vector<std::reference_wrapper<const tinyxml2::XMLElement>> result;
    const tinyxml2::XMLElement* element = node.FirstChildElement(childTag);
    while (element != nullptr) {
        result.emplace_back(*element);
        element = element->NextSiblingElement(childTag);
    }
    return result;
}
// Resolves a library file name from the config XML to a readable on-disk path:
// first inside the vendor APEX (when built for one), then across kEffectLibPath.
// Returns true and fills *resolvedPath on success.
bool EffectConfig::resolveLibrary(const std::string& path, std::string* resolvedPath) {
#ifdef __ANDROID_APEX__
    if constexpr (__ANDROID_VENDOR_API__ >= 202404) {
        AApexInfo* apexInfo;
        if (AApexInfo_create(&apexInfo) == AAPEXINFO_OK) {
            std::string apexName(AApexInfo_getName(apexInfo));
            AApexInfo_destroy(apexInfo);
            std::string apexLibPath = "/apex/" + apexName + kEffectLibApexPath + path;
            LOG(DEBUG) << __func__ << " effect lib path " << apexLibPath;
            if (access(apexLibPath.c_str(), R_OK) == 0) {
                *resolvedPath = std::move(apexLibPath);
                return true;
            }
        }
    } else {
        LOG(DEBUG) << __func__ << " libapexsupport is not supported";
    }
#endif
    // If audio effects libs are not in vendor apex, locate them in kEffectLibPath
    for (auto* libraryDirectory : kEffectLibPath) {
        std::string candidatePath = std::string(libraryDirectory) + '/' + path;
        if (access(candidatePath.c_str(), R_OK) == 0) {
            *resolvedPath = std::move(candidatePath);
            return true;
        }
    }
    return false;
}
// Parses a <library name="..." path="..."/> element and records the resolved
// on-disk path in mLibraryMap. Returns false if a mandatory attribute is missing
// or the library file cannot be located.
bool EffectConfig::parseLibrary(const tinyxml2::XMLElement& xml) {
    const char* name = xml.Attribute("name");
    RETURN_VALUE_IF(!name, false, "noNameAttribute");
    const char* path = xml.Attribute("path");
    RETURN_VALUE_IF(!path, false, "noPathAttribute");
    std::string resolvedPath;
    if (!resolveLibrary(path, &resolvedPath)) {
        LOG(ERROR) << __func__ << " can't find " << path;
        return false;
    }
    mLibraryMap[name] = resolvedPath;
    LOG(DEBUG) << __func__ << " " << name << " : " << resolvedPath;
    return true;
}
// Parses an <effect> or <effectProxy> element into mEffectsMap. For a proxy, the
// proxy's own library/uuid plus every child library are recorded; otherwise a
// single library is expected. Returns false on malformed input.
bool EffectConfig::parseEffect(const tinyxml2::XMLElement& xml) {
    struct EffectLibraries effectLibraries;
    std::vector<Library> libraries;
    // BUGFIX: tinyxml2::XMLElement::Attribute() returns nullptr when the attribute
    // is absent, and constructing std::string from nullptr is undefined behavior.
    // Check the raw pointer before converting.
    const char* nameAttr = xml.Attribute("name");
    RETURN_VALUE_IF(!nameAttr || *nameAttr == '\0', false, "effectsNoName");
    std::string name = nameAttr;
    LOG(VERBOSE) << __func__ << dump(xml);
    struct Library library;
    if (std::strcmp(xml.Name(), "effectProxy") == 0) {
        // proxy lib and uuid
        RETURN_VALUE_IF(!parseLibrary(xml, library, true), false, "parseProxyLibFailed");
        effectLibraries.proxyLibrary = library;
        // proxy effect libs and UUID
        auto xmlProxyLib = xml.FirstChildElement();
        RETURN_VALUE_IF(!xmlProxyLib, false, "noLibForProxy");
        while (xmlProxyLib) {
            struct Library tempLibrary;
            RETURN_VALUE_IF(!parseLibrary(*xmlProxyLib, tempLibrary), false,
                            "parseEffectLibFailed");
            libraries.push_back(std::move(tempLibrary));
            xmlProxyLib = xmlProxyLib->NextSiblingElement();
        }
    } else {
        // expect only one library if not proxy
        RETURN_VALUE_IF(!parseLibrary(xml, library), false, "parseEffectLibFailed");
        libraries.push_back(std::move(library));
    }
    effectLibraries.libraries = std::move(libraries);
    mEffectsMap[name] = std::move(effectLibraries);
    return true;
}
// Fills 'library' from an element carrying "library"/"uuid"/"type" attributes.
// For an <effectProxy> element (isProxy == true) the "library" attribute is not
// read. A zero implementation UUID is rejected; the type UUID is optional.
bool EffectConfig::parseLibrary(const tinyxml2::XMLElement& xml, struct Library& library,
                                bool isProxy) {
    // Retrieve library name only if not effectProxy element
    if (!isProxy) {
        const char* libraryName = xml.Attribute("library");
        RETURN_VALUE_IF(!libraryName, false, "noLibraryAttribute");
        library.name = libraryName;
    }
    const char* uuidAttr = xml.Attribute("uuid");
    RETURN_VALUE_IF(!uuidAttr, false, "noUuidAttribute");
    library.uuid = stringToUuid(uuidAttr);
    if (const char* typeAttr = xml.Attribute("type"); typeAttr != nullptr) {
        library.type = stringToUuid(typeAttr);
    }
    RETURN_VALUE_IF((library.uuid == getEffectUuidZero()), false, "invalidUuidAttribute");
    const std::string typeSuffix =
            library.type.has_value() ? ::android::audio::utils::toString(library.type.value())
                                     : "";
    LOG(VERBOSE) << __func__ << (isProxy ? " proxy " : library.name) << " : uuid "
                 << ::android::audio::utils::toString(library.uuid) << typeSuffix;
    return true;
}
// Maps an XML "type" attribute string to the matching AIDL Processing::Type union
// value. The interpretation depends on 'typeTag': an audio stream type, a capture
// source, or a device (in which case 'address' is folded into the resulting
// AudioDevice). Returns std::nullopt for unknown strings or conversion failures.
std::optional<Processing::Type> EffectConfig::stringToProcessingType(Processing::Type::Tag typeTag,
                                                                     const std::string& type,
                                                                     const std::string& address) {
    // see list of audio stream types in audio_stream_type_t:
    // system/media/audio/include/system/audio_effects/audio_effects_conf.h
    // AUDIO_STREAM_DEFAULT_TAG is not listed here because according to SYS_RESERVED_DEFAULT in
    // AudioStreamType.aidl: "Value reserved for system use only. HALs must never return this value
    // to the system or accept it from the system".
    static const std::map<const std::string, AudioStreamType> sAudioStreamTypeTable = {
            {AUDIO_STREAM_VOICE_CALL_TAG, AudioStreamType::VOICE_CALL},
            {AUDIO_STREAM_SYSTEM_TAG, AudioStreamType::SYSTEM},
            {AUDIO_STREAM_RING_TAG, AudioStreamType::RING},
            {AUDIO_STREAM_MUSIC_TAG, AudioStreamType::MUSIC},
            {AUDIO_STREAM_ALARM_TAG, AudioStreamType::ALARM},
            {AUDIO_STREAM_NOTIFICATION_TAG, AudioStreamType::NOTIFICATION},
            {AUDIO_STREAM_BLUETOOTH_SCO_TAG, AudioStreamType::BLUETOOTH_SCO},
            {AUDIO_STREAM_ENFORCED_AUDIBLE_TAG, AudioStreamType::ENFORCED_AUDIBLE},
            {AUDIO_STREAM_DTMF_TAG, AudioStreamType::DTMF},
            {AUDIO_STREAM_TTS_TAG, AudioStreamType::TTS},
            {AUDIO_STREAM_ASSISTANT_TAG, AudioStreamType::ASSISTANT}};
    // see list of audio sources in audio_source_t:
    // system/media/audio/include/system/audio_effects/audio_effects_conf.h
    static const std::map<const std::string, AudioSource> sAudioSourceTable = {
            {MIC_SRC_TAG, AudioSource::MIC},
            {VOICE_UL_SRC_TAG, AudioSource::VOICE_UPLINK},
            {VOICE_DL_SRC_TAG, AudioSource::VOICE_DOWNLINK},
            {VOICE_CALL_SRC_TAG, AudioSource::VOICE_CALL},
            {CAMCORDER_SRC_TAG, AudioSource::CAMCORDER},
            {VOICE_REC_SRC_TAG, AudioSource::VOICE_RECOGNITION},
            {VOICE_COMM_SRC_TAG, AudioSource::VOICE_COMMUNICATION},
            {REMOTE_SUBMIX_SRC_TAG, AudioSource::REMOTE_SUBMIX},
            {UNPROCESSED_SRC_TAG, AudioSource::UNPROCESSED},
            {VOICE_PERFORMANCE_SRC_TAG, AudioSource::VOICE_PERFORMANCE}};
    if (typeTag == Processing::Type::streamType) {
        auto typeIter = sAudioStreamTypeTable.find(type);
        if (typeIter != sAudioStreamTypeTable.end()) {
            return typeIter->second;
        }
    } else if (typeTag == Processing::Type::source) {
        auto typeIter = sAudioSourceTable.find(type);
        if (typeIter != sAudioSourceTable.end()) {
            return typeIter->second;
        }
    } else if (typeTag == Processing::Type::device) {
        // Device types come in as legacy audio_devices_t strings; convert via the
        // legacy-to-AIDL bridge so the address (possibly empty) is attached.
        audio_devices_t deviceType;
        if (!audio_device_from_string(type.c_str(), &deviceType)) {
            LOG(ERROR) << __func__ << "DeviceEffect: invalid type " << type;
            return std::nullopt;
        }
        auto ret = ::aidl::android::legacy2aidl_audio_device_AudioDevice(deviceType, address);
        if (!ret.ok()) {
            LOG(ERROR) << __func__ << "DeviceEffect: Failed to get AudioDevice from type "
                       << deviceType << ", address " << address;
            return std::nullopt;
        }
        return ret.value();
    }
    return std::nullopt;
}
// Parses one processing element (<stream type=.../> or <device type=... address=.../>)
// and fills mProcessingMap with the effect chains named by its <apply effect=...>
// children. Effects that were never declared are skipped with an error log.
bool EffectConfig::parseProcessing(Processing::Type::Tag typeTag, const tinyxml2::XMLElement& xml) {
    LOG(VERBOSE) << __func__ << dump(xml);
    // BUGFIX: Attribute() returns nullptr for a missing attribute; a nullptr
    // 'typeStr' would previously flow into a const std::string& parameter of
    // stringToProcessingType — undefined behavior. Reject it explicitly.
    const char* typeStr = xml.Attribute("type");
    RETURN_VALUE_IF(!typeStr, false, "noTypeAttribute");
    const char* addressStr = xml.Attribute("address");
    // For device effect, device address is optional, match will be done for the given device type
    // with empty address.
    auto aidlType = stringToProcessingType(typeTag, typeStr, addressStr ? addressStr : "");
    RETURN_VALUE_IF(!aidlType.has_value(), false, "illegalStreamType");
    RETURN_VALUE_IF(0 != mProcessingMap.count(aidlType.value()), false, "duplicateStreamType");
    for (auto& apply : getChildren(xml, "apply")) {
        const char* name = apply.get().Attribute("effect");
        // BUGFIX: the null check must precede any use of 'name' — previously
        // mEffectsMap.find(name) constructed a std::string from a possible nullptr.
        RETURN_VALUE_IF(!name, false, "noEffectAttribute");
        if (mEffectsMap.find(name) == mEffectsMap.end()) {
            LOG(ERROR) << __func__ << " effect " << name << " doesn't exist, skipping";
            continue;
        }
        mProcessingMap[aidlType.value()].emplace_back(mEffectsMap[name]);
    }
    return true;
}
// Read-only accessor for the processing-type -> effect-chain map built during parsing.
const std::map<Processing::Type, std::vector<EffectConfig::EffectLibraries>>&
EffectConfig::getProcessingMap() const {
    return mProcessingMap;
}
// Resolves the effect *type* UUID for a parsed effect entry: first by matching the
// XML effect name against the well-known name table below, then by falling back to
// an explicit "type" attribute found on any of the entry's libraries. Returns false
// when neither source yields a UUID.
bool EffectConfig::findUuid(const std::pair<std::string, struct EffectLibraries>& effectElem,
                            AudioUuid* uuid) {
// Difference from EFFECT_TYPE_LIST_DEF, there could be multiple name mapping to same Effect Type
#define EFFECT_XML_TYPE_LIST_DEF(V)                        \
    V("acoustic_echo_canceler", AcousticEchoCanceler)      \
    V("automatic_gain_control_v1", AutomaticGainControlV1) \
    V("automatic_gain_control_v2", AutomaticGainControlV2) \
    V("bassboost", BassBoost)                              \
    V("downmix", Downmix)                                  \
    V("dynamics_processing", DynamicsProcessing)           \
    V("equalizer", Equalizer)                              \
    V("extensioneffect", Extension)                        \
    V("haptic_generator", HapticGenerator)                 \
    V("loudness_enhancer", LoudnessEnhancer)               \
    V("env_reverb", EnvReverb)                             \
    V("reverb_env_aux", EnvReverb)                         \
    V("reverb_env_ins", EnvReverb)                         \
    V("preset_reverb", PresetReverb)                       \
    V("reverb_pre_aux", PresetReverb)                      \
    V("reverb_pre_ins", PresetReverb)                      \
    V("noise_suppression", NoiseSuppression)               \
    V("spatializer", Spatializer)                          \
    V("virtualizer", Virtualizer)                          \
    V("visualizer", Visualizer)                            \
    V("volume", Volume)
// Expands each (name, Symbol) pair into a map entry pointing at the corresponding
// getEffectTypeUuid<Symbol>() getter from effect_uuid.h.
#define GENERATE_MAP_ENTRY_V(s, symbol) {s, &getEffectTypeUuid##symbol},
    const std::string xmlEffectName = effectElem.first;
    typedef const AudioUuid& (*UuidGetter)(void);
    static const std::map<std::string, UuidGetter> uuidMap{
            // std::make_pair("s", &getEffectTypeUuidExtension)};
            {EFFECT_XML_TYPE_LIST_DEF(GENERATE_MAP_ENTRY_V)}};
    if (auto it = uuidMap.find(xmlEffectName); it != uuidMap.end()) {
        *uuid = (*it->second)();
        return true;
    }
    // Fallback: the first library that declared an explicit "type" UUID wins.
    const auto& libs = effectElem.second.libraries;
    for (const auto& lib : libs) {
        if (lib.type.has_value()) {
            *uuid = lib.type.value();
            return true;
        }
    }
    return false;
}
// Renders an XML element to text for logging. NOTE(review): 'printer' is an rvalue
// parameter (presumably defaulted in the header — verify); the returned C-string
// points into that temporary, so it is only valid for the duration of the caller's
// full expression — suitable for immediate streaming into LOG(), nothing longer.
const char* EffectConfig::dump(const tinyxml2::XMLElement& element,
                               tinyxml2::XMLPrinter&& printer) const {
    element.Accept(&printer);
    return printer.CStr();
}
} // namespace aidl::android::hardware::audio::effect

275
audio/EffectContext.cpp Normal file
View File

@@ -0,0 +1,275 @@
/*
* Copyright (C) 2024 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <memory>
#define LOG_TAG "AHAL_EffectContext"
#include "effect-impl/EffectContext.h"
#include "include/effect-impl/EffectTypes.h"
using aidl::android::hardware::audio::common::getChannelCount;
using aidl::android::hardware::audio::common::getFrameSizeInBytes;
using aidl::android::hardware::audio::effect::IEffect;
using aidl::android::hardware::audio::effect::kReopenSupportedVersion;
using aidl::android::media::audio::common::PcmType;
using ::android::hardware::EventFlag;
namespace aidl::android::hardware::audio::effect {
// Builds the context shared between an effect implementation and its client:
// validates/caches the common parameter, creates the status/input/output FMQs and
// the status EventFlag, and sizes the shared scratch buffer.
EffectContext::EffectContext(size_t statusDepth, const Parameter::Common& common) {
    LOG_ALWAYS_FATAL_IF(RetCode::SUCCESS != setCommon(common), "illegalCommonParameter");
    // in/outBuffer size in float (FMQ data format defined for DataMQ); the frame
    // sizes were just derived from 'common' inside setCommon().
    const size_t inFloats = common.input.frameCount * mInputFrameSize / sizeof(float);
    const size_t outFloats = common.output.frameCount * mOutputFrameSize / sizeof(float);
    // only status FMQ use the EventFlag
    mStatusMQ = std::make_shared<StatusMQ>(statusDepth, true /*configureEventFlagWord*/);
    mInputMQ = std::make_shared<DataMQ>(inFloats);
    mOutputMQ = std::make_shared<DataMQ>(outFloats);
    if (!mStatusMQ->isValid() || !mInputMQ->isValid() || !mOutputMQ->isValid()) {
        LOG(ERROR) << __func__ << " created invalid FMQ, statusMQ: " << mStatusMQ->isValid()
                   << " inputMQ: " << mInputMQ->isValid() << " outputMQ: " << mOutputMQ->isValid();
    }
    ::android::status_t status =
            EventFlag::createEventFlag(mStatusMQ->getEventFlagWord(), &mEfGroup);
    LOG_ALWAYS_FATAL_IF(status != ::android::OK || !mEfGroup, " create EventFlagGroup failed ");
    // One buffer serves both directions, so size it for the larger of the two.
    mWorkBuffer.resize(std::max(inFloats, outFloats));
}
// reset buffer status by abandon input data in FMQ
void EffectContext::resetBuffer() {
    auto buffer = static_cast<float*>(mWorkBuffer.data());
    // NOTE(review): the previous implementation also constructed a local
    // std::vector<IEffect::Status> sized by mStatusMQ->availableToRead() and then
    // discarded it without ever reading from the queue — pure dead code (the status
    // FMQ was not drained by it). Removed; confirm no status-queue drain was intended.
    if (mInputMQ) {
        // Discard pending input frames by reading them into the scratch buffer
        // (the input MQ capacity never exceeds the work buffer size).
        mInputMQ->read(buffer, mInputMQ->availableToRead());
    }
}
// Re-creates any data FMQ that was dropped (sized from the current common
// parameter), resizes the shared work buffer to match, then duplicates the FMQ
// descriptors into 'effectRet' for the client's reopen path.
void EffectContext::dupeFmqWithReopen(IEffect::OpenEffectReturn* effectRet) {
    const size_t inFloats = mCommon.input.frameCount * mInputFrameSize / sizeof(float);
    const size_t outFloats = mCommon.output.frameCount * mOutputFrameSize / sizeof(float);
    if (!mInputMQ) {
        mInputMQ = std::make_shared<DataMQ>(inFloats);
    }
    if (!mOutputMQ) {
        mOutputMQ = std::make_shared<DataMQ>(outFloats);
    }
    if (const size_t workSize = std::max(inFloats, outFloats); mWorkBuffer.size() != workSize) {
        mWorkBuffer.resize(workSize);
    }
    dupeFmq(effectRet);
}
// Copies descriptors of the three FMQs into 'effectRet'. Silently does nothing if
// the output pointer is null or any queue has not been created yet.
void EffectContext::dupeFmq(IEffect::OpenEffectReturn* effectRet) {
    if (!effectRet || !mStatusMQ || !mInputMQ || !mOutputMQ) {
        return;
    }
    effectRet->statusMQ = mStatusMQ->dupeDesc();
    effectRet->inputDataMQ = mInputMQ->dupeDesc();
    effectRet->outputDataMQ = mOutputMQ->dupeDesc();
}
// Raw pointer into the shared scratch buffer used by effect processing.
float* EffectContext::getWorkBuffer() {
    return static_cast<float*>(mWorkBuffer.data());
}
// Current scratch buffer capacity, in floats.
size_t EffectContext::getWorkBufferSize() const {
    return mWorkBuffer.size();
}
// Accessors for the three FMQs shared with the client.
std::shared_ptr<EffectContext::StatusMQ> EffectContext::getStatusFmq() const {
    return mStatusMQ;
}
std::shared_ptr<EffectContext::DataMQ> EffectContext::getInputDataFmq() const {
    return mInputMQ;
}
std::shared_ptr<EffectContext::DataMQ> EffectContext::getOutputDataFmq() const {
    return mOutputMQ;
}
// Frame sizes in bytes, derived from the common parameter in updateIOFrameSize().
size_t EffectContext::getInputFrameSize() const {
    return mInputFrameSize;
}
size_t EffectContext::getOutputFrameSize() const {
    return mOutputFrameSize;
}
// Session id / IO handle from the cached common parameter.
int EffectContext::getSessionId() const {
    return mCommon.session;
}
int EffectContext::getIoHandle() const {
    return mCommon.ioHandle;
}
// The setters below cache values pushed by the framework; the matching getters
// return the cached copies. All setters unconditionally succeed.
RetCode EffectContext::setOutputDevice(
        const std::vector<aidl::android::media::audio::common::AudioDeviceDescription>& device) {
    mOutputDevice = device;
    return RetCode::SUCCESS;
}
std::vector<aidl::android::media::audio::common::AudioDeviceDescription>
EffectContext::getOutputDevice() {
    return mOutputDevice;
}
RetCode EffectContext::setAudioMode(const aidl::android::media::audio::common::AudioMode& mode) {
    mMode = mode;
    return RetCode::SUCCESS;
}
aidl::android::media::audio::common::AudioMode EffectContext::getAudioMode() {
    return mMode;
}
RetCode EffectContext::setAudioSource(
        const aidl::android::media::audio::common::AudioSource& source) {
    mSource = source;
    return RetCode::SUCCESS;
}
aidl::android::media::audio::common::AudioSource EffectContext::getAudioSource() {
    return mSource;
}
RetCode EffectContext::setVolumeStereo(const Parameter::VolumeStereo& volumeStereo) {
    mVolumeStereo = volumeStereo;
    return RetCode::SUCCESS;
}
Parameter::VolumeStereo EffectContext::getVolumeStereo() {
    return mVolumeStereo;
}
// Validates and caches the common parameter. Only FLOAT_32_BIT PCM is accepted for
// both directions. NOTE: updateIOFrameSize() runs BEFORE mCommon is overwritten so
// it can compare against the previous configuration; as a consequence, a later
// channel-count failure leaves the frame sizes already updated (partial state).
RetCode EffectContext::setCommon(const Parameter::Common& common) {
    auto& input = common.input;
    auto& output = common.output;
    if (input.base.format.pcm != aidl::android::media::audio::common::PcmType::FLOAT_32_BIT ||
        output.base.format.pcm != aidl::android::media::audio::common::PcmType::FLOAT_32_BIT) {
        LOG(ERROR) << __func__ << " illegal IO, input "
                   << ::android::internal::ToString(input.base.format) << ", output "
                   << ::android::internal::ToString(output.base.format);
        return RetCode::ERROR_ILLEGAL_PARAMETER;
    }
    if (auto ret = updateIOFrameSize(common); ret != RetCode::SUCCESS) {
        return ret;
    }
    mInputChannelCount = getChannelCount(input.base.channelMask);
    mOutputChannelCount = getChannelCount(output.base.channelMask);
    if (mInputChannelCount == 0 || mOutputChannelCount == 0) {
        LOG(ERROR) << __func__ << " illegal channel count input " << mInputChannelCount
                   << ", output " << mOutputChannelCount;
        return RetCode::ERROR_ILLEGAL_PARAMETER;
    }
    mCommon = common;
    return RetCode::SUCCESS;
}
// Returns a copy of the last successfully applied common parameter.
Parameter::Common EffectContext::getCommon() {
    return mCommon;
}
// Event flag group created over the status FMQ's flag word in the constructor.
EventFlag* EffectContext::getStatusEventFlag() {
    return mEfGroup;
}
// Recomputes input/output frame sizes from 'common'. When a frame size or frame
// count changed relative to the current state (mCommon still holds the previous
// config — the caller assigns it only afterwards), the affected data FMQ is dropped
// and the client is signaled to reopen. No-op before the FMQs/work buffer exist, or
// on HAL versions older than V2 which lack IEffect::reopen.
RetCode EffectContext::updateIOFrameSize(const Parameter::Common& common) {
    const auto prevInputFrameSize = mInputFrameSize;
    const auto prevOutputFrameSize = mOutputFrameSize;
    mInputFrameSize = ::aidl::android::hardware::audio::common::getFrameSizeInBytes(
            common.input.base.format, common.input.base.channelMask);
    mOutputFrameSize = ::aidl::android::hardware::audio::common::getFrameSizeInBytes(
            common.output.base.format, common.output.base.channelMask);
    // workBuffer and data MQ not allocated yet, no need to update
    if (mWorkBuffer.size() == 0 || !mInputMQ || !mOutputMQ) {
        return RetCode::SUCCESS;
    }
    // IEffect::reopen introduced in android.hardware.audio.effect-V2
    if (mVersion < kReopenSupportedVersion) {
        LOG(WARNING) << __func__ << " skipped for HAL version " << mVersion;
        return RetCode::SUCCESS;
    }
    bool needUpdateMq = false;
    if (mInputFrameSize != prevInputFrameSize ||
        mCommon.input.frameCount != common.input.frameCount) {
        mInputMQ.reset();
        needUpdateMq = true;
    }
    if (mOutputFrameSize != prevOutputFrameSize ||
        mCommon.output.frameCount != common.output.frameCount) {
        mOutputMQ.reset();
        needUpdateMq = true;
    }
    if (needUpdateMq) {
        return notifyDataMqUpdate();
    }
    return RetCode::SUCCESS;
}
// Wakes the client through the status EventFlag so it reopens the effect and
// fetches the re-created data FMQ descriptors.
RetCode EffectContext::notifyDataMqUpdate() {
    if (!mEfGroup) {
        LOG(ERROR) << __func__ << ": invalid EventFlag group";
        return RetCode::ERROR_EVENT_FLAG_ERROR;
    }
    const auto wakeRet = mEfGroup->wake(kEventFlagDataMqUpdate);
    if (wakeRet != ::android::OK) {
        LOG(ERROR) << __func__ << ": wake failure with ret " << wakeRet;
        return RetCode::ERROR_EVENT_FLAG_ERROR;
    }
    LOG(VERBOSE) << __func__ << " : signal client for reopen";
    return RetCode::SUCCESS;
}
// Default lifecycle hooks — always succeed here; NOTE(review): presumably meant to
// be overridden by concrete effect contexts that need transition work (confirm
// against the header's virtual declarations).
RetCode EffectContext::enable() {
    return RetCode::SUCCESS;
}
RetCode EffectContext::disable() {
    return RetCode::SUCCESS;
}
RetCode EffectContext::reset() {
    return RetCode::SUCCESS;
}
// Draining flag: set while the effect flushes its tail after the stream stops.
RetCode EffectContext::startDraining() {
    mIsDraining = true;
    return RetCode::SUCCESS;
}
RetCode EffectContext::finishDraining() {
    mIsDraining = false;
    return RetCode::SUCCESS;
}
bool EffectContext::isDraining() {
    return mIsDraining;
}
} // namespace aidl::android::hardware::audio::effect

300
audio/EffectFactory.cpp Normal file
View File

@@ -0,0 +1,300 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <dlfcn.h>

#include <algorithm>
#include <iterator>
#include <memory>
#include <optional>
#include <tuple>
#include <unordered_set>
#include <vector>
#define LOG_TAG "AHAL_EffectFactory"
#include <android-base/logging.h>
#include <android/binder_ibinder_platform.h>
#include <system/audio_aidl_utils.h>
#include <system/audio_effects/effect_uuid.h>
#include <system/thread_defs.h>
#include "effect-impl/EffectTypes.h"
#include "effectFactory-impl/EffectFactory.h"
using aidl::android::media::audio::common::AudioUuid;
namespace aidl::android::hardware::audio::effect {
// Parses the effect configuration XML and loads the effect libraries it references.
Factory::Factory(const std::string& file) : mConfig(EffectConfig(file)) {
    LOG(DEBUG) << __func__ << " with config file: " << file;
    loadEffectLibs();
}
// Destroys any effect instances that clients leaked (they should have called
// destroyEffect). No lock is taken: nobody else can reference the factory while it
// is being destroyed.
Factory::~Factory() {
    if (auto count = mEffectMap.size()) {
        LOG(WARNING) << __func__ << " remaining " << count
                     << " effect instances not destroyed indicating resource leak!";
        // BUGFIX: destroyEffectImpl_l() erases entries from mEffectMap, so calling
        // it inside a range-for over mEffectMap invalidated the loop iterator
        // (undefined behavior). Collect the live instances first, then destroy.
        std::vector<std::shared_ptr<IEffect>> remaining;
        for (const auto& it : mEffectMap) {
            if (auto spEffect = it.first.lock()) {
                LOG(WARNING) << __func__ << " erase remaining instance UUID "
                             << ::android::audio::utils::toString(it.second.first);
                remaining.push_back(spEffect);
            }
        }
        for (const auto& spEffect : remaining) {
            destroyEffectImpl_l(spEffect);
        }
    }
}
// Queries the descriptor for an implementation UUID from its (lazily dlsym'ed)
// library. Must be called with mMutex held. Returns ILLEGAL_ARGUMENT when the
// UUID is unknown.
ndk::ScopedAStatus Factory::getDescriptorWithUuid_l(const AudioUuid& uuid, Descriptor* desc) {
    RETURN_IF(!desc, EX_NULL_POINTER, "nullDescriptor");
    const auto entryIt = mEffectLibMap.find(uuid);
    if (entryIt == mEffectLibMap.end()) {
        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
    }
    getDlSyms_l(entryIt->second);
    auto& libInterface = std::get<kMapEntryInterfaceIndex>(entryIt->second);
    RETURN_IF(!libInterface || !libInterface->queryEffectFunc, EX_NULL_POINTER,
              "dlNullQueryEffectFunc");
    RETURN_IF_BINDER_EXCEPTION(libInterface->queryEffectFunc(&uuid, desc));
    return ndk::ScopedAStatus::ok();
}
// Returns descriptors of all known effects matching the optional type /
// implementation / proxy UUID filters (an absent filter matches everything).
ndk::ScopedAStatus Factory::queryEffects(const std::optional<AudioUuid>& in_type_uuid,
                                         const std::optional<AudioUuid>& in_impl_uuid,
                                         const std::optional<AudioUuid>& in_proxy_uuid,
                                         std::vector<Descriptor>* _aidl_return) {
    std::lock_guard lg(mMutex);
    for (const auto& id : mIdentitySet) {
        const bool typeMatches = !in_type_uuid.has_value() || in_type_uuid.value() == id.type;
        const bool implMatches = !in_impl_uuid.has_value() || in_impl_uuid.value() == id.uuid;
        const bool proxyMatches =
                !in_proxy_uuid.has_value() ||
                (id.proxy.has_value() && in_proxy_uuid.value() == id.proxy.value());
        if (!typeMatches || !implMatches || !proxyMatches) {
            continue;
        }
        // Identities without a loaded implementation library are silently skipped.
        if (!mEffectLibMap.count(id.uuid)) {
            continue;
        }
        Descriptor desc;
        RETURN_IF_ASTATUS_NOT_OK(getDescriptorWithUuid_l(id.uuid, &desc), "getDescriptorFailed");
        // update proxy UUID with information from config xml
        desc.common.id.proxy = id.proxy;
        _aidl_return->emplace_back(std::move(desc));
    }
    return ndk::ScopedAStatus::ok();
}
// Returns the configured processing chains, optionally filtered to one
// Processing::Type. Each chain lists the descriptors of its member effects.
ndk::ScopedAStatus Factory::queryProcessing(const std::optional<Processing::Type>& in_type,
                                            std::vector<Processing>* _aidl_return) {
    std::lock_guard lg(mMutex);
    for (const auto& [procType, effectLibsList] : mConfig.getProcessingMap()) {
        if (in_type.has_value() && in_type.value() != procType) {
            continue;
        }
        Processing process = {.type = procType /* Processing::Type */};
        for (const auto& effectLibs : effectLibsList) {
            for (const auto& lib : effectLibs.libraries) {
                Descriptor desc;
                // Set the proxy UUID first: it comes from the config XML, not from
                // the library query below.
                if (effectLibs.proxyLibrary.has_value()) {
                    desc.common.id.proxy = effectLibs.proxyLibrary.value().uuid;
                }
                RETURN_IF_ASTATUS_NOT_OK(getDescriptorWithUuid_l(lib.uuid, &desc),
                                         "getDescriptorFailed");
                process.ids.emplace_back(desc);
            }
        }
        _aidl_return->emplace_back(process);
    }
    return ndk::ScopedAStatus::ok();
}
// Creates an effect instance for the given implementation UUID, configures its
// binder scheduling policy and records the instance for later destruction.
// Cleanups applied: single map lookup instead of count()+operator[] (two lookups),
// and removal of the unreachable duplicate 'return ok()' after the if/else.
ndk::ScopedAStatus Factory::createEffect(const AudioUuid& in_impl_uuid,
                                         std::shared_ptr<IEffect>* _aidl_return) {
    LOG(DEBUG) << __func__ << ": UUID " << ::android::audio::utils::toString(in_impl_uuid);
    std::lock_guard lg(mMutex);
    auto libIt = mEffectLibMap.find(in_impl_uuid);
    if (libIt == mEffectLibMap.end()) {
        LOG(ERROR) << __func__ << ": library doesn't exist";
        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
    }
    auto& entry = libIt->second;
    // Lazily resolve the library's entry points.
    getDlSyms_l(entry);
    auto& libInterface = std::get<kMapEntryInterfaceIndex>(entry);
    RETURN_IF(!libInterface || !libInterface->createEffectFunc, EX_NULL_POINTER,
              "dlNullcreateEffectFunc");
    std::shared_ptr<IEffect> effectSp;
    RETURN_IF_BINDER_EXCEPTION(libInterface->createEffectFunc(&in_impl_uuid, &effectSp));
    if (!effectSp) {
        LOG(WARNING) << __func__ << ": library created null instance without return error!";
        return ndk::ScopedAStatus::fromExceptionCode(EX_TRANSACTION_FAILED);
    }
    *_aidl_return = effectSp;
    // Let effect threads inherit real-time scheduling from audio clients.
    ndk::SpAIBinder effectBinder = effectSp->asBinder();
    AIBinder_setMinSchedulerPolicy(effectBinder.get(), SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
    AIBinder_setInheritRt(effectBinder.get(), true);
    mEffectMap[std::weak_ptr<IEffect>(effectSp)] =
            std::make_pair(in_impl_uuid, std::move(effectBinder));
    return ndk::ScopedAStatus::ok();
}
// Destroys one effect instance via its library's destroy entry point and removes
// its bookkeeping entry. Must be called with mMutex held. The map entry is only
// erased after a successful destroy.
ndk::ScopedAStatus Factory::destroyEffectImpl_l(const std::shared_ptr<IEffect>& in_handle) {
    // find the effect entry with key (std::weak_ptr<IEffect>)
    const auto effectIt = mEffectMap.find(std::weak_ptr<IEffect>(in_handle));
    if (effectIt == mEffectMap.end()) {
        LOG(ERROR) << __func__ << ": instance " << in_handle << " does not exist!";
        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
    }
    // find implementation library with UUID
    const auto& uuid = effectIt->second.first;
    const auto libIt = mEffectLibMap.find(uuid);
    if (libIt == mEffectLibMap.end()) {
        LOG(ERROR) << __func__ << ": UUID " << ::android::audio::utils::toString(uuid)
                   << " does not exist in libMap!";
        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
    }
    auto& interface = std::get<kMapEntryInterfaceIndex>(libIt->second);
    RETURN_IF(!interface || !interface->destroyEffectFunc, EX_NULL_POINTER,
              "dlNulldestroyEffectFunc");
    RETURN_IF_BINDER_EXCEPTION(interface->destroyEffectFunc(in_handle));
    mEffectMap.erase(effectIt);
    return ndk::ScopedAStatus::ok();
}
// Sweeps mEffectMap, dropping every entry whose effect instance has already
// been released by all clients (its stored weak_ptr no longer refers to a
// live object). Must be called with mMutex held.
void Factory::cleanupEffectMap_l() {
    auto it = mEffectMap.begin();
    while (it != mEffectMap.end()) {
        // expired() is equivalent to lock() == nullptr, without materializing
        // a temporary shared_ptr.
        it = it->first.expired() ? mEffectMap.erase(it) : std::next(it);
    }
}
// Public entry point for instance destruction: serializes on mMutex,
// delegates to destroyEffectImpl_l(), and unconditionally sweeps expired
// entries afterwards so even a failed destroy cannot leave stale bookkeeping.
ndk::ScopedAStatus Factory::destroyEffect(const std::shared_ptr<IEffect>& in_handle) {
    std::lock_guard lg(mMutex);
    ndk::ScopedAStatus result = destroyEffectImpl_l(in_handle);
    cleanupEffectMap_l();  // always performed, independent of the result
    return result;
}
// Opens the shared library at `path` and records it in mEffectLibMap under
// `impl`. The dlopen handle is wrapped in a unique_ptr with a dlclose deleter
// so the library is released automatically when the map entry is destroyed.
// Symbols are NOT resolved here; that is deferred to getDlSyms_l() on first
// use (RTLD_LAZY). Returns true on success, false if dlopen failed.
// NOTE(review): annotated NO_THREAD_SAFETY_ANALYSIS although it mutates
// mEffectLibMap — callers are expected to hold mMutex; confirm at call sites.
bool Factory::openEffectLibrary(const AudioUuid& impl,
                                const std::string& path) NO_THREAD_SAFETY_ANALYSIS {
    std::function<void(void*)> dlClose = [](void* handle) -> void {
        if (handle && dlclose(handle)) {
            LOG(ERROR) << "dlclose failed " << dlerror();
        }
    };
    auto libHandle =
            std::unique_ptr<void, decltype(dlClose)>{dlopen(path.c_str(), RTLD_LAZY), dlClose};
    if (!libHandle) {
        LOG(ERROR) << __func__ << ": dlopen failed, err: " << dlerror();
        return false;
    }
    LOG(DEBUG) << __func__ << " dlopen lib: " << path
               << "\nimpl:" << ::android::audio::utils::toString(impl) << "\nhandle:" << libHandle;
    // Interface struct starts with all-null function pointers; filled lazily.
    auto interface = new effect_dl_interface_s{nullptr, nullptr, nullptr};
    mEffectLibMap.insert(
            {impl,
             std::make_tuple(std::move(libHandle),
                             std::unique_ptr<struct effect_dl_interface_s>(interface), path)});
    return true;
}
// Builds a Descriptor::Identity for one configured library (type UUID,
// implementation UUID, optional proxy UUID), opens the implementation library
// and, on success, records the identity in mIdentitySet.
// NOTE(review): `libMap` is a function-local static bound to this instance's
// mConfig — this assumes a single Factory instance per process; confirm.
void Factory::createIdentityWithConfig(
        const EffectConfig::Library& configLib, const AudioUuid& typeUuid,
        const std::optional<AudioUuid> proxyUuid) NO_THREAD_SAFETY_ANALYSIS {
    static const auto& libMap = mConfig.getLibraryMap();
    const std::string& libName = configLib.name;
    if (auto path = libMap.find(libName); path != libMap.end()) {
        Descriptor::Identity id;
        id.type = typeUuid;
        id.uuid = configLib.uuid;
        id.proxy = proxyUuid;
        LOG(WARNING) << __func__ << " loading lib " << path->second << ": typeUuid "
                     << ::android::audio::utils::toString(id.type) << "\nimplUuid "
                     << ::android::audio::utils::toString(id.uuid) << " proxyUuid "
                     << (proxyUuid.has_value()
                                 ? ::android::audio::utils::toString(proxyUuid.value())
                                 : "null");
        // Only advertise the identity if the library actually opened.
        if (openEffectLibrary(id.uuid, path->second)) {
            mIdentitySet.insert(std::move(id));
        }
    } else {
        LOG(ERROR) << __func__ << ": library " << libName << " not exist!";
        return;
    }
}
// Walks the parsed effect configuration and registers every configured
// library: for each effect with a resolvable type UUID, creates one identity
// per listed library, passing along the proxy UUID if a proxy library is
// configured. Effects whose type UUID cannot be resolved are skipped with a
// warning.
void Factory::loadEffectLibs() {
    const auto& configEffectsMap = mConfig.getEffectsMap();
    for (const auto& configEffects : configEffectsMap) {
        if (AudioUuid type; EffectConfig::findUuid(configEffects /* xml effect */, &type)) {
            const auto& configLibs = configEffects.second;
            std::optional<AudioUuid> proxyUuid;
            if (configLibs.proxyLibrary.has_value()) {
                const auto& proxyLib = configLibs.proxyLibrary.value();
                proxyUuid = proxyLib.uuid;
            }
            for (const auto& configLib : configLibs.libraries) {
                createIdentityWithConfig(configLib, type, proxyUuid);
            }
        } else {
            LOG(WARNING) << __func__ << ": can not find type UUID for effect "
                         << configEffects.first << " skipping!";
        }
    }
}
// Lazily resolves the library entry points (createEffect/queryEffect/
// destroyEffect) for one mEffectLibMap entry via dlsym(). Each symbol is
// looked up only once; already-resolved pointers are left untouched.
// Logs an error if any of the three symbols is missing after resolution.
// Must be called with mMutex held.
void Factory::getDlSyms_l(DlEntry& entry) {
    auto& dlHandle = std::get<kMapEntryHandleIndex>(entry);
    RETURN_VALUE_IF(!dlHandle, void(), "dlNullHandle");
    // Get the reference of the DL interfaces in library map tuple.
    auto& dlInterface = std::get<kMapEntryInterfaceIndex>(entry);
    // Resolve each entry point only if it has not been resolved yet.
    if (!dlInterface->createEffectFunc) {
        dlInterface->createEffectFunc = (EffectCreateFunctor)dlsym(dlHandle.get(), "createEffect");
    }
    if (!dlInterface->queryEffectFunc) {
        dlInterface->queryEffectFunc = (EffectQueryFunctor)dlsym(dlHandle.get(), "queryEffect");
    }
    if (!dlInterface->destroyEffectFunc) {
        dlInterface->destroyEffectFunc =
                (EffectDestroyFunctor)dlsym(dlHandle.get(), "destroyEffect");
    }
    if (!dlInterface->createEffectFunc || !dlInterface->destroyEffectFunc ||
        !dlInterface->queryEffectFunc) {
        // NOTE: dlerror() reports only the most recent dl failure, which may
        // not correspond to every missing symbol listed here.
        LOG(ERROR) << __func__ << ": create (" << dlInterface->createEffectFunc << "), query ("
                   << dlInterface->queryEffectFunc << "), or destroy ("
                   << dlInterface->destroyEffectFunc
                   << ") not exist in library: " << std::get<kMapEntryLibNameIndex>(entry)
                   << " handle: " << dlHandle << " with dlerror: " << dlerror();
        return;
    }
}
} // namespace aidl::android::hardware::audio::effect

418
audio/EffectImpl.cpp Normal file
View File

@@ -0,0 +1,418 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <memory>
#define ATRACE_TAG ATRACE_TAG_AUDIO
#define LOG_TAG "AHAL_EffectImpl"
#include <utils/Trace.h>
#include "effect-impl/EffectImpl.h"
#include "effect-impl/EffectTypes.h"
#include "include/effect-impl/EffectTypes.h"
using aidl::android::hardware::audio::effect::CommandId;
using aidl::android::hardware::audio::effect::Descriptor;
using aidl::android::hardware::audio::effect::IEffect;
using aidl::android::hardware::audio::effect::kDestroyAnyStateSupportedVersion;
using aidl::android::hardware::audio::effect::kEventFlagDataMqNotEmpty;
using aidl::android::hardware::audio::effect::kEventFlagNotEmpty;
using aidl::android::hardware::audio::effect::kReopenSupportedVersion;
using aidl::android::hardware::audio::effect::State;
using aidl::android::media::audio::common::PcmType;
using ::android::hardware::EventFlag;
// C entry point resolved via dlsym() by the effect factory (see
// Factory::getDlSyms_l); destroys an effect instance created by this library.
//
// Behavior is gated on the instance's reported interface version:
//  - below kDestroyAnyStateSupportedVersion the instance must already be in
//    INIT (i.e. closed) or destruction is refused with EX_ILLEGAL_STATE;
//  - at or above that version the instance is force-reset and closed here,
//    so destruction succeeds from any state.
extern "C" binder_exception_t destroyEffect(const std::shared_ptr<IEffect>& instanceSp) {
    if (!instanceSp) {
        LOG(ERROR) << __func__ << " nullptr";
        return EX_ILLEGAL_ARGUMENT;
    }
    Descriptor desc;
    ndk::ScopedAStatus status = instanceSp->getDescriptor(&desc);
    if (!status.isOk()) {
        LOG(ERROR) << __func__ << " instance " << instanceSp.get()
                   << " failed to get descriptor, status: " << status.getDescription();
        return EX_ILLEGAL_STATE;
    }
    State state;
    status = instanceSp->getState(&state);
    if (!status.isOk()) {
        LOG(ERROR) << __func__ << " " << desc.common.name << " instance " << instanceSp.get()
                   << " in state: " << toString(state) << ", status: " << status.getDescription();
        return EX_ILLEGAL_STATE;
    }
    int effectVersion = 0;
    // Version query failure is non-fatal: effectVersion stays 0 and the
    // stricter (legacy) path below is taken.
    if (!instanceSp->getInterfaceVersion(&effectVersion).isOk()) {
        LOG(WARNING) << __func__ << " " << desc.common.name << " failed to get interface version";
    }
    if (effectVersion < kDestroyAnyStateSupportedVersion) {
        if (State::INIT != state) {
            LOG(ERROR) << __func__ << " " << desc.common.name << " can not destroy instance "
                       << instanceSp.get() << " in state: " << toString(state);
            return EX_ILLEGAL_STATE;
        }
    } else {
        // Results intentionally ignored: best-effort teardown before destroy.
        instanceSp->command(CommandId::RESET);
        instanceSp->close();
    }
    LOG(DEBUG) << __func__ << " " << desc.common.name << " instance " << instanceSp.get()
               << " destroyed";
    return EX_NONE;
}
namespace aidl::android::hardware::audio::effect {
// Opens the effect: validates the 32-bit-float PCM requirement, creates the
// implementation context, applies optional effect-specific parameters, moves
// the state INIT -> IDLE, duplicates the FMQs into `ret` for the client, and
// starts the worker thread. Idempotent: returns OK without side effects if
// the instance is already open (state != INIT).
ndk::ScopedAStatus EffectImpl::open(const Parameter::Common& common,
                                    const std::optional<Parameter::Specific>& specific,
                                    OpenEffectReturn* ret) {
    // effect only support 32bits float
    RETURN_IF(common.input.base.format.pcm != common.output.base.format.pcm ||
                      common.input.base.format.pcm != PcmType::FLOAT_32_BIT,
              EX_ILLEGAL_ARGUMENT, "dataMustBe32BitsFloat");
    std::lock_guard lg(mImplMutex);
    RETURN_OK_IF(mState != State::INIT);
    mImplContext = createContext(common);
    RETURN_IF(!mImplContext, EX_NULL_POINTER, "nullContext");
    RETURN_IF(!getInterfaceVersion(&mVersion).isOk(), EX_UNSUPPORTED_OPERATION,
              "FailedToGetInterfaceVersion");
    mImplContext->setVersion(mVersion);
    mEventFlag = mImplContext->getStatusEventFlag();
    // Older clients signal with kEventFlagNotEmpty; reopen-capable versions
    // use the dedicated data-MQ-not-empty flag.
    mDataMqNotEmptyEf =
            mVersion >= kReopenSupportedVersion ? kEventFlagDataMqNotEmpty : kEventFlagNotEmpty;
    if (specific.has_value()) {
        RETURN_IF_ASTATUS_NOT_OK(setParameterSpecific(specific.value()), "setSpecParamErr");
    }
    mState = State::IDLE;
    mImplContext->dupeFmq(ret);
    RETURN_IF(createThread(getEffectNameWithVersion()) != RetCode::SUCCESS,
              EX_UNSUPPORTED_OPERATION, "FailedToCreateWorker");
    LOG(INFO) << getEffectNameWithVersion() << __func__;
    return ndk::ScopedAStatus::ok();
}
// Re-duplicates the effect's FMQs into `ret` for a client reconnecting to an
// already-open instance. Fails with EX_ILLEGAL_STATE if the effect was never
// opened or has been closed (state == INIT).
ndk::ScopedAStatus EffectImpl::reopen(OpenEffectReturn* ret) {
    std::lock_guard lg(mImplMutex);
    RETURN_IF(mState == State::INIT, EX_ILLEGAL_STATE, "alreadyClosed");
    RETURN_IF(!mImplContext, EX_NULL_POINTER, "nullContext");
    mImplContext->dupeFmqWithReopen(ret);
    return ndk::ScopedAStatus::ok();
}
// Closes the effect: transitions back to INIT, wakes and destroys the worker
// thread, then releases the implementation context.
// Rejected while PROCESSING; a no-op (OK) if already closed.
// NOTE: the mutex is deliberately dropped between the state change and
// destroyThread() — the worker must be able to take mImplMutex to observe the
// new state and exit; holding the lock across join would deadlock.
ndk::ScopedAStatus EffectImpl::close() {
    {
        std::lock_guard lg(mImplMutex);
        RETURN_OK_IF(mState == State::INIT);
        RETURN_IF(mState == State::PROCESSING, EX_ILLEGAL_STATE, "closeAtProcessing");
        mState = State::INIT;
    }
    // Wake the worker so it re-checks state instead of blocking on the FMQ.
    RETURN_IF(notifyEventFlag(mDataMqNotEmptyEf) != RetCode::SUCCESS, EX_ILLEGAL_STATE,
              "notifyEventFlagNotEmptyFailed");
    // stop the worker thread, ignore the return code
    RETURN_IF(destroyThread() != RetCode::SUCCESS, EX_UNSUPPORTED_OPERATION,
              "FailedToDestroyWorker");
    {
        std::lock_guard lg(mImplMutex);
        releaseContext();
        mImplContext.reset();
    }
    LOG(INFO) << getEffectNameWithVersion() << __func__;
    return ndk::ScopedAStatus::ok();
}
// Dispatches a Parameter write by tag: all common tags (common, device, mode,
// source, volumeStereo) fall through to setParameterCommon(); `specific` goes
// to the subclass hook setParameterSpecific(); anything else is rejected.
ndk::ScopedAStatus EffectImpl::setParameter(const Parameter& param) {
    std::lock_guard lg(mImplMutex);
    LOG(VERBOSE) << getEffectNameWithVersion() << __func__ << " with: " << param.toString();
    const auto& tag = param.getTag();
    switch (tag) {
        case Parameter::common:
        case Parameter::deviceDescription:
        case Parameter::mode:
        case Parameter::source:
            FALLTHROUGH_INTENDED;
        case Parameter::volumeStereo:
            return setParameterCommon(param);
        case Parameter::specific: {
            return setParameterSpecific(param.get<Parameter::specific>());
        }
        default: {
            LOG(ERROR) << getEffectNameWithVersion() << __func__ << " unsupportedParameterTag "
                       << toString(tag);
            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
                                                                    "ParameterNotSupported");
        }
    }
}
// Dispatches a Parameter read by id tag: common tags are served by
// getParameterCommon(); vendor and all other tags are delegated to the
// subclass hook getParameterSpecific() and wrapped as Parameter::specific.
ndk::ScopedAStatus EffectImpl::getParameter(const Parameter::Id& id, Parameter* param) {
    std::lock_guard lg(mImplMutex);
    switch (id.getTag()) {
        case Parameter::Id::commonTag: {
            RETURN_IF_ASTATUS_NOT_OK(getParameterCommon(id.get<Parameter::Id::commonTag>(), param),
                                     "CommonParamNotSupported");
            break;
        }
        case Parameter::Id::vendorEffectTag:
            FALLTHROUGH_INTENDED;
        default: {
            Parameter::Specific specific;
            RETURN_IF_ASTATUS_NOT_OK(getParameterSpecific(id, &specific), "SpecParamNotSupported");
            param->set<Parameter::specific>(specific);
            break;
        }
    }
    LOG(VERBOSE) << getEffectNameWithVersion() << __func__ << id.toString() << param->toString();
    return ndk::ScopedAStatus::ok();
}
// Writes one common parameter into the implementation context. Caller must
// hold mImplMutex (reached via setParameter()). Each setter failure maps to
// EX_ILLEGAL_ARGUMENT with a tag-specific message.
ndk::ScopedAStatus EffectImpl::setParameterCommon(const Parameter& param) {
    RETURN_IF(!mImplContext, EX_NULL_POINTER, "nullContext");
    const auto& tag = param.getTag();
    switch (tag) {
        case Parameter::common:
            RETURN_IF(mImplContext->setCommon(param.get<Parameter::common>()) != RetCode::SUCCESS,
                      EX_ILLEGAL_ARGUMENT, "setCommFailed");
            break;
        case Parameter::deviceDescription:
            RETURN_IF(mImplContext->setOutputDevice(param.get<Parameter::deviceDescription>()) !=
                              RetCode::SUCCESS,
                      EX_ILLEGAL_ARGUMENT, "setDeviceFailed");
            break;
        case Parameter::mode:
            RETURN_IF(mImplContext->setAudioMode(param.get<Parameter::mode>()) != RetCode::SUCCESS,
                      EX_ILLEGAL_ARGUMENT, "setModeFailed");
            break;
        case Parameter::source:
            RETURN_IF(mImplContext->setAudioSource(param.get<Parameter::source>()) !=
                              RetCode::SUCCESS,
                      EX_ILLEGAL_ARGUMENT, "setSourceFailed");
            break;
        case Parameter::volumeStereo:
            RETURN_IF(mImplContext->setVolumeStereo(param.get<Parameter::volumeStereo>()) !=
                              RetCode::SUCCESS,
                      EX_ILLEGAL_ARGUMENT, "setVolumeStereoFailed");
            break;
        default: {
            LOG(ERROR) << getEffectNameWithVersion() << __func__ << " unsupportedParameterTag "
                       << toString(tag);
            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
                                                                    "commonParamNotSupported");
        }
    }
    return ndk::ScopedAStatus::ok();
}
// Reads one common parameter from the implementation context into `param`.
// Caller must hold mImplMutex (reached via getParameter()). Unknown tags are
// rejected with EX_ILLEGAL_ARGUMENT.
ndk::ScopedAStatus EffectImpl::getParameterCommon(const Parameter::Tag& tag, Parameter* param) {
    RETURN_IF(!mImplContext, EX_NULL_POINTER, "nullContext");
    switch (tag) {
        case Parameter::common: {
            param->set<Parameter::common>(mImplContext->getCommon());
            break;
        }
        case Parameter::deviceDescription: {
            param->set<Parameter::deviceDescription>(mImplContext->getOutputDevice());
            break;
        }
        case Parameter::mode: {
            param->set<Parameter::mode>(mImplContext->getAudioMode());
            break;
        }
        case Parameter::source: {
            param->set<Parameter::source>(mImplContext->getAudioSource());
            break;
        }
        case Parameter::volumeStereo: {
            param->set<Parameter::volumeStereo>(mImplContext->getVolumeStereo());
            break;
        }
        default: {
            LOG(DEBUG) << getEffectNameWithVersion() << __func__ << " unsupported tag "
                       << toString(tag);
            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
                                                                    "tagNotSupported");
        }
    }
    return ndk::ScopedAStatus::ok();
}
// Returns the current lifecycle state. Reads mState without taking
// mImplMutex; NO_THREAD_SAFETY_ANALYSIS suppresses the lock check for this
// deliberately lock-free single-enum read.
ndk::ScopedAStatus EffectImpl::getState(State* state) NO_THREAD_SAFETY_ANALYSIS {
    *state = mState;
    return ndk::ScopedAStatus::ok();
}
// Handles lifecycle commands for an open instance.
// START: IDLE -> PROCESSING, wakes the event flag and starts the worker
//        (no-op if already PROCESSING).
// STOP:  -> IDLE, wakes the worker so it can observe the state change, stops
//        it, then lets the subclass react (no-op if already IDLE).
// RESET: like STOP but accepted in any post-open state, because commandImpl
//        additionally clears context state and buffers.
// Any command before open() is rejected with EX_ILLEGAL_STATE.
ndk::ScopedAStatus EffectImpl::command(CommandId command) {
    std::lock_guard lg(mImplMutex);
    RETURN_IF(mState == State::INIT, EX_ILLEGAL_STATE, "instanceNotOpen");
    switch (command) {
        case CommandId::START:
            RETURN_OK_IF(mState == State::PROCESSING);
            RETURN_IF_ASTATUS_NOT_OK(commandImpl(command), "commandImplFailed");
            mState = State::PROCESSING;
            RETURN_IF(notifyEventFlag(mDataMqNotEmptyEf) != RetCode::SUCCESS, EX_ILLEGAL_STATE,
                      "notifyEventFlagNotEmptyFailed");
            startThread();
            break;
        case CommandId::STOP:
            RETURN_OK_IF(mState == State::IDLE);
            mState = State::IDLE;
            RETURN_IF(notifyEventFlag(mDataMqNotEmptyEf) != RetCode::SUCCESS, EX_ILLEGAL_STATE,
                      "notifyEventFlagNotEmptyFailed");
            stopThread();
            RETURN_IF_ASTATUS_NOT_OK(commandImpl(command), "commandImplFailed");
            break;
        case CommandId::RESET:
            mState = State::IDLE;
            RETURN_IF(notifyEventFlag(mDataMqNotEmptyEf) != RetCode::SUCCESS, EX_ILLEGAL_STATE,
                      "notifyEventFlagNotEmptyFailed");
            stopThread();
            RETURN_IF_ASTATUS_NOT_OK(commandImpl(command), "commandImplFailed");
            break;
        default:
            // Fixed log message: this branch rejects unknown command ids; the
            // previous text ("instance still processing") described a
            // different condition.
            LOG(ERROR) << getEffectNameWithVersion() << __func__ << " unsupported commandId "
                       << toString(command);
            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
                                                                    "CommandIdNotSupported");
    }
    LOG(VERBOSE) << getEffectNameWithVersion() << __func__
                 << " transfer to state: " << toString(mState);
    return ndk::ScopedAStatus::ok();
}
// Default per-command context handling, invoked by command() with mImplMutex
// held. Subclasses may override; this base version enables/disables the
// context and, for RESET, also clears its state and work buffer.
ndk::ScopedAStatus EffectImpl::commandImpl(CommandId command) {
    RETURN_IF(!mImplContext, EX_NULL_POINTER, "nullContext");
    switch (command) {
        case CommandId::START:
            mImplContext->enable();
            break;
        case CommandId::STOP:
            mImplContext->disable();
            break;
        case CommandId::RESET:
            mImplContext->disable();
            mImplContext->reset();
            mImplContext->resetBuffer();
            break;
        default:
            LOG(ERROR) << __func__ << " commandId " << toString(command) << " not supported";
            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
                                                                    "commandIdNotSupported");
    }
    return ndk::ScopedAStatus::ok();
}
// Builds the default implementation context for this effect; subclasses
// override to supply a specialized context type.
std::shared_ptr<EffectContext> EffectImpl::createContext(const Parameter::Common& common) {
    // One status entry in flight at a time is enough for the default
    // process() loop, which writes exactly one status per iteration.
    constexpr int kStatusMqDepth = 1;
    return std::make_shared<EffectContext>(kStatusMqDepth, common);
}
// Releases the implementation context. shared_ptr::reset() is a no-op when
// the pointer is already empty, so no explicit null check is required.
RetCode EffectImpl::releaseContext() {
    mImplContext.reset();
    return RetCode::SUCCESS;
}
// Best-effort teardown helper (typically called from subclass destructors):
// stop processing, then close. Return statuses are intentionally ignored —
// either call may legitimately fail if the instance is already stopped/closed.
void EffectImpl::cleanUp() {
    command(CommandId::STOP);
    close();
}
// Wakes any waiter (the worker thread in process()) blocked on the status
// EventFlag with the given flag bits. Fails if the flag was never obtained
// from the context or if the underlying wake() call errors.
RetCode EffectImpl::notifyEventFlag(uint32_t flag) {
    if (!mEventFlag) {
        LOG(ERROR) << getEffectNameWithVersion() << __func__ << ": StatusEventFlag invalid";
        return RetCode::ERROR_EVENT_FLAG_ERROR;
    }
    if (const auto ret = mEventFlag->wake(flag); ret != ::android::OK) {
        LOG(ERROR) << getEffectNameWithVersion() << __func__ << ": wake failure with ret " << ret;
        return RetCode::ERROR_EVENT_FLAG_ERROR;
    }
    LOG(VERBOSE) << getEffectNameWithVersion() << __func__ << ": " << std::hex << mEventFlag;
    return RetCode::SUCCESS;
}
// Assembles an IEffect::Status parcelable from a binder status code and the
// number of FMQ samples consumed/produced in one processing pass.
IEffect::Status EffectImpl::status(binder_status_t status, size_t consumed, size_t produced) {
    IEffect::Status result;
    result.status = status;
    result.fmqConsumed = consumed;
    result.fmqProduced = produced;
    return result;
}
// One iteration of the worker loop: block on the data-available event flag,
// then (under mImplMutex) verify the state, move as many samples as both FMQs
// allow through effectProcessImpl(), and report the result on the status FMQ.
void EffectImpl::process() {
    ATRACE_NAME(getEffectNameWithVersion().c_str());
    /**
     * wait for the EventFlag without lock, it's ok because the mEfGroup pointer will not change
     * in the life cycle of workerThread (threadLoop).
     */
    uint32_t efState = 0;
    if (!mEventFlag ||
        ::android::OK != mEventFlag->wait(mDataMqNotEmptyEf, &efState, 0 /* no timeout */,
                                          true /* retry */) ||
        !(efState & mDataMqNotEmptyEf)) {
        LOG(ERROR) << getEffectNameWithVersion() << __func__ << ": StatusEventFlag - " << mEventFlag
                   << " efState - " << std::hex << efState;
        return;
    }
    {
        std::lock_guard lg(mImplMutex);
        // The wake may have been a state-change notification (STOP/RESET/
        // close) rather than data arrival; bail out unless actively running.
        if (mState != State::PROCESSING && mState != State::DRAINING) {
            LOG(DEBUG) << getEffectNameWithVersion()
                       << " skip process in state: " << toString(mState);
            return;
        }
        RETURN_VALUE_IF(!mImplContext, void(), "nullContext");
        auto statusMQ = mImplContext->getStatusFmq();
        auto inputMQ = mImplContext->getInputDataFmq();
        auto outputMQ = mImplContext->getOutputDataFmq();
        auto buffer = mImplContext->getWorkBuffer();
        if (!inputMQ || !outputMQ) {
            return;
        }
        assert(mImplContext->getWorkBufferSize() >=
               std::max(inputMQ->availableToRead(), outputMQ->availableToWrite()));
        // Process only what can be both read and written without blocking.
        auto processSamples = std::min(inputMQ->availableToRead(), outputMQ->availableToWrite());
        if (processSamples) {
            inputMQ->read(buffer, processSamples);
            // In-place processing: the work buffer serves as input and output.
            IEffect::Status status = effectProcessImpl(buffer, buffer, processSamples);
            outputMQ->write(buffer, status.fmqProduced);
            statusMQ->writeBlocking(&status, 1);
        }
    }
}
// A placeholder processing implementation to copy samples from input to
// output unchanged; real effects override this with their DSP.
IEffect::Status EffectImpl::effectProcessImpl(float* in, float* out, int samples) {
    for (int i = 0; i < samples; ++i) {
        out[i] = in[i];
    }
    return {STATUS_OK, samples, samples};
}
} // namespace aidl::android::hardware::audio::effect

73
audio/EffectMain.cpp Normal file
View File

@@ -0,0 +1,73 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "effectFactory-impl/EffectFactory.h"
#include <android-base/logging.h>
#include <android/binder_manager.h>
#include <android/binder_process.h>
#include <system/audio_config.h>
#ifdef __ANDROID_APEX__
#include <android/apexsupport.h>
#endif
/** Default name of effect configuration file. */
static const char* kDefaultConfigName = "audio_effects_config.xml";
// Resolves the effect configuration file path. On vendor API >= 202404 the
// config is first looked up inside the hosting APEX (/apex/<name>/etc/); if
// that fails or on older APIs, falls back to the standard audio config
// directories via audio_find_readable_configuration_file().
// NOTE(review): both branches of a non-template "if constexpr" are fully
// type-checked, so this relies on <android/apexsupport.h> declarations being
// available whenever __ANDROID_VENDOR_API__ >= 202404 — confirm build config.
static inline std::string config_file_path() {
    if constexpr (__ANDROID_VENDOR_API__ >= 202404) {
        AApexInfo *apexInfo;
        if (AApexInfo_create(&apexInfo) == AAPEXINFO_OK) {
            std::string apexName(AApexInfo_getName(apexInfo));
            AApexInfo_destroy(apexInfo);
            std::string candidatePath("/apex/");
            candidatePath.append(apexName).append("/etc/").append(kDefaultConfigName);
            LOG(DEBUG) << __func__ << " effect lib path " << candidatePath;
            // Use the APEX copy only if it is actually readable.
            if (access(candidatePath.c_str(), R_OK) == 0) {
                return candidatePath;
            }
        }
    } else {
        LOG(DEBUG) << __func__ << " libapexsupport is not supported";
    }
    LOG(DEBUG) << __func__ << ": Unable to resolve config file path in APEX";
    return android::audio_find_readable_configuration_file(kDefaultConfigName);
}
// Service entry point: locates the effect configuration, constructs the
// effect Factory, registers it with servicemanager as
// "<descriptor>/default", and joins the binder thread pool (never returns
// under normal operation).
int main() {
    // This is a debug implementation, always enable debug logging.
    android::base::SetMinimumLogSeverity(::android::base::DEBUG);
    // 0 extra threads: all binder calls are served on the joined main thread.
    ABinderProcess_setThreadPoolMaxThreadCount(0);
    auto configFile = config_file_path();
    if (configFile == "") {
        LOG(ERROR) << __func__ << ": config file " << kDefaultConfigName << " not found!";
        return EXIT_FAILURE;
    }
    LOG(DEBUG) << __func__ << ": start factory with configFile:" << configFile;
    auto effectFactory =
            ndk::SharedRefBase::make<aidl::android::hardware::audio::effect::Factory>(configFile);
    std::string serviceName = std::string() + effectFactory->descriptor + "/default";
    binder_status_t status =
            AServiceManager_addService(effectFactory->asBinder().get(), serviceName.c_str());
    CHECK_EQ(STATUS_OK, status);
    LOG(DEBUG) << __func__ << ": effectFactory: " << serviceName << " start";
    ABinderProcess_joinThreadPool();
    return EXIT_FAILURE;  // should not reach
}

130
audio/EffectThread.cpp Normal file
View File

@@ -0,0 +1,130 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cstddef>
#include <memory>
#define LOG_TAG "AHAL_EffectThread"
#include <android-base/logging.h>
#include <pthread.h>
#include <sys/resource.h>
#include "effect-impl/EffectThread.h"
#include "effect-impl/EffectTypes.h"
namespace aidl::android::hardware::audio::effect {
// Ensures the worker thread is signalled to exit and joined before the
// object's members are destroyed.
EffectThread::~EffectThread() {
    destroyThread();
}
// Creates the worker thread (no-op if one already exists). The thread starts
// parked: mStop=true means threadLoop() blocks on the condition variable
// until startThread() is called.
RetCode EffectThread::createThread(const std::string& name, int priority) {
    if (mThread.joinable()) {
        LOG(WARNING) << mName << __func__ << " thread already created, no-op";
        return RetCode::SUCCESS;
    }
    mName = name;
    mPriority = priority;
    {
        std::lock_guard lg(mThreadMutex);
        mStop = true;
        mExit = false;
    }
    // Flags are initialized above, before the thread can observe them.
    mThread = std::thread(&EffectThread::threadLoop, this);
    LOG(VERBOSE) << mName << __func__ << " priority " << mPriority << " done";
    return RetCode::SUCCESS;
}
// Signals the worker to exit (mExit=true), wakes it, and joins. Safe to call
// multiple times: join is guarded by joinable().
RetCode EffectThread::destroyThread() {
    {
        std::lock_guard lg(mThreadMutex);
        mStop = mExit = true;
    }
    mCv.notify_one();
    if (mThread.joinable()) {
        mThread.join();
    }
    LOG(VERBOSE) << mName << __func__;
    return RetCode::SUCCESS;
}
// Unparks the worker. If a drain was in progress, starting again merely
// cancels the drain (mStop keeps its current value); otherwise clears mStop
// so threadLoop() resumes calling process().
RetCode EffectThread::startThread() {
    {
        std::lock_guard lg(mThreadMutex);
        if (mDraining) {
            mDraining = false;
        } else {
            mStop = false;
        }
        mCv.notify_one();
    }
    LOG(VERBOSE) << mName << __func__;
    return RetCode::SUCCESS;
}
// Parks the worker: sets mStop so threadLoop() blocks on the condition
// variable after its current iteration. The thread itself is kept alive.
RetCode EffectThread::stopThread() {
    {
        std::lock_guard lg(mThreadMutex);
        mStop = true;
        mCv.notify_one();
    }
    LOG(VERBOSE) << mName << __func__;
    return RetCode::SUCCESS;
}
// Marks the worker as draining (it keeps running; see startThread()/
// finishDraining() for how the flag is consumed).
RetCode EffectThread::startDraining() {
    std::lock_guard lg(mThreadMutex);
    mDraining = true;
    mCv.notify_one();
    LOG(VERBOSE) << mName << __func__;
    return RetCode::SUCCESS;
}
// Ends a drain and parks the worker in one step: clears mDraining and sets
// mStop so threadLoop() goes back to waiting.
RetCode EffectThread::finishDraining() {
    std::lock_guard lg(mThreadMutex);
    mDraining = false;
    mStop = true;
    mCv.notify_one();
    LOG(VERBOSE) << mName << __func__;
    return RetCode::SUCCESS;
}
// Worker thread body: sets the task name (truncated to the kernel's task-name
// limit) and priority, then loops — wait until running (or exit) is
// requested, and call process() once per wakeup. process() is invoked with
// mThreadMutex released so it can take its own locks.
void EffectThread::threadLoop() {
    pthread_setname_np(pthread_self(), mName.substr(0, kMaxTaskNameLen - 1).c_str());
    setpriority(PRIO_PROCESS, 0, mPriority);
    while (true) {
        {
            std::unique_lock l(mThreadMutex);
            ::android::base::ScopedLockAssertion lock_assertion(mThreadMutex);
            mCv.wait(l, [&]() REQUIRES(mThreadMutex) { return mExit || !mStop; });
            if (mExit) {
                LOG(VERBOSE) << mName << " threadLoop EXIT!";
                return;
            }
        }
        process();
    }
}
} // namespace aidl::android::hardware::audio::effect

View File

@@ -0,0 +1,268 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <fcntl.h>
#include <inttypes.h>
#include <unistd.h>
#include <functional>
#include <unordered_map>
#define LOG_TAG "AHAL_Config"
#include <aidl/android/media/audio/common/AudioFlag.h>
#include <aidl/android/media/audio/common/AudioHalEngineConfig.h>
#include <aidl/android/media/audio/common/AudioProductStrategyType.h>
#include <android-base/logging.h>
#include "core-impl/CapEngineConfigXmlConverter.h"
#include "core-impl/EngineConfigXmlConverter.h"
#include "core-impl/XsdcConversion.h"
using aidl::android::hardware::audio::core::internal::CapEngineConfigXmlConverter;
using aidl::android::hardware::audio::core::internal::convertAudioUsageToAidl;
using aidl::android::media::audio::common::AudioAttributes;
using aidl::android::media::audio::common::AudioContentType;
using aidl::android::media::audio::common::AudioFlag;
using aidl::android::media::audio::common::AudioHalAttributesGroup;
using aidl::android::media::audio::common::AudioHalCapCriterion;
using aidl::android::media::audio::common::AudioHalCapCriterionType;
using aidl::android::media::audio::common::AudioHalCapCriterionV2;
using aidl::android::media::audio::common::AudioHalEngineConfig;
using aidl::android::media::audio::common::AudioHalProductStrategy;
using aidl::android::media::audio::common::AudioHalVolumeCurve;
using aidl::android::media::audio::common::AudioHalVolumeGroup;
using aidl::android::media::audio::common::AudioProductStrategyType;
using aidl::android::media::audio::common::AudioSource;
using aidl::android::media::audio::common::AudioStreamType;
using aidl::android::media::audio::common::AudioUsage;
using ::android::BAD_VALUE;
using ::android::base::unexpected;
namespace eng_xsd = android::audio::policy::engine::configuration;
namespace aidl::android::hardware::audio::core::internal {
/** Default path of audio policy cap engine configuration file. */
static constexpr char kCapEngineConfigFileName[] =
"/parameter-framework/Settings/Policy/PolicyConfigurableDomains.xml";
// Maps a product strategy name to a stable AIDL strategy id. The first time a
// name is seen it is assigned the next free vendor id; subsequent lookups
// return the same id (map::insert leaves existing entries untouched).
ConversionResult<int> EngineConfigXmlConverter::convertProductStrategyNameToAidl(
        const std::string& xsdcProductStrategyName) {
    const auto [it, success] = mProductStrategyMap.insert(
            std::make_pair(xsdcProductStrategyName, mNextVendorStrategy));
    if (success) {
        // Consumed a fresh id; advance for the next new strategy name.
        mNextVendorStrategy++;
    }
    return it->second;
}
// Validates an explicitly configured product strategy id: only ids at or
// above the vendor range (VENDOR_STRATEGY_ID_START) are accepted.
ConversionResult<int> EngineConfigXmlConverter::convertProductStrategyIdToAidl(int xsdcId) {
    if (xsdcId >= AudioHalProductStrategy::VENDOR_STRATEGY_ID_START) {
        return xsdcId;
    }
    return unexpected(BAD_VALUE);
}
// True when every field of the attributes still holds its default value
// (UNKNOWN content type/usage, DEFAULT source, no flags, no tags).
bool isDefaultAudioAttributes(const AudioAttributes& attributes) {
    if (attributes.contentType != AudioContentType::UNKNOWN) return false;
    if (attributes.usage != AudioUsage::UNKNOWN) return false;
    if (attributes.source != AudioSource::DEFAULT) return false;
    if (attributes.flags != 0) return false;
    return attributes.tags.empty();
}
// Converts one XSD <Attributes> element into AIDL AudioAttributes. If the
// element is a reference (attributesRef), the reference map is built lazily
// and the referenced element is converted instead. A <bundle> becomes a
// single "key_value" tag entry.
// Side effect: when the converted attributes equal the defaults, records a -1
// sentinel in mDefaultProductStrategyId so the enclosing strategy can later
// be marked as the default (see convertProductStrategyToAidl).
ConversionResult<AudioAttributes> EngineConfigXmlConverter::convertAudioAttributesToAidl(
        const eng_xsd::AttributesType& xsdcAudioAttributes) {
    if (xsdcAudioAttributes.hasAttributesRef()) {
        if (mAttributesReferenceMap.empty()) {
            mAttributesReferenceMap =
                    generateReferenceMap<eng_xsd::AttributesRef, eng_xsd::AttributesRefType>(
                            getXsdcConfig()->getAttributesRef());
        }
        return convertAudioAttributesToAidl(
                *(mAttributesReferenceMap.at(xsdcAudioAttributes.getAttributesRef())
                          .getFirstAttributes()));
    }
    AudioAttributes aidlAudioAttributes;
    if (xsdcAudioAttributes.hasContentType()) {
        aidlAudioAttributes.contentType = VALUE_OR_FATAL(convertAudioContentTypeToAidl(
                xsdcAudioAttributes.getFirstContentType()->getValue()));
    }
    if (xsdcAudioAttributes.hasUsage()) {
        aidlAudioAttributes.usage = VALUE_OR_FATAL(
                convertAudioUsageToAidl(xsdcAudioAttributes.getFirstUsage()->getValue()));
    }
    if (xsdcAudioAttributes.hasSource()) {
        aidlAudioAttributes.source = VALUE_OR_FATAL(
                convertAudioSourceToAidl(xsdcAudioAttributes.getFirstSource()->getValue()));
    }
    if (xsdcAudioAttributes.hasFlags()) {
        std::vector<eng_xsd::FlagType> xsdcFlagTypeVec =
                xsdcAudioAttributes.getFirstFlags()->getValue();
        aidlAudioAttributes.flags = VALUE_OR_FATAL(convertAudioFlagsToAidl(xsdcFlagTypeVec));
    }
    if (xsdcAudioAttributes.hasBundle()) {
        const eng_xsd::BundleType* xsdcBundle = xsdcAudioAttributes.getFirstBundle();
        aidlAudioAttributes.tags.reserve(1);
        aidlAudioAttributes.tags.push_back(xsdcBundle->getKey() + "_" + xsdcBundle->getValue());
    }
    if (isDefaultAudioAttributes(aidlAudioAttributes)) {
        // -1 sentinel: resolved to the owning strategy's real id later.
        mDefaultProductStrategyId = std::optional<int>{-1};
    }
    return aidlAudioAttributes;
}
// Converts one XSD <AttributesGroup> into AIDL. Attributes may appear either
// as nested <Attributes> elements or inline on the group itself (contentType/
// usage/source/flags/bundle); a group providing neither form is rejected as a
// configuration error.
// Fix: removed the function-local static kStreamTypeEnumOffset, which was
// computed but never used anywhere in this function (dead local).
ConversionResult<AudioHalAttributesGroup> EngineConfigXmlConverter::convertAttributesGroupToAidl(
        const eng_xsd::AttributesGroup& xsdcAttributesGroup) {
    AudioHalAttributesGroup aidlAttributesGroup;
    aidlAttributesGroup.streamType = xsdcAttributesGroup.hasStreamType()
                                             ? VALUE_OR_FATAL(convertAudioStreamTypeToAidl(
                                                       xsdcAttributesGroup.getStreamType()))
                                             : AudioStreamType::INVALID;
    aidlAttributesGroup.volumeGroupName = xsdcAttributesGroup.getVolumeGroup();
    if (xsdcAttributesGroup.hasAttributes_optional()) {
        aidlAttributesGroup.attributes =
                VALUE_OR_FATAL((convertCollectionToAidl<eng_xsd::AttributesType, AudioAttributes>(
                        xsdcAttributesGroup.getAttributes_optional(),
                        std::bind(&EngineConfigXmlConverter::convertAudioAttributesToAidl, this,
                                  std::placeholders::_1))));
    } else if (xsdcAttributesGroup.hasContentType_optional() ||
               xsdcAttributesGroup.hasUsage_optional() ||
               xsdcAttributesGroup.hasSource_optional() ||
               xsdcAttributesGroup.hasFlags_optional() ||
               xsdcAttributesGroup.hasBundle_optional()) {
        // Wrap the inline fields in a synthetic AttributesType and reuse the
        // element converter.
        aidlAttributesGroup.attributes.push_back(VALUE_OR_FATAL(convertAudioAttributesToAidl(
                eng_xsd::AttributesType(xsdcAttributesGroup.getContentType_optional(),
                                        xsdcAttributesGroup.getUsage_optional(),
                                        xsdcAttributesGroup.getSource_optional(),
                                        xsdcAttributesGroup.getFlags_optional(),
                                        xsdcAttributesGroup.getBundle_optional(), std::nullopt))));
    } else {
        LOG(ERROR) << __func__ << " Review Audio Policy config: no audio attributes provided for "
                   << aidlAttributesGroup.toString();
        return unexpected(BAD_VALUE);
    }
    return aidlAttributesGroup;
}
// Converts one XSD <ProductStrategy> into AIDL. An explicit id is validated
// against the vendor range; otherwise a stable id is derived from the name.
// If converting this strategy's attributes flagged the default sentinel (-1),
// the sentinel is resolved to this strategy's real id.
ConversionResult<AudioHalProductStrategy> EngineConfigXmlConverter::convertProductStrategyToAidl(
        const eng_xsd::ProductStrategies::ProductStrategy& xsdcProductStrategy) {
    AudioHalProductStrategy aidlProductStrategy;
    if (xsdcProductStrategy.hasId()) {
        aidlProductStrategy.id =
                VALUE_OR_FATAL(convertProductStrategyIdToAidl(xsdcProductStrategy.getId()));
    } else {
        aidlProductStrategy.id =
                VALUE_OR_FATAL(convertProductStrategyNameToAidl(xsdcProductStrategy.getName()));
    }
    aidlProductStrategy.name = xsdcProductStrategy.getName();
    if (xsdcProductStrategy.hasAttributesGroup()) {
        aidlProductStrategy.attributesGroups = VALUE_OR_FATAL(
                (convertCollectionToAidl<eng_xsd::AttributesGroup, AudioHalAttributesGroup>(
                        xsdcProductStrategy.getAttributesGroup(),
                        std::bind(&EngineConfigXmlConverter::convertAttributesGroupToAidl, this,
                                  std::placeholders::_1))));
    }
    // Resolve the -1 sentinel set by convertAudioAttributesToAidl().
    if ((mDefaultProductStrategyId != std::nullopt) && (mDefaultProductStrategyId.value() == -1)) {
        mDefaultProductStrategyId = aidlProductStrategy.id;
    }
    return aidlProductStrategy;
}
// Converts one XSD <volume> curve into AIDL. A curve may either reference a
// shared curve by name (ref, resolved through a lazily built reference map)
// or list its points inline.
ConversionResult<AudioHalVolumeCurve> EngineConfigXmlConverter::convertVolumeCurveToAidl(
        const eng_xsd::Volume& xsdcVolumeCurve) {
    AudioHalVolumeCurve aidlVolumeCurve;
    // NOTE(review): direct static_cast assumes the XSD and AIDL DeviceCategory
    // enums share numeric values — confirm if either enum changes.
    aidlVolumeCurve.deviceCategory =
            static_cast<AudioHalVolumeCurve::DeviceCategory>(xsdcVolumeCurve.getDeviceCategory());
    if (xsdcVolumeCurve.hasRef()) {
        if (mVolumesReferenceMap.empty()) {
            mVolumesReferenceMap = generateReferenceMap<eng_xsd::VolumesType, eng_xsd::VolumeRef>(
                    getXsdcConfig()->getVolumes());
        }
        aidlVolumeCurve.curvePoints = VALUE_OR_FATAL(
                (convertCollectionToAidl<std::string, AudioHalVolumeCurve::CurvePoint>(
                        mVolumesReferenceMap.at(xsdcVolumeCurve.getRef()).getPoint(),
                        &convertCurvePointToAidl)));
    } else {
        aidlVolumeCurve.curvePoints = VALUE_OR_FATAL(
                (convertCollectionToAidl<std::string, AudioHalVolumeCurve::CurvePoint>(
                        xsdcVolumeCurve.getPoint(), &convertCurvePointToAidl)));
    }
    return aidlVolumeCurve;
}
// Converts one <volumeGroup> element: name, index range, and one curve per
// <volume> child.
ConversionResult<AudioHalVolumeGroup> EngineConfigXmlConverter::convertVolumeGroupToAidl(
        const eng_xsd::VolumeGroupsType::VolumeGroup& xsdcVolumeGroup) {
    AudioHalVolumeGroup aidlVolumeGroup;
    aidlVolumeGroup.name = xsdcVolumeGroup.getName();
    aidlVolumeGroup.minIndex = xsdcVolumeGroup.getIndexMin();
    aidlVolumeGroup.maxIndex = xsdcVolumeGroup.getIndexMax();
    aidlVolumeGroup.volumeCurves =
            VALUE_OR_FATAL((convertCollectionToAidl<eng_xsd::Volume, AudioHalVolumeCurve>(
                    xsdcVolumeGroup.getVolume(),
                    [this](const eng_xsd::Volume& xsdcVolume) {
                        return convertVolumeCurveToAidl(xsdcVolume);
                    })));
    return aidlVolumeGroup;
}
// Returns a mutable reference to the AIDL engine config assembled by 'init'.
AudioHalEngineConfig& EngineConfigXmlConverter::getAidlEngineConfig() {
    return mAidlEngineConfig;
}
void EngineConfigXmlConverter::init() {
mProductStrategyMap = getLegacyProductStrategyMap();
if (getXsdcConfig()->hasProductStrategies()) {
mAidlEngineConfig.productStrategies = VALUE_OR_FATAL(
(convertWrappedCollectionToAidl<eng_xsd::ProductStrategies,
eng_xsd::ProductStrategies::ProductStrategy,
AudioHalProductStrategy>(
getXsdcConfig()->getProductStrategies(),
&eng_xsd::ProductStrategies::getProductStrategy,
std::bind(&EngineConfigXmlConverter::convertProductStrategyToAidl, this,
std::placeholders::_1))));
if (mDefaultProductStrategyId) {
mAidlEngineConfig.defaultProductStrategyId = mDefaultProductStrategyId.value();
}
}
if (getXsdcConfig()->hasVolumeGroups()) {
mAidlEngineConfig.volumeGroups = VALUE_OR_FATAL(
(convertWrappedCollectionToAidl<eng_xsd::VolumeGroupsType,
eng_xsd::VolumeGroupsType::VolumeGroup,
AudioHalVolumeGroup>(
getXsdcConfig()->getVolumeGroups(),
&eng_xsd::VolumeGroupsType::getVolumeGroup,
std::bind(&EngineConfigXmlConverter::convertVolumeGroupToAidl, this,
std::placeholders::_1))));
}
if (getXsdcConfig()->hasCriteria() && getXsdcConfig()->hasCriterion_types()) {
AudioHalEngineConfig::CapSpecificConfig capSpecificConfig;
capSpecificConfig.criteriaV2 =
std::make_optional<>(VALUE_OR_FATAL((convertCapCriteriaCollectionToAidl(
getXsdcConfig()->getCriteria(), getXsdcConfig()->getCriterion_types()))));
internal::CapEngineConfigXmlConverter capEngConfigConverter{
::android::audio_find_readable_configuration_file(kCapEngineConfigFileName)};
if (capEngConfigConverter.getStatus() == ::android::OK) {
capSpecificConfig.domains = std::move(capEngConfigConverter.getAidlCapEngineConfig());
}
mAidlEngineConfig.capSpecificConfig = capSpecificConfig;
}
}
} // namespace aidl::android::hardware::audio::core::internal

1841
audio/Module.cpp Normal file

File diff suppressed because it is too large Load Diff

132
audio/ModulePrimary.cpp Normal file
View File

@@ -0,0 +1,132 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <vector>
#define LOG_TAG "AHAL_ModulePrimary"
#include <Utils.h>
#include <android-base/logging.h>
#include "core-impl/ModulePrimary.h"
#include "core-impl/StreamMmapStub.h"
#include "core-impl/StreamOffloadStub.h"
#include "core-impl/StreamPrimary.h"
#include "core-impl/Telephony.h"
using aidl::android::hardware::audio::common::areAllBitPositionFlagsSet;
using aidl::android::hardware::audio::common::hasMmapFlag;
using aidl::android::hardware::audio::common::SinkMetadata;
using aidl::android::hardware::audio::common::SourceMetadata;
using aidl::android::hardware::audio::core::StreamDescriptor;
using aidl::android::media::audio::common::AudioInputFlags;
using aidl::android::media::audio::common::AudioIoFlags;
using aidl::android::media::audio::common::AudioOffloadInfo;
using aidl::android::media::audio::common::AudioOutputFlags;
using aidl::android::media::audio::common::AudioPort;
using aidl::android::media::audio::common::AudioPortConfig;
using aidl::android::media::audio::common::AudioPortExt;
using aidl::android::media::audio::common::MicrophoneInfo;
namespace aidl::android::hardware::audio::core {
// Returns the module's ITelephony child interface, creating it lazily on the
// first call and caching it for subsequent calls.
ndk::ScopedAStatus ModulePrimary::getTelephony(std::shared_ptr<ITelephony>* _aidl_return) {
    if (!mTelephony) {
        mTelephony = ndk::SharedRefBase::make<Telephony>();
    }
    *_aidl_return = mTelephony.getInstance();
    LOG(DEBUG) << __func__
               << ": returning instance of ITelephony: " << _aidl_return->get()->asBinder().get();
    return ndk::ScopedAStatus::ok();
}
// Computes the stream buffer size in frames. Non-PCM formats supported by the
// offload stub get a fixed half-second buffer; everything else falls back to
// the generic latency-based calculation in the base Module class.
ndk::ScopedAStatus ModulePrimary::calculateBufferSizeFrames(
        const ::aidl::android::media::audio::common::AudioFormatDescription& format,
        int32_t latencyMs, int32_t sampleRateHz, int32_t* bufferSizeFrames) {
    const bool isNonPcm =
            format.type != ::aidl::android::media::audio::common::AudioFormatType::PCM;
    if (isNonPcm && StreamOffloadStub::getSupportedEncodings().count(format.encoding) > 0) {
        *bufferSizeFrames = sampleRateHz / 2;  // 1/2 of a second.
        return ndk::ScopedAStatus::ok();
    }
    return Module::calculateBufferSizeFrames(format, latencyMs, sampleRateHz, bufferSizeFrames);
}
// Instantiates the input stream implementation matching the stream context.
// "Stub" is used for MMAP because there is no support for MMAP audio I/O on CVD.
ndk::ScopedAStatus ModulePrimary::createInputStream(StreamContext&& context,
                                                    const SinkMetadata& sinkMetadata,
                                                    const std::vector<MicrophoneInfo>& microphones,
                                                    std::shared_ptr<StreamIn>* result) {
    return context.isMmap()
                   ? createStreamInstance<StreamInMmapStub>(result, std::move(context),
                                                            sinkMetadata, microphones)
                   : createStreamInstance<StreamInPrimary>(result, std::move(context), sinkMetadata,
                                                           microphones);
}
// Instantiates the output stream implementation matching the stream context:
// MMAP stub, compressed-offload stub, or the primary PCM stream.
ndk::ScopedAStatus ModulePrimary::createOutputStream(
        StreamContext&& context, const SourceMetadata& sourceMetadata,
        const std::optional<AudioOffloadInfo>& offloadInfo, std::shared_ptr<StreamOut>* result) {
    if (context.isMmap()) {
        // MMAP audio I/O is not supported on CVD, hence the "Stub" implementation.
        return createStreamInstance<StreamOutMmapStub>(result, std::move(context), sourceMetadata,
                                                       offloadInfo);
    }
    const bool isCompressedOffload = areAllBitPositionFlagsSet(
            context.getFlags().get<AudioIoFlags::output>(),
            {AudioOutputFlags::COMPRESS_OFFLOAD, AudioOutputFlags::NON_BLOCKING});
    if (isCompressedOffload) {
        // There is no actual decoder. The "Stub" stream only extracts the clip
        // duration from the media file header and simulates playback over time.
        return createStreamInstance<StreamOutOffloadStub>(result, std::move(context),
                                                          sourceMetadata, offloadInfo);
    }
    return createStreamInstance<StreamOutPrimary>(result, std::move(context), sourceMetadata,
                                                  offloadInfo);
}
// Pre-creates a shared memory region for an MMAP stream. The actual mmap buffer
// for I/O is created after the stream exits standby via
// 'IStreamCommon.createMmapBuffer', but a valid file descriptor must be returned
// here because 'MmapBufferDescriptor' can not contain a "null" fd.
ndk::ScopedAStatus ModulePrimary::createMmapBuffer(const AudioPortConfig& portConfig,
                                                   int32_t bufferSizeFrames, int32_t frameSizeBytes,
                                                   MmapBufferDescriptor* desc) {
    // Name the region after the mix port handle for easier debugging.
    const std::string regionName =
            std::string("mmap-sim-o-") +
            std::to_string(portConfig.ext.get<AudioPortExt::Tag::mix>().handle);
    const size_t bufferSizeBytes = static_cast<size_t>(bufferSizeFrames) * frameSizeBytes;
    const int sharedMemoryFd = ashmem_create_region(regionName.c_str(), bufferSizeBytes);
    if (sharedMemoryFd < 0) {
        PLOG(ERROR) << __func__ << ": failed to create shared memory region of " << bufferSizeBytes
                    << " bytes";
        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
    }
    desc->sharedMemory.fd = ndk::ScopedFileDescriptor(sharedMemoryFd);
    desc->sharedMemory.size = bufferSizeBytes;
    desc->burstSizeFrames = bufferSizeFrames / 2;
    desc->flags = 0;
    LOG(DEBUG) << __func__ << ": " << desc->toString();
    return ndk::ScopedAStatus::ok();
}
// Returns the nominal stream latency for the given port configuration:
// a low value for MMAP streams, the CTS-compatible standard value otherwise.
int32_t ModulePrimary::getNominalLatencyMs(const AudioPortConfig& portConfig) {
    static constexpr int32_t kLowLatencyMs = 5;
    // 85 ms is chosen considering 4096 frames @ 48 kHz. This is the value which allows
    // the virtual Android device implementation to pass CTS. Hardware implementations
    // should have significantly lower latency.
    static constexpr int32_t kStandardLatencyMs = 85;
    // 'flags' is an optional field of AudioPortConfig; guard against an unset
    // value instead of letting 'std::optional::value()' throw
    // 'std::bad_optional_access'. A config without flags is treated as a
    // regular (non-MMAP) stream.
    return portConfig.flags.has_value() && hasMmapFlag(portConfig.flags.value())
                   ? kLowLatencyMs
                   : kStandardLatencyMs;
}
} // namespace aidl::android::hardware::audio::core

147
audio/SoundDose.cpp Normal file
View File

@@ -0,0 +1,147 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define LOG_TAG "AHAL_SoundDose"
#include "core-impl/SoundDose.h"
#include <aidl/android/hardware/audio/core/sounddose/ISoundDose.h>
#include <android-base/logging.h>
#include <media/AidlConversionCppNdk.h>
#include <utils/Timers.h>
using aidl::android::hardware::audio::core::sounddose::ISoundDose;
using aidl::android::media::audio::common::AudioDevice;
using aidl::android::media::audio::common::AudioDeviceDescription;
using aidl::android::media::audio::common::AudioFormatDescription;
namespace aidl::android::hardware::audio::core::sounddose {
// Validates and stores the RS2 upper bound (dBA) and forwards it to the MEL
// processor when one is already running.
ndk::ScopedAStatus SoundDose::setOutputRs2UpperBound(float in_rs2ValueDbA) {
    // Reject values outside of [MIN_RS2, DEFAULT_MAX_RS2].
    if (in_rs2ValueDbA < MIN_RS2 || in_rs2ValueDbA > DEFAULT_MAX_RS2) {
        LOG(ERROR) << __func__ << ": RS2 value is invalid: " << in_rs2ValueDbA;
        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
    }
    ::android::audio_utils::lock_guard l(mMutex);
    mRs2Value = in_rs2ValueDbA;
    if (mMelProcessor != nullptr) {
        // Propagate the new bound to the already-running processor.
        mMelProcessor->setOutputRs2UpperBound(in_rs2ValueDbA);
    }
    return ndk::ScopedAStatus::ok();
}
// Returns the currently configured RS2 upper bound under the state mutex.
ndk::ScopedAStatus SoundDose::getOutputRs2UpperBound(float* _aidl_return) {
    ::android::audio_utils::lock_guard l(mMutex);
    *_aidl_return = mRs2Value;
    LOG(DEBUG) << __func__ << ": returning " << *_aidl_return;
    return ndk::ScopedAStatus::ok();
}
// Stores the framework callback used to report MEL values and momentary
// exposure warnings. Only one callback may ever be registered; a second
// registration attempt fails with EX_ILLEGAL_STATE.
ndk::ScopedAStatus SoundDose::registerSoundDoseCallback(
        const std::shared_ptr<ISoundDose::IHalSoundDoseCallback>& in_callback) {
    if (in_callback.get() == nullptr) {
        LOG(ERROR) << __func__ << ": Callback is nullptr";
        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
    }
    ::android::audio_utils::lock_guard l(mCbMutex);
    if (mCallback != nullptr) {
        LOG(ERROR) << __func__ << ": Sound dose callback was already registered";
        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
    }
    mCallback = in_callback;
    LOG(DEBUG) << __func__ << ": Registered sound dose callback ";
    return ndk::ScopedAStatus::ok();
}
// Records the audio device to be attached to subsequent MEL callback reports.
void SoundDose::setAudioDevice(const AudioDevice& audioDevice) {
    ::android::audio_utils::lock_guard l(mCbMutex);
    mAudioDevice = audioDevice;
}
// Creates the MEL processor on first use, or updates the audio format of an
// already-running processor.
void SoundDose::startDataProcessor(uint32_t sampleRate, uint32_t channelCount,
                                   const AudioFormatDescription& aidlFormat) {
    ::android::audio_utils::lock_guard l(mMutex);
    // Fall back to AUDIO_FORMAT_INVALID when the AIDL format can not be mapped.
    const audio_format_t format = aidl2legacy_AudioFormatDescription_audio_format_t(aidlFormat)
                                          .value_or(AUDIO_FORMAT_INVALID);
    if (mMelProcessor != nullptr) {
        mMelProcessor->updateAudioFormat(sampleRate, channelCount, format);
        return;
    }
    // we don't have the deviceId concept on the vendor side so just pass 0
    mMelProcessor = ::android::sp<::android::audio_utils::MelProcessor>::make(
            sampleRate, channelCount, format, mMelCallback, /*deviceId=*/0, mRs2Value);
}
// Feeds a chunk of audio data to the MEL processor; a no-op before
// 'startDataProcessor' has been called.
void SoundDose::process(const void* buffer, size_t bytes) {
    ::android::audio_utils::lock_guard l(mMutex);
    if (mMelProcessor != nullptr) {
        mMelProcessor->process(buffer, bytes);
    }
}
void SoundDose::onNewMelValues(const std::vector<float>& mels, size_t offset, size_t length,
audio_port_handle_t deviceId __attribute__((__unused__))) const {
::android::audio_utils::lock_guard l(mCbMutex);
if (!mAudioDevice.has_value()) {
LOG(WARNING) << __func__ << ": New mel values without a registered device";
return;
}
if (mCallback == nullptr) {
LOG(ERROR) << __func__ << ": New mel values without a registered callback";
return;
}
ISoundDose::IHalSoundDoseCallback::MelRecord melRecord;
melRecord.timestamp = nanoseconds_to_seconds(systemTime());
melRecord.melValues = std::vector<float>(mels.begin() + offset, mels.begin() + offset + length);
mCallback->onNewMelValues(melRecord, mAudioDevice.value());
}
// MelProcessor callback adapter: forwards new MEL values to the owning
// SoundDose instance. The 'attenuated' flag is unused here.
void SoundDose::MelCallback::onNewMelValues(const std::vector<float>& mels, size_t offset,
                                            size_t length,
                                            audio_port_handle_t deviceId
                                            __attribute__((__unused__)),
                                            bool attenuated __attribute__((__unused__))) const {
    mSoundDose.onNewMelValues(mels, offset, length, deviceId);
}
// Forwards a momentary exposure warning to the registered framework callback,
// attaching the registered audio device.
void SoundDose::onMomentaryExposure(float currentMel, audio_port_handle_t deviceId
                                    __attribute__((__unused__))) const {
    ::android::audio_utils::lock_guard l(mCbMutex);
    if (!mAudioDevice.has_value()) {
        LOG(WARNING) << __func__ << ": Momentary exposure without a registered device";
        return;
    }
    if (mCallback == nullptr) {
        LOG(ERROR) << __func__ << ": Momentary exposure without a registered callback";
        return;
    }
    mCallback->onMomentaryExposureWarning(currentMel, mAudioDevice.value());
}
// MelProcessor callback adapter: forwards momentary exposure events to the
// owning SoundDose instance.
void SoundDose::MelCallback::onMomentaryExposure(float currentMel, audio_port_handle_t deviceId
                                                 __attribute__((__unused__))) const {
    mSoundDose.onMomentaryExposure(currentMel, deviceId);
}
} // namespace aidl::android::hardware::audio::core::sounddose

1195
audio/Stream.cpp Normal file

File diff suppressed because it is too large Load Diff

82
audio/Telephony.cpp Normal file
View File

@@ -0,0 +1,82 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define LOG_TAG "AHAL_Telephony"
#include <android-base/logging.h>
#include <android/binder_to_string.h>
#include <Utils.h>
#include "core-impl/Telephony.h"
using aidl::android::hardware::audio::common::isValidAudioMode;
using aidl::android::media::audio::common::AudioMode;
using aidl::android::media::audio::common::Boolean;
using aidl::android::media::audio::common::Float;
namespace aidl::android::hardware::audio::core {
// Initializes the telecom configuration with defaults: maximum voice volume,
// TTY off, HAC disabled.
Telephony::Telephony() {
    mTelecomConfig.voiceVolume = Float{TelecomConfig::VOICE_VOLUME_MAX};
    mTelecomConfig.ttyMode = TelecomConfig::TtyMode::OFF;
    mTelecomConfig.isHacEnabled = Boolean{false};
}
// Returns the list of audio modes this implementation supports.
ndk::ScopedAStatus Telephony::getSupportedAudioModes(std::vector<AudioMode>* _aidl_return) {
    *_aidl_return = mSupportedAudioModes;
    LOG(DEBUG) << __func__ << ": returning " << ::android::internal::ToString(*_aidl_return);
    return ndk::ScopedAStatus::ok();
}
// Accepts a switch to any supported audio mode. Invalid enum values yield
// EX_ILLEGAL_ARGUMENT, valid-but-unsupported modes EX_UNSUPPORTED_OPERATION.
ndk::ScopedAStatus Telephony::switchAudioMode(AudioMode in_mode) {
    if (!isValidAudioMode(in_mode)) {
        LOG(ERROR) << __func__ << ": invalid mode " << toString(in_mode);
        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
    }
    const bool isSupported = std::find(mSupportedAudioModes.begin(), mSupportedAudioModes.end(),
                                       in_mode) != mSupportedAudioModes.end();
    if (!isSupported) {
        LOG(ERROR) << __func__ << ": unsupported mode " << toString(in_mode);
        return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
    }
    LOG(DEBUG) << __func__ << ": " << toString(in_mode);
    return ndk::ScopedAStatus::ok();
}
// Applies the provided telecom configuration. Only the fields that are present
// (non-nullopt / non-UNSPECIFIED) are updated; the merged result is returned.
ndk::ScopedAStatus Telephony::setTelecomConfig(const TelecomConfig& in_config,
                                               TelecomConfig* _aidl_return) {
    if (in_config.voiceVolume.has_value()) {
        const float requestedVolume = in_config.voiceVolume.value().value;
        // Validate the range before committing any of the fields.
        if (requestedVolume < TelecomConfig::VOICE_VOLUME_MIN ||
            requestedVolume > TelecomConfig::VOICE_VOLUME_MAX) {
            LOG(ERROR) << __func__
                       << ": voice volume value is invalid: " << in_config.voiceVolume.value().value;
            return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
        }
        mTelecomConfig.voiceVolume = in_config.voiceVolume;
    }
    if (in_config.ttyMode != TelecomConfig::TtyMode::UNSPECIFIED) {
        mTelecomConfig.ttyMode = in_config.ttyMode;
    }
    if (in_config.isHacEnabled.has_value()) {
        mTelecomConfig.isHacEnabled = in_config.isHacEnabled;
    }
    *_aidl_return = mTelecomConfig;
    LOG(DEBUG) << __func__ << ": received " << in_config.toString() << ", returning "
               << _aidl_return->toString();
    return ndk::ScopedAStatus::ok();
}
} // namespace aidl::android::hardware::audio::core

838
audio/XsdcConversion.cpp Normal file
View File

@@ -0,0 +1,838 @@
/*
* Copyright (C) 2024 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <inttypes.h>
#include <unordered_set>
#define LOG_TAG "AHAL_Config"
#include <android-base/logging.h>
#include <android-base/strings.h>
#include <android/binder_enums.h>
#include <aidl/android/media/audio/common/AudioPort.h>
#include <aidl/android/media/audio/common/AudioPortConfig.h>
#include <media/AidlConversionCppNdk.h>
#include <media/TypeConverter.h>
#include <media/convert.h>
#include <utils/FastStrcmp.h>
#include <Utils.h>
#include "core-impl/XmlConverter.h"
#include "core-impl/XsdcConversion.h"
using aidl::android::hardware::audio::common::iequals;
using aidl::android::hardware::audio::common::isValidAudioMode;
using aidl::android::hardware::audio::common::kValidAudioModes;
using aidl::android::media::audio::common::AudioChannelLayout;
using aidl::android::media::audio::common::AudioContentType;
using aidl::android::media::audio::common::AudioDevice;
using aidl::android::media::audio::common::AudioDeviceAddress;
using aidl::android::media::audio::common::AudioDeviceDescription;
using aidl::android::media::audio::common::AudioDeviceType;
using aidl::android::media::audio::common::AudioFormatDescription;
using aidl::android::media::audio::common::AudioFormatType;
using aidl::android::media::audio::common::AudioGain;
using aidl::android::media::audio::common::AudioHalCapCriterion;
using aidl::android::media::audio::common::AudioHalCapCriterionType;
using aidl::android::media::audio::common::AudioHalCapCriterionV2;
using aidl::android::media::audio::common::AudioHalVolumeCurve;
using aidl::android::media::audio::common::AudioIoFlags;
using aidl::android::media::audio::common::AudioMode;
using aidl::android::media::audio::common::AudioPolicyForceUse;
using aidl::android::media::audio::common::AudioPort;
using aidl::android::media::audio::common::AudioPortConfig;
using aidl::android::media::audio::common::AudioPortDeviceExt;
using aidl::android::media::audio::common::AudioPortExt;
using aidl::android::media::audio::common::AudioPortMixExt;
using aidl::android::media::audio::common::AudioProfile;
using aidl::android::media::audio::common::AudioSource;
using aidl::android::media::audio::common::AudioStreamType;
using aidl::android::media::audio::common::AudioUsage;
using android::BAD_VALUE;
using android::base::unexpected;
using android::utilities::convertTo;
using ndk::enum_range;
namespace ap_xsd = android::audio::policy::configuration;
namespace eng_xsd = android::audio::policy::engine::configuration;
namespace aidl::android::hardware::audio::core::internal {
static constexpr const char kXsdcForceConfigForCommunication[] = "ForceUseForCommunication";
static constexpr const char kXsdcForceConfigForMedia[] = "ForceUseForMedia";
static constexpr const char kXsdcForceConfigForRecord[] = "ForceUseForRecord";
static constexpr const char kXsdcForceConfigForDock[] = "ForceUseForDock";
static constexpr const char kXsdcForceConfigForSystem[] = "ForceUseForSystem";
static constexpr const char kXsdcForceConfigForHdmiSystemAudio[] = "ForceUseForHdmiSystemAudio";
static constexpr const char kXsdcForceConfigForEncodedSurround[] = "ForceUseForEncodedSurround";
static constexpr const char kXsdcForceConfigForVibrateRinging[] = "ForceUseForVibrateRinging";
// Passes a string through unchanged, or fails the conversion when it is empty
// (empty names are not allowed in the audio policy configuration).
inline ConversionResult<std::string> assertNonEmpty(const std::string& s) {
    if (!s.empty()) {
        return s;
    }
    LOG(ERROR) << __func__ << " Review Audio Policy config: "
               << " empty string is not valid.";
    return unexpected(BAD_VALUE);
}
#define NON_EMPTY_STRING_OR_FATAL(s) VALUE_OR_FATAL(assertNonEmpty(s))
// Builds an AIDL flag mask from the XSD flag names. AUDIO_FLAG_NONE entries
// contribute nothing; any unrecognized name fails the conversion.
ConversionResult<int32_t> convertAudioFlagsToAidl(
        const std::vector<eng_xsd::FlagType>& xsdcFlagTypeVec) {
    int legacyFlagMask = 0;
    for (const eng_xsd::FlagType& xsdcFlagType : xsdcFlagTypeVec) {
        if (xsdcFlagType == eng_xsd::FlagType::AUDIO_FLAG_NONE) {
            continue;
        }
        audio_flags_mask_t legacyFlag = AUDIO_FLAG_NONE;
        if (!::android::AudioFlagConverter::fromString(eng_xsd::toString(xsdcFlagType),
                                                       legacyFlag)) {
            LOG(ERROR) << __func__ << " Review Audio Policy config, "
                       << eng_xsd::toString(xsdcFlagType) << " is not a valid flag.";
            return unexpected(BAD_VALUE);
        }
        legacyFlagMask |= static_cast<int>(legacyFlag);
    }
    ConversionResult<int32_t> result = legacy2aidl_audio_flags_mask_t_int32_t_mask(
            static_cast<audio_flags_mask_t>(legacyFlagMask));
    if (!result.ok()) {
        LOG(ERROR) << __func__ << " Review Audio Policy config, " << legacyFlagMask
                   << " has invalid flag(s).";
        return unexpected(BAD_VALUE);
    }
    return result;
}
// Maps an XSD stream enum to AIDL AudioStreamType via the legacy
// audio_stream_type_t representation.
ConversionResult<AudioStreamType> convertAudioStreamTypeToAidl(const eng_xsd::Stream& xsdcStream) {
    const std::string streamLiteral = eng_xsd::toString(xsdcStream);
    audio_stream_type_t legacyStreamType;
    if (!::android::StreamTypeConverter::fromString(streamLiteral, legacyStreamType)) {
        LOG(ERROR) << __func__ << " Review Audio Policy config, " << streamLiteral
                   << " is not a valid audio stream type.";
        return unexpected(BAD_VALUE);
    }
    ConversionResult<AudioStreamType> result =
            legacy2aidl_audio_stream_type_t_AudioStreamType(legacyStreamType);
    if (!result.ok()) {
        LOG(ERROR) << __func__ << " Review Audio Policy config, " << legacyStreamType
                   << " is not a valid audio stream type.";
        return unexpected(BAD_VALUE);
    }
    return result;
}
// Maps an XSD source enum to AIDL AudioSource via the legacy audio_source_t.
ConversionResult<AudioSource> convertAudioSourceToAidl(
        const eng_xsd::SourceEnumType& xsdcSourceType) {
    const std::string sourceLiteral = eng_xsd::toString(xsdcSourceType);
    audio_source_t legacySourceType;
    if (!::android::SourceTypeConverter::fromString(sourceLiteral, legacySourceType)) {
        LOG(ERROR) << __func__ << " Review Audio Policy config, "
                   << sourceLiteral << " is not a valid audio source.";
        return unexpected(BAD_VALUE);
    }
    ConversionResult<AudioSource> result = legacy2aidl_audio_source_t_AudioSource(legacySourceType);
    if (!result.ok()) {
        LOG(ERROR) << __func__ << " Review Audio Policy config, " << legacySourceType
                   << " is not a valid audio source.";
        return unexpected(BAD_VALUE);
    }
    return result;
}
// Maps an XSD content type enum to AIDL AudioContentType via the legacy
// audio_content_type_t.
ConversionResult<AudioContentType> convertAudioContentTypeToAidl(
        const eng_xsd::ContentType& xsdcContentType) {
    const std::string contentLiteral = eng_xsd::toString(xsdcContentType);
    audio_content_type_t legacyContentType;
    if (!::android::AudioContentTypeConverter::fromString(contentLiteral, legacyContentType)) {
        LOG(ERROR) << __func__ << " Review Audio Policy config, "
                   << contentLiteral << " is not a valid audio content type.";
        return unexpected(BAD_VALUE);
    }
    ConversionResult<AudioContentType> result =
            legacy2aidl_audio_content_type_t_AudioContentType(legacyContentType);
    if (!result.ok()) {
        LOG(ERROR) << __func__ << " Review Audio Policy config, " << legacyContentType
                   << " is not a valid audio content type.";
        return unexpected(BAD_VALUE);
    }
    return result;
}
// Maps an XSD usage enum to AIDL AudioUsage via the legacy audio_usage_t.
ConversionResult<AudioUsage> convertAudioUsageToAidl(const eng_xsd::UsageEnumType& xsdcUsage) {
    audio_usage_t legacyUsage;
    if (!::android::UsageTypeConverter::fromString(eng_xsd::toString(xsdcUsage), legacyUsage)) {
        LOG(ERROR) << __func__ << " Review Audio Policy config, not a valid audio usage.";
        return unexpected(BAD_VALUE);
    }
    ConversionResult<AudioUsage> result = legacy2aidl_audio_usage_t_AudioUsage(legacyUsage);
    if (result.ok()) {
        return result;
    }
    LOG(ERROR) << __func__ << " Review Audio Policy config, not a valid audio usage.";
    return unexpected(BAD_VALUE);
}
// Converts an audio format string from the XML config into the AIDL format
// description. Only the literal "AUDIO_FORMAT_DEFAULT" may resolve to the
// default format; any other string falling back to it is treated as invalid.
ConversionResult<AudioFormatDescription> convertAudioFormatToAidl(const std::string& xsdcFormat) {
    const audio_format_t legacyFormat =
            ::android::formatFromString(xsdcFormat, AUDIO_FORMAT_DEFAULT);
    const bool unexpectedDefault =
            legacyFormat == AUDIO_FORMAT_DEFAULT && xsdcFormat != "AUDIO_FORMAT_DEFAULT";
    ConversionResult<AudioFormatDescription> result =
            legacy2aidl_audio_format_t_AudioFormatDescription(legacyFormat);
    if (unexpectedDefault || !result.ok()) {
        LOG(ERROR) << __func__ << " Review Audio Policy config: " << xsdcFormat
                   << " is not a valid audio format.";
        return unexpected(BAD_VALUE);
    }
    return result;
}
// Collects the names of all devices listed in the module's <attachedDevices>
// elements into a single set.
std::unordered_set<std::string> getAttachedDevices(const ap_xsd::Modules::Module& moduleConfig) {
    std::unordered_set<std::string> attachedDeviceSet;
    if (!moduleConfig.hasAttachedDevices()) {
        return attachedDeviceSet;
    }
    for (const ap_xsd::AttachedDevices& attachedDevices : moduleConfig.getAttachedDevices()) {
        if (!attachedDevices.hasItem()) {
            continue;
        }
        const auto& items = attachedDevices.getItem();
        attachedDeviceSet.insert(items.begin(), items.end());
    }
    return attachedDeviceSet;
}
// Maps a device type string to the AIDL AudioDeviceDescription via the legacy
// audio_devices_t. 'legacyDeviceType' is pre-set to AUDIO_DEVICE_NONE so an
// unrecognized name is detected by the check below.
ConversionResult<AudioDeviceDescription> convertDeviceTypeToAidl(const std::string& xType) {
    audio_devices_t legacyDeviceType = AUDIO_DEVICE_NONE;
    ::android::DeviceConverter::fromString(xType, legacyDeviceType);
    ConversionResult<AudioDeviceDescription> result =
            legacy2aidl_audio_devices_t_AudioDeviceDescription(legacyDeviceType);
    if (legacyDeviceType == AUDIO_DEVICE_NONE || !result.ok()) {
        LOG(ERROR) << __func__ << " Review Audio Policy config: " << xType
                   << " is not a valid device type.";
        return unexpected(BAD_VALUE);
    }
    return result;
}
// Builds the AIDL AudioDevice for a device port. Built-in microphones (no
// connection string) receive the well-known "bottom"/"back" addresses.
ConversionResult<AudioDevice> createAudioDevice(
        const ap_xsd::DevicePorts::DevicePort& xDevicePort) {
    AudioDevice device;
    device.type = VALUE_OR_FATAL(convertDeviceTypeToAidl(xDevicePort.getType()));
    if (xDevicePort.hasAddress()) {
        device.address =
                AudioDeviceAddress::make<AudioDeviceAddress::Tag::id>(xDevicePort.getAddress());
    }
    const bool hasNoConnection = device.type.connection.empty();
    if (hasNoConnection && device.type.type == AudioDeviceType::IN_MICROPHONE) {
        device.address = "bottom";
    } else if (hasNoConnection && device.type.type == AudioDeviceType::IN_MICROPHONE_BACK) {
        device.address = "back";
    }
    return device;
}
// Builds the device-port extension: device identity, the default-device flag
// bit when this port matches the module's default output device, and any
// encoded formats the port declares.
ConversionResult<AudioPortExt> createAudioPortExt(
        const ap_xsd::DevicePorts::DevicePort& xDevicePort,
        const std::string& xDefaultOutputDevice) {
    AudioPortDeviceExt deviceExt;
    deviceExt.device = VALUE_OR_FATAL(createAudioDevice(xDevicePort));
    deviceExt.flags = (xDevicePort.getTagName() == xDefaultOutputDevice)
                              ? 1 << AudioPortDeviceExt::FLAG_INDEX_DEFAULT_DEVICE
                              : 0;
    if (xDevicePort.hasEncodedFormats()) {
        deviceExt.encodedFormats =
                VALUE_OR_FATAL((convertCollectionToAidl<std::string, AudioFormatDescription>(
                        xDevicePort.getEncodedFormats(), &convertAudioFormatToAidl)));
    }
    return AudioPortExt::make<AudioPortExt::Tag::device>(deviceExt);
}
// Builds the mix-port extension, applying the defaults used when an attribute
// is absent in the XML.
ConversionResult<AudioPortExt> createAudioPortExt(const ap_xsd::MixPorts::MixPort& xMixPort) {
    AudioPortMixExt mixExt;
    mixExt.maxOpenStreamCount =
            xMixPort.hasMaxOpenCount() ? static_cast<int>(xMixPort.getMaxOpenCount()) : 0;
    mixExt.maxActiveStreamCount =
            xMixPort.hasMaxActiveCount() ? static_cast<int>(xMixPort.getMaxActiveCount()) : 1;
    mixExt.recommendedMuteDurationMs =
            xMixPort.hasRecommendedMuteDurationMs()
                    ? static_cast<int>(xMixPort.getRecommendedMuteDurationMs())
                    : 0;
    return AudioPortExt::make<AudioPortExt::Tag::mix>(mixExt);
}
// ORs together the legacy gain mode bits for every recognized mode name;
// names the legacy converter does not know are silently skipped.
ConversionResult<int> convertGainModeToAidl(const std::vector<ap_xsd::AudioGainMode>& gainModeVec) {
    int gainModeMask = 0;
    for (const ap_xsd::AudioGainMode& gainMode : gainModeVec) {
        audio_gain_mode_t legacyGainMode;
        if (!::android::GainModeConverter::fromString(ap_xsd::toString(gainMode),
                                                      legacyGainMode)) {
            continue;
        }
        gainModeMask |= static_cast<int>(legacyGainMode);
    }
    return gainModeMask;
}
// Converts an XSD channel mask enum into the AIDL channel layout. Input masks
// are recognized by their "AUDIO_CHANNEL_IN_" name prefix.
ConversionResult<AudioChannelLayout> convertChannelMaskToAidl(
        const ap_xsd::AudioChannelMask& xChannelMask) {
    const std::string maskLiteral = ap_xsd::toString(xChannelMask);
    const audio_channel_mask_t legacyChannelMask = ::android::channelMaskFromString(maskLiteral);
    const bool isInput = maskLiteral.rfind("AUDIO_CHANNEL_IN_", 0) == 0;
    ConversionResult<AudioChannelLayout> result =
            legacy2aidl_audio_channel_mask_t_AudioChannelLayout(legacyChannelMask, isInput);
    if (legacyChannelMask == AUDIO_CHANNEL_INVALID || !result.ok()) {
        LOG(ERROR) << __func__ << " Review Audio Policy config: " << maskLiteral
                   << " is not a valid audio channel mask.";
        return unexpected(BAD_VALUE);
    }
    return result;
}
// Converts one <gain> element; absent attributes keep the AIDL defaults
// (zero / false / empty channel mask).
ConversionResult<AudioGain> convertGainToAidl(const ap_xsd::Gains::Gain& xGain) {
    AudioGain gain;
    gain.mode = VALUE_OR_FATAL(convertGainModeToAidl(xGain.getMode()));
    if (xGain.hasChannel_mask()) {
        gain.channelMask = VALUE_OR_FATAL(convertChannelMaskToAidl(xGain.getChannel_mask()));
    }
    gain.minValue = xGain.hasMinValueMB() ? xGain.getMinValueMB() : 0;
    gain.maxValue = xGain.hasMaxValueMB() ? xGain.getMaxValueMB() : 0;
    gain.defaultValue = xGain.hasDefaultValueMB() ? xGain.getDefaultValueMB() : 0;
    gain.stepValue = xGain.hasStepValueMB() ? xGain.getStepValueMB() : 0;
    gain.minRampMs = xGain.hasMinRampMs() ? xGain.getMinRampMs() : 0;
    gain.maxRampMs = xGain.hasMaxRampMs() ? xGain.getMaxRampMs() : 0;
    gain.useForVolume = xGain.hasUseForVolume() ? xGain.getUseForVolume() : false;
    return gain;
}
// Converts one <profile> element; absent attributes leave the corresponding
// AIDL fields at their empty defaults.
ConversionResult<AudioProfile> convertAudioProfileToAidl(const ap_xsd::Profile& xProfile) {
    AudioProfile profile;
    if (xProfile.hasFormat()) {
        profile.format = VALUE_OR_FATAL(convertAudioFormatToAidl(xProfile.getFormat()));
    }
    if (xProfile.hasChannelMasks()) {
        profile.channelMasks = VALUE_OR_FATAL(
                (convertCollectionToAidl<ap_xsd::AudioChannelMask, AudioChannelLayout>(
                        xProfile.getChannelMasks(), &convertChannelMaskToAidl)));
    }
    if (xProfile.hasSamplingRates()) {
        // Sampling rates are stored as int64 in the XSD but are plain ints in AIDL.
        profile.sampleRates = VALUE_OR_FATAL((convertCollectionToAidl<int64_t, int>(
                xProfile.getSamplingRates(), [](const int64_t rate) -> int { return rate; })));
    }
    return profile;
}
// Converts a list of flag names into an AIDL AudioIoFlags value. Whether the
// result carries input or output flags depends on the port role combined with
// whether the flags belong to a mix port: a sink mix port and a source device
// port both yield input flags.
ConversionResult<AudioIoFlags> convertIoFlagsToAidl(
        const std::vector<ap_xsd::AudioInOutFlag>& flags, const ap_xsd::Role role,
        bool flagsForMixPort) {
    const bool isInput = (role == ap_xsd::Role::sink && flagsForMixPort) ||
                         (role == ap_xsd::Role::source && !flagsForMixPort);
    int legacyFlagMask = 0;
    if (isInput) {
        for (const ap_xsd::AudioInOutFlag& flag : flags) {
            audio_input_flags_t legacyFlag;
            if (::android::InputFlagConverter::fromString(ap_xsd::toString(flag), legacyFlag)) {
                legacyFlagMask |= static_cast<int>(legacyFlag);
            }
        }
        return AudioIoFlags::make<AudioIoFlags::Tag::input>(
                VALUE_OR_FATAL(legacy2aidl_audio_input_flags_t_int32_t_mask(
                        static_cast<audio_input_flags_t>(legacyFlagMask))));
    }
    for (const ap_xsd::AudioInOutFlag& flag : flags) {
        audio_output_flags_t legacyFlag;
        if (::android::OutputFlagConverter::fromString(ap_xsd::toString(flag), legacyFlag)) {
            legacyFlagMask |= static_cast<int>(legacyFlag);
        }
    }
    return AudioIoFlags::make<AudioIoFlags::Tag::output>(
            VALUE_OR_FATAL(legacy2aidl_audio_output_flags_t_int32_t_mask(
                    static_cast<audio_output_flags_t>(legacyFlagMask))));
}
// Converts one <devicePort> element into an AIDL AudioPort, assigning it the
// next free port id (the counter is advanced as a side effect).
ConversionResult<AudioPort> convertDevicePortToAidl(
        const ap_xsd::DevicePorts::DevicePort& xDevicePort, const std::string& xDefaultOutputDevice,
        int32_t& nextPortId) {
    AudioPort port;
    port.id = nextPortId++;
    port.name = NON_EMPTY_STRING_OR_FATAL(xDevicePort.getTagName());
    port.profiles = VALUE_OR_FATAL((convertCollectionToAidl<ap_xsd::Profile, AudioProfile>(
            xDevicePort.getProfile(), convertAudioProfileToAidl)));
    // Device ports carry no flag names of their own, hence the empty list.
    port.flags = VALUE_OR_FATAL(convertIoFlagsToAidl({}, xDevicePort.getRole(), false));
    port.gains = VALUE_OR_FATAL(
            (convertWrappedCollectionToAidl<ap_xsd::Gains, ap_xsd::Gains::Gain, AudioGain>(
                    xDevicePort.getGains(), &ap_xsd::Gains::getGain, convertGainToAidl)));
    port.ext = VALUE_OR_FATAL(createAudioPortExt(xDevicePort, xDefaultOutputDevice));
    return port;
}
ConversionResult<std::vector<AudioPort>> convertDevicePortsInModuleToAidl(
        const ap_xsd::Modules::Module& xModuleConfig, int32_t& nextPortId) {
    // Converts every <devicePort> of a module, then cross-checks the
    // <attachedDevices> list against each port's connection type.
    std::vector<AudioPort> audioPortVec;
    std::vector<ap_xsd::DevicePorts> xDevicePortsVec = xModuleConfig.getDevicePorts();
    if (xDevicePortsVec.size() > 1) {
        // Fixed log message: a separator was missing after __func__.
        LOG(ERROR) << __func__
                   << ": Having multiple '<devicePorts>' elements is not allowed, found: "
                   << xDevicePortsVec.size();
        return unexpected(BAD_VALUE);
    }
    if (!xDevicePortsVec.empty()) {
        const std::string xDefaultOutputDevice = xModuleConfig.hasDefaultOutputDevice()
                                                         ? xModuleConfig.getDefaultOutputDevice()
                                                         : "";
        audioPortVec.reserve(xDevicePortsVec[0].getDevicePort().size());
        for (const ap_xsd::DevicePorts& xDevicePortsType : xDevicePortsVec) {
            for (const ap_xsd::DevicePorts::DevicePort& xDevicePort :
                 xDevicePortsType.getDevicePort()) {
                audioPortVec.push_back(VALUE_OR_FATAL(
                        convertDevicePortToAidl(xDevicePort, xDefaultOutputDevice, nextPortId)));
            }
        }
    }
    const std::unordered_set<std::string> xAttachedDeviceSet = getAttachedDevices(xModuleConfig);
    for (const auto& port : audioPortVec) {
        const auto& devicePort = port.ext.get<AudioPortExt::device>();
        // Invariant: ports listed in <attachedDevices> must have an empty connection
        // (built-in devices), and ports with a connection must not be listed.
        if (xAttachedDeviceSet.count(port.name) != devicePort.device.type.connection.empty()) {
            LOG(ERROR) << __func__ << ": Review Audio Policy config: <attachedDevices> "
                       << "list is incorrect or devicePort \"" << port.name
                       << "\" type= " << devicePort.device.type.toString() << " is incorrect.";
            return unexpected(BAD_VALUE);
        }
    }
    return audioPortVec;
}
ConversionResult<AudioPort> convertMixPortToAidl(const ap_xsd::MixPorts::MixPort& xMixPort,
                                                 int32_t& nextPortId) {
    // Translates one <mixPort> element; port ids are assigned sequentially by the caller.
    AudioPort aidlPort;
    aidlPort.id = nextPortId++;
    aidlPort.name = NON_EMPTY_STRING_OR_FATAL(xMixPort.getName());
    aidlPort.profiles = VALUE_OR_FATAL((convertCollectionToAidl<ap_xsd::Profile, AudioProfile>(
            xMixPort.getProfile(), convertAudioProfileToAidl)));
    // A missing <flags> attribute is equivalent to an empty flag list.
    const std::vector<ap_xsd::AudioInOutFlag> xFlags =
            xMixPort.hasFlags() ? xMixPort.getFlags() : std::vector<ap_xsd::AudioInOutFlag>{};
    aidlPort.flags = VALUE_OR_FATAL(convertIoFlagsToAidl(xFlags, xMixPort.getRole(), true));
    aidlPort.gains = VALUE_OR_FATAL(
            (convertWrappedCollectionToAidl<ap_xsd::Gains, ap_xsd::Gains::Gain, AudioGain>(
                    xMixPort.getGains(), &ap_xsd::Gains::getGain, &convertGainToAidl)));
    aidlPort.ext = VALUE_OR_FATAL(createAudioPortExt(xMixPort));
    return aidlPort;
}
ConversionResult<std::vector<AudioPort>> convertMixPortsInModuleToAidl(
        const ap_xsd::Modules::Module& xModuleConfig, int32_t& nextPortId) {
    // Converts every <mixPort> of a module. At most one <mixPorts> wrapper element
    // is allowed per module.
    std::vector<AudioPort> audioPortVec;
    std::vector<ap_xsd::MixPorts> xMixPortsVec = xModuleConfig.getMixPorts();
    if (xMixPortsVec.size() > 1) {
        // Fixed log message: a separator was missing after __func__.
        LOG(ERROR) << __func__ << ": Having multiple '<mixPorts>' elements is not allowed, found: "
                   << xMixPortsVec.size();
        return unexpected(BAD_VALUE);
    }
    if (!xMixPortsVec.empty()) {
        audioPortVec.reserve(xMixPortsVec[0].getMixPort().size());
        for (const ap_xsd::MixPorts& xMixPortsType : xMixPortsVec) {
            for (const ap_xsd::MixPorts::MixPort& xMixPort : xMixPortsType.getMixPort()) {
                audioPortVec.push_back(VALUE_OR_FATAL(convertMixPortToAidl(xMixPort, nextPortId)));
            }
        }
    }
    return audioPortVec;
}
ConversionResult<int32_t> getSinkPortId(const ap_xsd::Routes::Route& xRoute,
                                        const std::unordered_map<std::string, int32_t>& portMap) {
    // Resolves the route's sink name to a previously assigned AIDL port id.
    auto portMapIter = portMap.find(xRoute.getSink());
    if (portMapIter == portMap.end()) {
        // Fixed log message: the two literals previously concatenated to "routehas".
        LOG(ERROR) << __func__ << " Review Audio Policy config: audio route"
                   << " has sink: " << xRoute.getSink()
                   << " which is neither a device port nor mix port.";
        return unexpected(BAD_VALUE);
    }
    return portMapIter->second;
}
ConversionResult<std::vector<int32_t>> getSourcePortIds(
        const ap_xsd::Routes::Route& xRoute,
        const std::unordered_map<std::string, int32_t>& portMap) {
    // The XML encodes sources as a single comma-separated attribute; split, trim,
    // and resolve each name to its AIDL port id.
    std::vector<int32_t> sourcePortIds;
    for (const std::string& rawSource : ::android::base::Split(xRoute.getSources(), ",")) {
        const std::string source = ::android::base::Trim(rawSource);
        auto portMapIter = portMap.find(source);
        if (portMapIter == portMap.end()) {
            // Fixed log message: the two literals previously concatenated to "routehas".
            LOG(ERROR) << __func__ << " Review Audio Policy config: audio route"
                       << " has source \"" << source
                       << "\" which is neither a device port nor mix port.";
            return unexpected(BAD_VALUE);
        }
        sourcePortIds.push_back(portMapIter->second);
    }
    return sourcePortIds;
}
ConversionResult<AudioRoute> convertRouteToAidl(const ap_xsd::Routes::Route& xRoute,
                                                const std::vector<AudioPort>& aidlAudioPorts) {
    // Build a name -> id lookup once, then resolve both route endpoints through it.
    std::unordered_map<std::string, int32_t> portMap;
    for (const AudioPort& port : aidlAudioPorts) {
        portMap.emplace(port.name, port.id);
    }
    AudioRoute aidlRoute;
    aidlRoute.sourcePortIds = VALUE_OR_FATAL(getSourcePortIds(xRoute, portMap));
    aidlRoute.sinkPortId = VALUE_OR_FATAL(getSinkPortId(xRoute, portMap));
    // A 'mux' route allows only one active source at a time.
    aidlRoute.isExclusive = (xRoute.getType() == ap_xsd::MixType::mux);
    return aidlRoute;
}
ConversionResult<std::vector<AudioRoute>> convertRoutesInModuleToAidl(
        const ap_xsd::Modules::Module& xModuleConfig,
        const std::vector<AudioPort>& aidlAudioPorts) {
    // Converts every <route> of a module into an AudioRoute, resolving the route's
    // port names against the already-converted AIDL ports.
    std::vector<AudioRoute> audioRouteVec;
    std::vector<ap_xsd::Routes> xRoutesVec = xModuleConfig.getRoutes();
    if (!xRoutesVec.empty()) {
        /*
         * xRoutesVec likely only contains one element; that is, it's
         * likely that all ap_xsd::Routes::Route types that we need to convert
         * are inside of xRoutesVec[0].
         */
        audioRouteVec.reserve(xRoutesVec[0].getRoute().size());
        for (const ap_xsd::Routes& xRoutesType : xRoutesVec) {
            for (const ap_xsd::Routes::Route& xRoute : xRoutesType.getRoute()) {
                audioRouteVec.push_back(VALUE_OR_FATAL(convertRouteToAidl(xRoute, aidlAudioPorts)));
            }
        }
    }
    return audioRouteVec;
}
ConversionResult<std::unique_ptr<Module::Configuration>> convertModuleConfigToAidl(
        const ap_xsd::Modules::Module& xModuleConfig) {
    // Builds a complete Module::Configuration: device ports are converted first
    // (so they receive the lowest ids), then mix ports, then routes are resolved
    // against the combined port list.
    auto result = std::make_unique<Module::Configuration>();
    auto& aidlModuleConfig = *result;
    std::vector<AudioPort> devicePorts = VALUE_OR_FATAL(
            convertDevicePortsInModuleToAidl(xModuleConfig, aidlModuleConfig.nextPortId));
    // The XML config does not specify the default input device.
    // Assign the first attached input device as the default.
    for (auto& port : devicePorts) {
        if (port.flags.getTag() != AudioIoFlags::input) continue;
        auto& deviceExt = port.ext.get<AudioPortExt::device>();
        // A non-empty connection string means the device is detachable, not built-in.
        if (!deviceExt.device.type.connection.empty()) continue;
        deviceExt.flags |= 1 << AudioPortDeviceExt::FLAG_INDEX_DEFAULT_DEVICE;
        break;
    }
    std::vector<AudioPort> mixPorts = VALUE_OR_FATAL(
            convertMixPortsInModuleToAidl(xModuleConfig, aidlModuleConfig.nextPortId));
    aidlModuleConfig.ports.reserve(devicePorts.size() + mixPorts.size());
    aidlModuleConfig.ports.insert(aidlModuleConfig.ports.end(), devicePorts.begin(),
                                  devicePorts.end());
    aidlModuleConfig.ports.insert(aidlModuleConfig.ports.end(), mixPorts.begin(), mixPorts.end());
    aidlModuleConfig.routes =
            VALUE_OR_FATAL(convertRoutesInModuleToAidl(xModuleConfig, aidlModuleConfig.ports));
    return result;
}
ConversionResult<AudioMode> convertTelephonyModeToAidl(const std::string& xsdcModeCriterionType) {
    // Matches the criterion literal against the string form of each valid AudioMode.
    for (const auto& mode : kValidAudioModes) {
        if (toString(mode) == xsdcModeCriterionType) {
            return mode;
        }
    }
    LOG(ERROR) << __func__ << " invalid mode " << xsdcModeCriterionType;
    return unexpected(BAD_VALUE);
}
// Wraps a raw address string from the engine config into the 'id' alternative of
// the AudioDeviceAddress union; no format validation is performed here.
ConversionResult<AudioDeviceAddress> convertDeviceAddressToAidl(const std::string& xsdcAddress) {
    return AudioDeviceAddress::make<AudioDeviceAddress::Tag::id>(xsdcAddress);
}
ConversionResult<eng_xsd::CriterionTypeType> getCriterionTypeByName(
        const std::string& name,
        const std::vector<eng_xsd::CriterionTypesType>& xsdcCriterionTypesVec) {
    // Scans every <criterion_types> group for a criterion type with a matching name.
    for (const auto& typesGroup : xsdcCriterionTypesVec) {
        const auto& candidates = typesGroup.getCriterion_type();
        const auto match =
                std::find_if(candidates.begin(), candidates.end(),
                             [&name](const auto& candidate) { return candidate.getName() == name; });
        if (match != candidates.end()) {
            return *match;
        }
    }
    LOG(ERROR) << __func__ << " failed to find criterion type " << name;
    return unexpected(BAD_VALUE);
}
ConversionResult<std::vector<std::optional<AudioHalCapCriterionV2>>>
convertCapCriteriaCollectionToAidl(
        const std::vector<eng_xsd::CriteriaType>& xsdcCriteriaVec,
        const std::vector<eng_xsd::CriterionTypesType>& xsdcCriterionTypesVec) {
    // Converts every <criterion> from every <criteria> group. Each criterion's type
    // definition is looked up in xsdcCriterionTypesVec during conversion, so both
    // inputs must be non-empty.
    std::vector<std::optional<AudioHalCapCriterionV2>> resultAidlCriterionVec;
    if (xsdcCriteriaVec.empty() || xsdcCriterionTypesVec.empty()) {
        LOG(ERROR) << __func__ << " empty criteria/criterionTypes";
        return unexpected(BAD_VALUE);
    }
    for (const auto& xsdCriteria : xsdcCriteriaVec) {
        for (const auto& xsdcCriterion : xsdCriteria.getCriterion()) {
            resultAidlCriterionVec.push_back(
                    std::optional<AudioHalCapCriterionV2>(VALUE_OR_FATAL(
                            convertCapCriterionV2ToAidl(xsdcCriterion, xsdcCriterionTypesVec))));
        }
    }
    return resultAidlCriterionVec;
}
ConversionResult<std::vector<AudioDeviceDescription>> convertDevicesToAidl(
        const eng_xsd::CriterionTypeType& xsdcDeviceCriterionType) {
    // Converts the 'android_type' attribute of each criterion value into an AIDL
    // AudioDeviceDescription, going through the legacy audio_devices_t representation.
    if (xsdcDeviceCriterionType.getValues().empty()) {
        LOG(ERROR) << __func__ << " no values provided";
        return unexpected(BAD_VALUE);
    }
    std::vector<AudioDeviceDescription> aidlDevices;
    // Iterate by const reference to avoid copying each <values> group.
    for (const eng_xsd::ValuesType& xsdcValues : xsdcDeviceCriterionType.getValues()) {
        // Reserve cumulatively so a later group cannot issue a smaller, no-op request.
        aidlDevices.reserve(aidlDevices.size() + xsdcValues.getValue().size());
        for (const eng_xsd::ValueType& xsdcValue : xsdcValues.getValue()) {
            if (!xsdcValue.hasAndroid_type()) {
                LOG(ERROR) << __func__ << " empty android type";
                return unexpected(BAD_VALUE);
            }
            uint32_t integerValue;
            if (!convertTo(xsdcValue.getAndroid_type(), integerValue)) {
                LOG(ERROR) << __func__ << " failed to convert android type "
                           << xsdcValue.getAndroid_type();
                return unexpected(BAD_VALUE);
            }
            aidlDevices.push_back(
                    VALUE_OR_RETURN(legacy2aidl_audio_devices_t_AudioDeviceDescription(
                            static_cast<audio_devices_t>(integerValue))));
        }
    }
    return aidlDevices;
}
ConversionResult<std::vector<AudioDeviceAddress>> convertDeviceAddressesToAidl(
        const eng_xsd::CriterionTypeType& xsdcDeviceAddressesCriterionType) {
    // Converts each criterion value literal into an 'id'-tagged AudioDeviceAddress.
    if (xsdcDeviceAddressesCriterionType.getValues().empty()) {
        LOG(ERROR) << __func__ << " no values provided";
        return unexpected(BAD_VALUE);
    }
    std::vector<AudioDeviceAddress> aidlDeviceAddresses;
    // Iterate by const reference to avoid copying each <values> group.
    for (const eng_xsd::ValuesType& xsdcValues : xsdcDeviceAddressesCriterionType.getValues()) {
        // Reserve cumulatively so a later group cannot issue a smaller, no-op request.
        aidlDeviceAddresses.reserve(aidlDeviceAddresses.size() + xsdcValues.getValue().size());
        for (const eng_xsd::ValueType& xsdcValue : xsdcValues.getValue()) {
            aidlDeviceAddresses.push_back(
                    AudioDeviceAddress::make<AudioDeviceAddress::Tag::id>(xsdcValue.getLiteral()));
        }
    }
    return aidlDeviceAddresses;
}
ConversionResult<AudioMode> convertAudioModeToAidl(const std::string& xsdcAudioModeType) {
    // Matches the literal against the string form of every AudioMode enumerator.
    for (const auto mode : enum_range<AudioMode>()) {
        if (toString(mode) == xsdcAudioModeType) {
            return mode;
        }
    }
    LOG(ERROR) << __func__ << " invalid audio mode " << xsdcAudioModeType;
    return unexpected(BAD_VALUE);
}
ConversionResult<std::vector<AudioMode>> convertTelephonyModesToAidl(
        const eng_xsd::CriterionTypeType& xsdcTelephonyModeCriterionType) {
    // Converts each criterion value literal into an AudioMode enumerator.
    if (xsdcTelephonyModeCriterionType.getValues().empty()) {
        LOG(ERROR) << __func__ << " no values provided";
        return unexpected(BAD_VALUE);
    }
    std::vector<AudioMode> aidlAudioModes;
    // Iterate by const reference to avoid copying each <values> group.
    for (const eng_xsd::ValuesType& xsdcValues : xsdcTelephonyModeCriterionType.getValues()) {
        // Reserve cumulatively so a later group cannot issue a smaller, no-op request.
        aidlAudioModes.reserve(aidlAudioModes.size() + xsdcValues.getValue().size());
        for (const eng_xsd::ValueType& xsdcValue : xsdcValues.getValue()) {
            aidlAudioModes.push_back(
                    VALUE_OR_RETURN(convertAudioModeToAidl(xsdcValue.getLiteral())));
        }
    }
    return aidlAudioModes;
}
ConversionResult<std::vector<AudioPolicyForceUse>> convertForceUseConfigsToAidl(
        const std::string& criterionValue,
        const eng_xsd::CriterionTypeType& xsdcForcedConfigCriterionType) {
    // Converts each literal of a force-use criterion type into an AudioPolicyForceUse;
    // the criterion name selects which union alternative gets populated.
    if (xsdcForcedConfigCriterionType.getValues().empty()) {
        LOG(ERROR) << __func__ << " no values provided";
        return unexpected(BAD_VALUE);
    }
    std::vector<AudioPolicyForceUse> aidlForcedConfigs;
    // Iterate by const reference to avoid copying each <values> group.
    for (const eng_xsd::ValuesType& xsdcValues : xsdcForcedConfigCriterionType.getValues()) {
        // Reserve cumulatively so a later group cannot issue a smaller, no-op request.
        aidlForcedConfigs.reserve(aidlForcedConfigs.size() + xsdcValues.getValue().size());
        for (const eng_xsd::ValueType& xsdcValue : xsdcValues.getValue()) {
            aidlForcedConfigs.push_back(
                    VALUE_OR_RETURN(convertForceUseToAidl(criterionValue, xsdcValue.getLiteral())));
        }
    }
    return aidlForcedConfigs;
}
// Generic literal-to-enum lookup shared by all force-use category conversions.
template <typename T>
ConversionResult<T> convertForceUseForcedConfigToAidl(
        const std::string& xsdcForcedConfigCriterionType) {
    for (const auto candidate : enum_range<T>()) {
        if (toString(candidate) == xsdcForcedConfigCriterionType) {
            return candidate;
        }
    }
    LOG(ERROR) << __func__ << " invalid forced config " << xsdcForcedConfigCriterionType;
    return unexpected(BAD_VALUE);
}
// Maps a force-use criterion (name + selected literal value) to the matching
// AudioPolicyForceUse union alternative. Unrecognized names fail with BAD_VALUE.
// NOTE(review): the first branch uses case-sensitive fastcmp while all following
// branches use case-insensitive fasticmp — confirm whether this asymmetry is intended.
ConversionResult<AudioPolicyForceUse> convertForceUseToAidl(const std::string& xsdcCriterionName,
                                                            const std::string& xsdcCriterionValue) {
    if (!fastcmp<strncmp>(xsdcCriterionName.c_str(), kXsdcForceConfigForCommunication,
                          strlen(kXsdcForceConfigForCommunication))) {
        const auto deviceCategory = VALUE_OR_RETURN(
                convertForceUseForcedConfigToAidl<AudioPolicyForceUse::CommunicationDeviceCategory>(
                        xsdcCriterionValue));
        return AudioPolicyForceUse::make<AudioPolicyForceUse::forCommunication>(deviceCategory);
    }
    if (!fasticmp<strncmp>(xsdcCriterionName.c_str(), kXsdcForceConfigForMedia,
                           strlen(kXsdcForceConfigForMedia))) {
        const auto deviceCategory = VALUE_OR_RETURN(
                convertForceUseForcedConfigToAidl<AudioPolicyForceUse::MediaDeviceCategory>(
                        xsdcCriterionValue));
        return AudioPolicyForceUse::make<AudioPolicyForceUse::forMedia>(deviceCategory);
    }
    // Record shares the communication device category enum.
    if (!fasticmp<strncmp>(xsdcCriterionName.c_str(), kXsdcForceConfigForRecord,
                           strlen(kXsdcForceConfigForRecord))) {
        const auto deviceCategory = VALUE_OR_RETURN(
                convertForceUseForcedConfigToAidl<AudioPolicyForceUse::CommunicationDeviceCategory>(
                        xsdcCriterionValue));
        return AudioPolicyForceUse::make<AudioPolicyForceUse::forRecord>(deviceCategory);
    }
    if (!fasticmp<strncmp>(xsdcCriterionName.c_str(), kXsdcForceConfigForDock,
                           strlen(kXsdcForceConfigForDock))) {
        const auto dockType =
                VALUE_OR_RETURN(convertForceUseForcedConfigToAidl<AudioPolicyForceUse::DockType>(
                        xsdcCriterionValue));
        return AudioPolicyForceUse::make<AudioPolicyForceUse::dock>(dockType);
    }
    // The system-sounds and HDMI criteria are boolean: true only for the exact
    // "ENFORCED" literal, false for any other value.
    if (!fasticmp<strncmp>(xsdcCriterionName.c_str(), kXsdcForceConfigForSystem,
                           strlen(kXsdcForceConfigForSystem))) {
        return AudioPolicyForceUse::make<AudioPolicyForceUse::systemSounds>(xsdcCriterionValue ==
                                                                            "SYSTEM_ENFORCED");
    }
    if (!fasticmp<strncmp>(xsdcCriterionName.c_str(), kXsdcForceConfigForHdmiSystemAudio,
                           strlen(kXsdcForceConfigForHdmiSystemAudio))) {
        return AudioPolicyForceUse::make<AudioPolicyForceUse::hdmiSystemAudio>(
                xsdcCriterionValue == "HDMI_SYSTEM_AUDIO_ENFORCED");
    }
    if (!fasticmp<strncmp>(xsdcCriterionName.c_str(), kXsdcForceConfigForEncodedSurround,
                           strlen(kXsdcForceConfigForEncodedSurround))) {
        const auto encodedSurround = VALUE_OR_RETURN(
                convertForceUseForcedConfigToAidl<AudioPolicyForceUse::EncodedSurroundConfig>(
                        xsdcCriterionValue));
        return AudioPolicyForceUse::make<AudioPolicyForceUse::encodedSurround>(encodedSurround);
    }
    if (!fasticmp<strncmp>(xsdcCriterionName.c_str(), kXsdcForceConfigForVibrateRinging,
                           strlen(kXsdcForceConfigForVibrateRinging))) {
        const auto deviceCategory = VALUE_OR_RETURN(
                convertForceUseForcedConfigToAidl<AudioPolicyForceUse::CommunicationDeviceCategory>(
                        xsdcCriterionValue));
        return AudioPolicyForceUse::make<AudioPolicyForceUse::forVibrateRinging>(deviceCategory);
    }
    LOG(ERROR) << __func__ << " unrecognized force use " << xsdcCriterionName;
    return unexpected(BAD_VALUE);
}
ConversionResult<AudioHalCapCriterionV2> convertCapCriterionV2ToAidl(
        const eng_xsd::CriterionType& xsdcCriterion,
        const std::vector<eng_xsd::CriterionTypesType>& xsdcCriterionTypesVec) {
    // Resolves the criterion's type definition, then dispatches on the criterion
    // name to build the matching AudioHalCapCriterionV2 union alternative.
    // (Removed an unused local that captured the criterion's default literal value.)
    eng_xsd::CriterionTypeType xsdcCriterionType =
            VALUE_OR_RETURN(getCriterionTypeByName(xsdcCriterion.getType(), xsdcCriterionTypesVec));
    using Tag = AudioHalCapCriterionV2::Tag;
    if (iequals(xsdcCriterion.getName(), toString(Tag::availableInputDevices))) {
        return AudioHalCapCriterionV2::make<Tag::availableInputDevices>(
                VALUE_OR_RETURN(convertDevicesToAidl(xsdcCriterionType)));
    }
    if (iequals(xsdcCriterion.getName(), toString(Tag::availableOutputDevices))) {
        return AudioHalCapCriterionV2::make<Tag::availableOutputDevices>(
                VALUE_OR_RETURN(convertDevicesToAidl(xsdcCriterionType)));
    }
    if (iequals(xsdcCriterion.getName(), toString(Tag::availableInputDevicesAddresses))) {
        return AudioHalCapCriterionV2::make<Tag::availableInputDevicesAddresses>(
                VALUE_OR_RETURN(convertDeviceAddressesToAidl(xsdcCriterionType)));
    }
    if (iequals(xsdcCriterion.getName(), toString(Tag::availableOutputDevicesAddresses))) {
        return AudioHalCapCriterionV2::make<Tag::availableOutputDevicesAddresses>(
                VALUE_OR_RETURN(convertDeviceAddressesToAidl(xsdcCriterionType)));
    }
    if (iequals(xsdcCriterion.getName(), toString(Tag::telephonyMode))) {
        return AudioHalCapCriterionV2::make<Tag::telephonyMode>(
                VALUE_OR_RETURN(convertTelephonyModesToAidl(xsdcCriterionType)));
    }
    // Force-use criteria are matched by name prefix, not by exact tag name.
    if (!fastcmp<strncmp>(xsdcCriterion.getName().c_str(), kXsdcForceConfigForUse,
                          strlen(kXsdcForceConfigForUse))) {
        return AudioHalCapCriterionV2::make<Tag::forceConfigForUse>(VALUE_OR_RETURN(
                convertForceUseConfigsToAidl(xsdcCriterion.getName(), xsdcCriterionType)));
    }
    LOG(ERROR) << __func__ << " unrecognized criterion " << xsdcCriterion.getName();
    return unexpected(BAD_VALUE);
}
ConversionResult<AudioHalCapCriterion> convertCapCriterionToAidl(
        const eng_xsd::CriterionType& xsdcCriterion) {
    // Shallow conversion: names are copied verbatim, and a missing default maps
    // to the empty string.
    AudioHalCapCriterion aidl;
    aidl.name = xsdcCriterion.getName();
    aidl.criterionTypeName = xsdcCriterion.getType();
    aidl.defaultLiteralValue = xsdcCriterion.has_default() ? xsdcCriterion.get_default() : "";
    return aidl;
}
// Parses a volume curve point of the form "<index>,<attenuationMb>" (e.g. "33,-500").
// Fails with BAD_VALUE if the string does not contain exactly two integers or if
// the index falls outside [MIN_INDEX, MAX_INDEX].
ConversionResult<AudioHalVolumeCurve::CurvePoint> convertCurvePointToAidl(
        const std::string& xsdcCurvePoint) {
    AudioHalVolumeCurve::CurvePoint aidlCurvePoint{};
    // SCNd8 matches the int8_t 'index' field; the range check below guards the
    // values that parsed but are out of the allowed curve index range.
    if ((sscanf(xsdcCurvePoint.c_str(), "%" SCNd8 ",%d", &aidlCurvePoint.index,
                &aidlCurvePoint.attenuationMb) != 2) ||
        (aidlCurvePoint.index < AudioHalVolumeCurve::CurvePoint::MIN_INDEX) ||
        (aidlCurvePoint.index > AudioHalVolumeCurve::CurvePoint::MAX_INDEX)) {
        LOG(ERROR) << __func__ << " Review Audio Policy config: volume curve point:"
                   << "\"" << xsdcCurvePoint << "\" is invalid";
        return unexpected(BAD_VALUE);
    }
    return aidlCurvePoint;
}
/**
 * Maps legacy product strategy names to their ids. The hard coded ids must stay
 * in sync with policy.h's definition of legacy strategy ids.
 */
std::unordered_map<std::string, int> getLegacyProductStrategyMap() {
    return {{"STRATEGY_MEDIA", 5},
            {"STRATEGY_PHONE", 0},
            {"STRATEGY_SONIFICATION", 1},
            {"STRATEGY_SONIFICATION_RESPECTFUL", 4},
            {"STRATEGY_DTMF", 6},
            {"STRATEGY_ENFORCED_AUDIBLE", 2},
            {"STRATEGY_CALL_ASSISTANT", 7},
            {"STRATEGY_TRANSMITTED_THROUGH_SPEAKER", 8},
            {"STRATEGY_ACCESSIBILITY", 3}};
}
} // namespace aidl::android::hardware::audio::core::internal

View File

@@ -0,0 +1,193 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <algorithm>
#include <cstddef>
#include <memory>
#include <unordered_set>
#define LOG_TAG "AHAL_AcousticEchoCancelerSw"
#include <android-base/logging.h>
#include <fmq/AidlMessageQueue.h>
#include <system/audio_effects/effect_uuid.h>
#include "AcousticEchoCancelerSw.h"
using aidl::android::hardware::audio::effect::AcousticEchoCancelerSw;
using aidl::android::hardware::audio::effect::Descriptor;
using aidl::android::hardware::audio::effect::getEffectImplUuidAcousticEchoCancelerSw;
using aidl::android::hardware::audio::effect::getEffectTypeUuidAcousticEchoCanceler;
using aidl::android::hardware::audio::effect::IEffect;
using aidl::android::hardware::audio::effect::Range;
using aidl::android::media::audio::common::AudioUuid;
// Factory entry point: creates an AcousticEchoCancelerSw instance if the
// requested implementation UUID matches this effect.
extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid,
                                           std::shared_ptr<IEffect>* instanceSpp) {
    if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidAcousticEchoCancelerSw()) {
        // Fixed log message: a separator was missing after __func__.
        LOG(ERROR) << __func__ << ": uuid not supported";
        return EX_ILLEGAL_ARGUMENT;
    }
    if (!instanceSpp) {
        LOG(ERROR) << __func__ << " invalid input parameter!";
        return EX_ILLEGAL_ARGUMENT;
    }
    *instanceSpp = ndk::SharedRefBase::make<AcousticEchoCancelerSw>();
    LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
    return EX_NONE;
}
// Factory entry point: returns this effect's static descriptor if the
// requested implementation UUID matches.
extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) {
    if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidAcousticEchoCancelerSw()) {
        // Fixed log message: a separator was missing after __func__.
        LOG(ERROR) << __func__ << ": uuid not supported";
        return EX_ILLEGAL_ARGUMENT;
    }
    *_aidl_return = AcousticEchoCancelerSw::kDescriptor;
    return EX_NONE;
}
namespace aidl::android::hardware::audio::effect {
const std::string AcousticEchoCancelerSw::kEffectName = "AcousticEchoCancelerSw";
// Supported parameter ranges: echoDelayUs accepts [0, 500]; mobileMode is pinned
// to false on both ends, making it effectively read-only.
const std::vector<Range::AcousticEchoCancelerRange> AcousticEchoCancelerSw::kRanges = {
        MAKE_RANGE(AcousticEchoCanceler, echoDelayUs, 0, 500),
        /* mobile mode not supported, and not settable */
        MAKE_RANGE(AcousticEchoCanceler, mobileMode, false, false)};
const Capability AcousticEchoCancelerSw::kCapability = {.range = AcousticEchoCancelerSw::kRanges};
// Static descriptor returned by getDescriptor() and the queryEffect() entry point.
const Descriptor AcousticEchoCancelerSw::kDescriptor = {
        .common = {.id = {.type = getEffectTypeUuidAcousticEchoCanceler(),
                          .uuid = getEffectImplUuidAcousticEchoCancelerSw(),
                          .proxy = std::nullopt},
                   .flags = {.type = Flags::Type::PRE_PROC,
                             .insert = Flags::Insert::FIRST,
                             .volume = Flags::Volume::NONE},
                   .name = AcousticEchoCancelerSw::kEffectName,
                   .implementor = "The Android Open Source Project"},
        .capability = AcousticEchoCancelerSw::kCapability};
// Copies the static effect descriptor into the out parameter.
ndk::ScopedAStatus AcousticEchoCancelerSw::getDescriptor(Descriptor* _aidl_return) {
    // Fixed log message: a separator was missing between __func__ and the descriptor.
    LOG(DEBUG) << __func__ << ": " << kDescriptor.toString();
    *_aidl_return = kDescriptor;
    return ndk::ScopedAStatus::ok();
}
// Applies an AEC-specific parameter. Requires a valid context and a value within
// kRanges; mobileMode can only be set to false since it is not supported.
ndk::ScopedAStatus AcousticEchoCancelerSw::setParameterSpecific(
        const Parameter::Specific& specific) {
    RETURN_IF(Parameter::Specific::acousticEchoCanceler != specific.getTag(), EX_ILLEGAL_ARGUMENT,
              "EffectNotSupported");
    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
    auto& param = specific.get<Parameter::Specific::acousticEchoCanceler>();
    // Range validation happens once here, before dispatching on the parameter tag.
    RETURN_IF(!inRange(param, kRanges), EX_ILLEGAL_ARGUMENT, "outOfRange");
    auto tag = param.getTag();
    switch (tag) {
        case AcousticEchoCanceler::echoDelayUs: {
            RETURN_IF(mContext->setEchoDelay(param.get<AcousticEchoCanceler::echoDelayUs>()) !=
                              RetCode::SUCCESS,
                      EX_ILLEGAL_ARGUMENT, "echoDelayNotSupported");
            return ndk::ScopedAStatus::ok();
        }
        case AcousticEchoCanceler::mobileMode: {
            // Only 'false' is accepted; enabling mobile mode is rejected.
            RETURN_IF(true == param.get<AcousticEchoCanceler::mobileMode>(), EX_ILLEGAL_ARGUMENT,
                      "SettingmobileModeSupported");
            return ndk::ScopedAStatus::ok();
        }
        default: {
            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
                    EX_ILLEGAL_ARGUMENT, "AcousticEchoCancelerTagNotSupported");
        }
    }
}
// Validates that 'id' addresses this effect type, then dispatches on the inner
// AcousticEchoCanceler::Id tag to fetch the requested parameter.
ndk::ScopedAStatus AcousticEchoCancelerSw::getParameterSpecific(const Parameter::Id& id,
                                                                Parameter::Specific* specific) {
    auto tag = id.getTag();
    RETURN_IF(Parameter::Id::acousticEchoCancelerTag != tag, EX_ILLEGAL_ARGUMENT, "wrongIdTag");
    auto specificId = id.get<Parameter::Id::acousticEchoCancelerTag>();
    auto specificIdTag = specificId.getTag();
    switch (specificIdTag) {
        case AcousticEchoCanceler::Id::commonTag:
            return getParameterAcousticEchoCanceler(
                    specificId.get<AcousticEchoCanceler::Id::commonTag>(), specific);
        default:
            // Log the inner tag that failed dispatch; the outer tag is already known
            // to be acousticEchoCancelerTag here (previously the outer tag was logged,
            // which carried no information).
            LOG(ERROR) << __func__ << " unsupported tag: " << toString(specificIdTag);
            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
                    EX_ILLEGAL_ARGUMENT, "AcousticEchoCancelerTagNotSupported");
    }
}
// Reads the current value of the parameter selected by 'tag' into 'specific'.
// mobileMode always reads back as false since it is not supported.
ndk::ScopedAStatus AcousticEchoCancelerSw::getParameterAcousticEchoCanceler(
        const AcousticEchoCanceler::Tag& tag, Parameter::Specific* specific) {
    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
    AcousticEchoCanceler param;
    switch (tag) {
        case AcousticEchoCanceler::echoDelayUs: {
            param.set<AcousticEchoCanceler::echoDelayUs>(mContext->getEchoDelay());
            break;
        }
        case AcousticEchoCanceler::mobileMode: {
            param.set<AcousticEchoCanceler::mobileMode>(false);
            break;
        }
        default: {
            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
                    EX_ILLEGAL_ARGUMENT, "AcousticEchoCancelerTagNotSupported");
        }
    }
    specific->set<Parameter::Specific::acousticEchoCanceler>(param);
    return ndk::ScopedAStatus::ok();
}
std::shared_ptr<EffectContext> AcousticEchoCancelerSw::createContext(
        const Parameter::Common& common) {
    // Lazily create the per-instance context; subsequent calls reuse the first one.
    if (!mContext) {
        mContext = std::make_shared<AcousticEchoCancelerSwContext>(1 /* statusFmqDepth */, common);
    } else {
        LOG(DEBUG) << __func__ << " context already exist";
    }
    return mContext;
}
RetCode AcousticEchoCancelerSw::releaseContext() {
    // shared_ptr::reset is a no-op on an empty pointer, so no null check is needed.
    mContext.reset();
    return RetCode::SUCCESS;
}
// Processing method running in EffectWorker thread.
IEffect::Status AcousticEchoCancelerSw::effectProcessImpl(float* in, float* out, int samples) {
    // TODO: get data buffer and process.
    LOG(DEBUG) << __func__ << " in " << in << " out " << out << " samples " << samples;
    // Pass-through: the SW implementation performs no actual echo cancellation.
    std::copy_n(in, samples, out);
    return {STATUS_OK, samples, samples};
}
// Stores the requested echo delay and always succeeds; range validation is done
// by the caller (setParameterSpecific checks against kRanges first).
RetCode AcousticEchoCancelerSwContext::setEchoDelay(int echoDelayUs) {
    mEchoDelayUs = echoDelayUs;
    return RetCode::SUCCESS;
}
} // namespace aidl::android::hardware::audio::effect

View File

@@ -0,0 +1,74 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <aidl/android/hardware/audio/effect/BnEffect.h>
#include <aidl/android/hardware/audio/effect/Range.h>
#include <fmq/AidlMessageQueue.h>
#include <cstdlib>
#include <memory>
#include "effect-impl/EffectImpl.h"
namespace aidl::android::hardware::audio::effect {
// Per-instance state for the software Acoustic Echo Canceler: currently just the
// configured echo delay in microseconds.
class AcousticEchoCancelerSwContext final : public EffectContext {
  public:
    AcousticEchoCancelerSwContext(int statusDepth, const Parameter::Common& common)
        : EffectContext(statusDepth, common) {
        LOG(DEBUG) << __func__;
    }
    // Stores the new delay and returns SUCCESS; range validation is the caller's job.
    RetCode setEchoDelay(int echoDelayUs);
    int getEchoDelay() const { return mEchoDelayUs; }
  private:
    int mEchoDelayUs = 0;  // current echo delay in microseconds
};
// Software (pass-through) implementation of the Acoustic Echo Canceler effect.
// Removed a stray semicolon after the getEffectName() inline body.
class AcousticEchoCancelerSw final : public EffectImpl {
  public:
    static const std::string kEffectName;
    static const Capability kCapability;
    static const Descriptor kDescriptor;
    AcousticEchoCancelerSw() { LOG(DEBUG) << __func__; }
    ~AcousticEchoCancelerSw() {
        cleanUp();
        LOG(DEBUG) << __func__;
    }
    ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
            REQUIRES(mImplMutex) override;
    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific)
            REQUIRES(mImplMutex) override;
    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common)
            REQUIRES(mImplMutex) override;
    RetCode releaseContext() REQUIRES(mImplMutex) override;
    std::string getEffectName() override { return kEffectName; }
    IEffect::Status effectProcessImpl(float* in, float* out, int samples) override;
  private:
    static const std::vector<Range::AcousticEchoCancelerRange> kRanges;
    std::shared_ptr<AcousticEchoCancelerSwContext> mContext GUARDED_BY(mImplMutex);
    // Fills 'specific' with the current value of the requested common-tag parameter.
    ndk::ScopedAStatus getParameterAcousticEchoCanceler(const AcousticEchoCanceler::Tag& tag,
                                                        Parameter::Specific* specific)
            REQUIRES(mImplMutex);
};
}  // namespace aidl::android::hardware::audio::effect

View File

@@ -0,0 +1,40 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package {
default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "hardware_interfaces_license"
// to get the below license kinds:
// SPDX-license-identifier-Apache-2.0
default_applicable_licenses: ["hardware_interfaces_license"],
}
// Shared library providing the software Acoustic Echo Canceler effect.
// Installed under the 'soundfx' subdirectory so the effects factory can load it.
cc_library_shared {
    name: "libaecsw",
    defaults: [
        "aidlaudioeffectservice_defaults",
    ],
    srcs: [
        "AcousticEchoCancelerSw.cpp",
        ":effectCommonFile",
    ],
    relative_install_path: "soundfx",
    visibility: [
        "//hardware/interfaces/audio/aidl/default:__subpackages__",
    ],
}

297
audio/alsa/Mixer.cpp Normal file
View File

@@ -0,0 +1,297 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <algorithm>
#include <cmath>
#define LOG_TAG "AHAL_AlsaMixer"
#include <android-base/logging.h>
#include <android/binder_status.h>
#include <error/expected_utils.h>
#include "Mixer.h"
namespace ndk {
// This enables use of 'error/expected_utils' for ScopedAStatus.
// The macros in 'expected_utils' look these up via ADL on the status type.
inline bool errorIsOk(const ScopedAStatus& s) {
    return s.isOk();
}
inline std::string errorToString(const ScopedAStatus& s) {
    return s.getDescription();
}
}  // namespace ndk
namespace aidl::android::hardware::audio::core::alsa {
// static
// Candidate ALSA control names for each logical control, in priority order.
// initializeMixerControls picks the first name present on the card that also has
// the expected ctl type.
const std::map<Mixer::Control, std::vector<Mixer::ControlNamesAndExpectedCtlType>>
        Mixer::kPossibleControls = {
                {Mixer::MASTER_SWITCH, {{"Master Playback Switch", MIXER_CTL_TYPE_BOOL}}},
                {Mixer::MASTER_VOLUME, {{"Master Playback Volume", MIXER_CTL_TYPE_INT}}},
                {Mixer::HW_VOLUME,
                 {{"Headphone Playback Volume", MIXER_CTL_TYPE_INT},
                  {"Headset Playback Volume", MIXER_CTL_TYPE_INT},
                  {"PCM Playback Volume", MIXER_CTL_TYPE_INT}}},
                {Mixer::MIC_SWITCH, {{"Capture Switch", MIXER_CTL_TYPE_BOOL}}},
                {Mixer::MIC_GAIN, {{"Capture Volume", MIXER_CTL_TYPE_INT}}}};
// static
// Probes the ALSA mixer for each candidate control name (see kPossibleControls)
// and records the first match of the expected type per logical control.
// Returns an empty map when the mixer handle is null.
Mixer::Controls Mixer::initializeMixerControls(struct mixer* mixer) {
    if (mixer == nullptr) return {};
    Controls found;
    std::string foundNames;
    for (const auto& [control, candidates] : kPossibleControls) {
        for (const auto& [ctlName, expectedType] : candidates) {
            struct mixer_ctl* candidate = mixer_get_ctl_by_name(mixer, ctlName.c_str());
            if (candidate == nullptr || mixer_ctl_get_type(candidate) != expectedType) {
                continue;
            }
            found.emplace(control, candidate);
            foundNames += foundNames.empty() ? ctlName : "," + ctlName;
            break;
        }
    }
    LOG(DEBUG) << __func__ << ": available mixer control names=[" << foundNames << "]";
    return found;
}
// Maps each logical control to the human-readable name used in log messages.
// Unknown values print nothing, matching the original switch without a default.
std::ostream& operator<<(std::ostream& s, Mixer::Control c) {
    if (c == Mixer::Control::MASTER_SWITCH) {
        s << "master mute";
    } else if (c == Mixer::Control::MASTER_VOLUME) {
        s << "master volume";
    } else if (c == Mixer::Control::HW_VOLUME) {
        s << "volume";
    } else if (c == Mixer::Control::MIC_SWITCH) {
        s << "mic mute";
    } else if (c == Mixer::Control::MIC_GAIN) {
        s << "mic gain";
    }
    return s;
}
// Opens the ALSA mixer for 'card' and caches the supported controls. On
// failure the object is still constructed, but isValid() returns false.
Mixer::Mixer(int card) : mMixer(mixer_open(card)), mMixerControls(initializeMixerControls(mMixer)) {
    if (!isValid()) {
        PLOG(ERROR) << __func__ << ": failed to open mixer for card=" << card;
    }
}

Mixer::~Mixer() {
    if (isValid()) {
        // Hold the lock so the mixer is not closed while another thread is
        // still inside an ALSA call.
        std::lock_guard l(mMixerAccess);
        mixer_close(mMixer);
    }
}
// Thin wrappers mapping the public accessors onto the generic per-control
// helpers below.
ndk::ScopedAStatus Mixer::getMasterMute(bool* muted) {
    return getMixerControlMute(MASTER_SWITCH, muted);
}

ndk::ScopedAStatus Mixer::getMasterVolume(float* volume) {
    return getMixerControlVolume(MASTER_VOLUME, volume);
}

ndk::ScopedAStatus Mixer::getMicGain(float* gain) {
    return getMixerControlVolume(MIC_GAIN, gain);
}

ndk::ScopedAStatus Mixer::getMicMute(bool* muted) {
    return getMixerControlMute(MIC_SWITCH, muted);
}
// Reads the per-channel hardware volume as percentages and appends them to
// 'volumes' as floats in [0, 1].
ndk::ScopedAStatus Mixer::getVolumes(std::vector<float>* volumes) {
    struct mixer_ctl* mctl;
    RETURN_STATUS_IF_ERROR(findControl(Mixer::HW_VOLUME, &mctl));
    std::vector<int> rawPercents;
    std::lock_guard l(mMixerAccess);
    if (int err = getMixerControlPercent(mctl, &rawPercents); err != 0) {
        LOG(ERROR) << __func__ << ": failed to get volume, err=" << err;
        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
    }
    for (const int percent : rawPercents) {
        volumes->push_back(std::clamp(percent / 100.0f, 0.0f, 1.0f));
    }
    return ndk::ScopedAStatus::ok();
}
// Thin wrappers mapping the public mutators onto the generic per-control
// helpers below.
ndk::ScopedAStatus Mixer::setMasterMute(bool muted) {
    return setMixerControlMute(MASTER_SWITCH, muted);
}

ndk::ScopedAStatus Mixer::setMasterVolume(float volume) {
    return setMixerControlVolume(MASTER_VOLUME, volume);
}

ndk::ScopedAStatus Mixer::setMicGain(float gain) {
    return setMixerControlVolume(MIC_GAIN, gain);
}

ndk::ScopedAStatus Mixer::setMicMute(bool muted) {
    return setMixerControlMute(MIC_SWITCH, muted);
}
// Converts float volumes in [0, 1] into integer percentages and applies them
// per channel to the hardware volume control.
ndk::ScopedAStatus Mixer::setVolumes(const std::vector<float>& volumes) {
    struct mixer_ctl* mctl;
    RETURN_STATUS_IF_ERROR(findControl(Mixer::HW_VOLUME, &mctl));
    std::vector<int> percents;
    percents.reserve(volumes.size());
    for (const float volume : volumes) {
        percents.push_back(static_cast<int>(std::floor(std::clamp(volume, 0.0f, 1.0f) * 100)));
    }
    std::lock_guard l(mMixerAccess);
    if (int err = setMixerControlPercent(mctl, percents); err != 0) {
        LOG(ERROR) << __func__ << ": failed to set volume, err=" << err;
        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
    }
    return ndk::ScopedAStatus::ok();
}
// Looks up the cached mixer_ctl for 'ctl'. Fails with ILLEGAL_STATE when the
// mixer could not be opened, and with UNSUPPORTED_OPERATION when the card has
// no matching control.
ndk::ScopedAStatus Mixer::findControl(Control ctl, struct mixer_ctl** result) {
    if (!isValid()) {
        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
    }
    if (auto it = mMixerControls.find(ctl); it != mMixerControls.end()) {
        *result = it->second;
        return ndk::ScopedAStatus::ok();
    }
    return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
}
// Reads the mute state of a boolean switch control. ALSA switch semantics are
// 1 == enabled (audible) and 0 == disabled (muted) — the same convention that
// setMixerControlMute writes ('muted ? 0 : 1') — so "muted" corresponds to a
// zero control value.
ndk::ScopedAStatus Mixer::getMixerControlMute(Control ctl, bool* muted) {
    struct mixer_ctl* mctl;
    RETURN_STATUS_IF_ERROR(findControl(ctl, &mctl));
    std::lock_guard l(mMixerAccess);
    std::vector<int> mutedValues;
    if (int err = getMixerControlValues(mctl, &mutedValues); err != 0) {
        LOG(ERROR) << __func__ << ": failed to get " << ctl << ", err=" << err;
        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
    }
    if (mutedValues.empty()) {
        LOG(ERROR) << __func__ << ": got no values for " << ctl;
        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
    }
    // Fix: was 'mutedValues[0] != 0', which inverted the state relative to
    // setMixerControlMute (a set(true)/get round trip returned 'false').
    *muted = mutedValues[0] == 0;
    return ndk::ScopedAStatus::ok();
}
// Reads the first channel's volume as a percentage and converts it to [0, 1].
ndk::ScopedAStatus Mixer::getMixerControlVolume(Control ctl, float* volume) {
    struct mixer_ctl* mctl;
    RETURN_STATUS_IF_ERROR(findControl(ctl, &mctl));
    std::lock_guard l(mMixerAccess);
    std::vector<int> rawPercents;
    if (int err = getMixerControlPercent(mctl, &rawPercents); err != 0) {
        LOG(ERROR) << __func__ << ": failed to get " << ctl << ", err=" << err;
        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
    }
    if (rawPercents.empty()) {
        LOG(ERROR) << __func__ << ": got no values for " << ctl;
        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
    }
    *volume = std::clamp(rawPercents.front() / 100.0f, 0.0f, 1.0f);
    return ndk::ScopedAStatus::ok();
}
// Writes the mute state to a boolean switch control.
ndk::ScopedAStatus Mixer::setMixerControlMute(Control ctl, bool muted) {
    struct mixer_ctl* mctl;
    RETURN_STATUS_IF_ERROR(findControl(ctl, &mctl));
    std::lock_guard l(mMixerAccess);
    // ALSA switch semantics: 1 enables the path (audible), 0 disables it
    // (muted) — hence the inversion.
    if (int err = setMixerControlValue(mctl, muted ? 0 : 1); err != 0) {
        LOG(ERROR) << __func__ << ": failed to set " << ctl << " to " << muted << ", err=" << err;
        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
    }
    return ndk::ScopedAStatus::ok();
}
// Converts a volume in [0, 1] to an integer percentage and applies it to all
// channels of the control.
ndk::ScopedAStatus Mixer::setMixerControlVolume(Control ctl, float volume) {
    struct mixer_ctl* mctl;
    RETURN_STATUS_IF_ERROR(findControl(ctl, &mctl));
    volume = std::clamp(volume, 0.0f, 1.0f);
    std::lock_guard l(mMixerAccess);
    if (int err = setMixerControlPercent(mctl, std::floor(volume * 100)); err != 0) {
        LOG(ERROR) << __func__ << ": failed to set " << ctl << " to " << volume << ", err=" << err;
        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
    }
    return ndk::ScopedAStatus::ok();
}
// Fills 'percents' with the per-channel volume percentage of 'ctl'.
// Returns 0 on success or the first (negative) tinyalsa error encountered.
int Mixer::getMixerControlPercent(struct mixer_ctl* ctl, std::vector<int>* percents) {
    const unsigned int numValues = mixer_ctl_get_num_values(ctl);
    percents->resize(numValues);
    for (unsigned int i = 0; i < numValues; ++i) {
        const int result = mixer_ctl_get_percent(ctl, i);
        if (result < 0) {
            return result;
        }
        (*percents)[i] = result;
    }
    return 0;
}
// Fills 'values' with the raw per-channel values of 'ctl'.
// Returns 0 on success or the first (negative) tinyalsa error encountered.
int Mixer::getMixerControlValues(struct mixer_ctl* ctl, std::vector<int>* values) {
    const unsigned int numValues = mixer_ctl_get_num_values(ctl);
    values->resize(numValues);
    for (unsigned int i = 0; i < numValues; ++i) {
        const int result = mixer_ctl_get_value(ctl, i);
        if (result < 0) {
            return result;
        }
        (*values)[i] = result;
    }
    return 0;
}
int Mixer::setMixerControlPercent(struct mixer_ctl* ctl, int percent) {
const unsigned int n = mixer_ctl_get_num_values(ctl);
for (unsigned int id = 0; id < n; id++) {
if (int error = mixer_ctl_set_percent(ctl, id, percent); error != 0) {
return error;
}
}
return 0;
}
int Mixer::setMixerControlPercent(struct mixer_ctl* ctl, const std::vector<int>& percents) {
const unsigned int n = mixer_ctl_get_num_values(ctl);
for (unsigned int id = 0; id < n; id++) {
if (int error = mixer_ctl_set_percent(ctl, id, id < percents.size() ? percents[id] : 0);
error != 0) {
return error;
}
}
return 0;
}
int Mixer::setMixerControlValue(struct mixer_ctl* ctl, int value) {
const unsigned int n = mixer_ctl_get_num_values(ctl);
for (unsigned int id = 0; id < n; id++) {
if (int error = mixer_ctl_set_value(ctl, id, value); error != 0) {
return error;
}
}
return 0;
}
} // namespace aidl::android::hardware::audio::core::alsa

93
audio/alsa/Mixer.h Normal file
View File

@@ -0,0 +1,93 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <iostream>
#include <map>
#include <memory>
#include <mutex>
#include <string>
#include <vector>
#include <android-base/thread_annotations.h>
#include <android/binder_auto_utils.h>
extern "C" {
#include <tinyalsa/mixer.h>
}
namespace aidl::android::hardware::audio::core::alsa {
// RAII wrapper around a tinyalsa mixer exposing master/mic mute, volume and
// gain operations. Public methods serialize access to the underlying ALSA
// mixer through an internal mutex.
class Mixer {
  public:
    explicit Mixer(int card);
    ~Mixer();

    // Whether the ALSA mixer for the card was opened successfully.
    bool isValid() const { return mMixer != nullptr; }

    ndk::ScopedAStatus getMasterMute(bool* muted);
    ndk::ScopedAStatus getMasterVolume(float* volume);
    ndk::ScopedAStatus getMicGain(float* gain);
    ndk::ScopedAStatus getMicMute(bool* muted);
    // Appends the per-channel hardware volumes (each in [0, 1]) to 'volumes'.
    ndk::ScopedAStatus getVolumes(std::vector<float>* volumes);
    ndk::ScopedAStatus setMasterMute(bool muted);
    ndk::ScopedAStatus setMasterVolume(float volume);
    ndk::ScopedAStatus setMicGain(float gain);
    ndk::ScopedAStatus setMicMute(bool muted);
    ndk::ScopedAStatus setVolumes(const std::vector<float>& volumes);

  private:
    // Logical controls; each maps to a prioritized list of candidate ALSA
    // control names (see kPossibleControls in Mixer.cpp).
    enum Control {
        MASTER_SWITCH,
        MASTER_VOLUME,
        HW_VOLUME,
        MIC_SWITCH,
        MIC_GAIN,
    };
    using ControlNamesAndExpectedCtlType = std::pair<std::string, enum mixer_ctl_type>;
    using Controls = std::map<Control, struct mixer_ctl*>;

    friend std::ostream& operator<<(std::ostream&, Control);
    static const std::map<Control, std::vector<ControlNamesAndExpectedCtlType>> kPossibleControls;
    static Controls initializeMixerControls(struct mixer* mixer);

    ndk::ScopedAStatus findControl(Control ctl, struct mixer_ctl** result);
    ndk::ScopedAStatus getMixerControlMute(Control ctl, bool* muted);
    ndk::ScopedAStatus getMixerControlVolume(Control ctl, float* volume);
    ndk::ScopedAStatus setMixerControlMute(Control ctl, bool muted);
    ndk::ScopedAStatus setMixerControlVolume(Control ctl, float volume);
    int getMixerControlPercent(struct mixer_ctl* ctl, std::vector<int>* percents)
            REQUIRES(mMixerAccess);
    int getMixerControlValues(struct mixer_ctl* ctl, std::vector<int>* values)
            REQUIRES(mMixerAccess);
    int setMixerControlPercent(struct mixer_ctl* ctl, int percent) REQUIRES(mMixerAccess);
    int setMixerControlPercent(struct mixer_ctl* ctl, const std::vector<int>& percents)
            REQUIRES(mMixerAccess);
    int setMixerControlValue(struct mixer_ctl* ctl, int value) REQUIRES(mMixerAccess);

    // Since ALSA functions do not use internal locking, enforce thread safety at our level.
    std::mutex mMixerAccess;
    // The mixer object is owned by ALSA and will be released when the mixer is closed.
    struct mixer* const mMixer;
    // `mMixerControls` will only be initialized in constructor. After that, it will only be
    // read but not be modified. Each mixer_ctl object is owned by ALSA, it's life span is
    // the same as of the mixer itself.
    const Controls mMixerControls;
};
} // namespace aidl::android::hardware::audio::core::alsa

68
audio/alsa/ModuleAlsa.cpp Normal file
View File

@@ -0,0 +1,68 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define LOG_TAG "AHAL_ModuleAlsa"
#include <vector>
#include <android-base/logging.h>
#include "Utils.h"
#include "core-impl/ModuleAlsa.h"
extern "C" {
#include "alsa_device_profile.h"
}
using aidl::android::media::audio::common::AudioChannelLayout;
using aidl::android::media::audio::common::AudioFormatType;
using aidl::android::media::audio::common::AudioPort;
using aidl::android::media::audio::common::AudioProfile;
namespace aidl::android::hardware::audio::core {
// Fills in the profiles of a connected device port by probing the ALSA device
// for its supported formats, channel masks and sample rates. Fails with
// ILLEGAL_ARGUMENT when the port does not map to an ALSA device, and with
// ILLEGAL_STATE when the device cannot be probed.
ndk::ScopedAStatus ModuleAlsa::populateConnectedDevicePort(AudioPort* audioPort, int32_t) {
    auto deviceProfile = alsa::getDeviceProfile(*audioPort);
    if (!deviceProfile.has_value()) {
        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
    }
    auto proxy = alsa::readAlsaDeviceInfo(*deviceProfile);
    if (proxy.get() == nullptr) {
        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
    }
    alsa_device_profile* profile = proxy.getProfile();
    std::vector<AudioChannelLayout> channels = alsa::getChannelMasksFromProfile(profile);
    std::vector<int> sampleRates = alsa::getSampleRatesFromProfile(profile);
    // The formats array is terminated by PCM_FORMAT_INVALID; the loop is also
    // capped by both the profile capacity and the framework's per-port limit.
    for (size_t i = 0; i < std::min(MAX_PROFILE_FORMATS, AUDIO_PORT_MAX_AUDIO_PROFILES) &&
                       profile->formats[i] != PCM_FORMAT_INVALID;
         ++i) {
        auto audioFormatDescription =
                alsa::c2aidl_pcm_format_AudioFormatDescription(profile->formats[i]);
        if (audioFormatDescription.type == AudioFormatType::DEFAULT) {
            LOG(WARNING) << __func__ << ": unknown pcm type=" << profile->formats[i];
            continue;
        }
        AudioProfile audioProfile = {.format = audioFormatDescription,
                                     .channelMasks = channels,
                                     .sampleRates = sampleRates};
        audioPort->profiles.push_back(std::move(audioProfile));
    }
    return ndk::ScopedAStatus::ok();
}
} // namespace aidl::android::hardware::audio::core

326
audio/alsa/StreamAlsa.cpp Normal file
View File

@@ -0,0 +1,326 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cmath>
#include <limits>
#define LOG_TAG "AHAL_StreamAlsa"
#include <android-base/logging.h>
#include <Utils.h>
#include <audio_utils/clock.h>
#include <error/expected_utils.h>
#include <media/AidlConversionCppNdk.h>
#include "core-impl/StreamAlsa.h"
using aidl::android::hardware::audio::common::getChannelCount;
namespace aidl::android::hardware::audio::core {
// Caches immutable stream parameters from the context. The PCM config may be
// absent; in that case init() reports NO_INIT.
StreamAlsa::StreamAlsa(StreamContext* context, const Metadata& metadata, int readWriteRetries)
    : StreamCommonImpl(context, metadata),
      mBufferSizeFrames(getContext().getBufferSizeInFrames()),
      mFrameSizeBytes(getContext().getFrameSize()),
      mSampleRate(getContext().getSampleRate()),
      mIsInput(isInput(metadata)),
      mConfig(alsa::getPcmConfig(getContext(), mIsInput)),
      mReadWriteRetries(readWriteRetries) {}

StreamAlsa::~StreamAlsa() {
    cleanupWorker();
}
// Builds the NBAIO format (sample rate, channel count, sample format) shared
// by both ends of the stream's pipes.
::android::NBAIO_Format StreamAlsa::getPipeFormat() const {
    const audio_format_t audioFormat = VALUE_OR_FATAL(
            aidl2legacy_AudioFormatDescription_audio_format_t(getContext().getFormat()));
    const int channelCount = getChannelCount(getContext().getChannelLayout());
    return ::android::Format_from_SR_C(getContext().getSampleRate(), channelCount, audioFormat);
}

// Creates the writable end of a pipe sized to the stream buffer. Aborts on a
// failed format negotiation (a programming error, not a runtime condition).
::android::sp<::android::MonoPipe> StreamAlsa::makeSink(bool writeCanBlock) {
    const ::android::NBAIO_Format format = getPipeFormat();
    auto sink = ::android::sp<::android::MonoPipe>::make(mBufferSizeFrames, format, writeCanBlock);
    const ::android::NBAIO_Format offers[1] = {format};
    size_t numCounterOffers = 0;
    ssize_t index = sink->negotiate(offers, 1, nullptr, numCounterOffers);
    LOG_IF(FATAL, index != 0) << __func__ << ": Negotiation for the sink failed, index = " << index;
    return sink;
}

// Creates the readable end attached to an existing pipe.
::android::sp<::android::MonoPipeReader> StreamAlsa::makeSource(::android::MonoPipe* pipe) {
    const ::android::NBAIO_Format format = getPipeFormat();
    const ::android::NBAIO_Format offers[1] = {format};
    auto source = ::android::sp<::android::MonoPipeReader>::make(pipe);
    size_t numCounterOffers = 0;
    ssize_t index = source->negotiate(offers, 1, nullptr, numCounterOffers);
    LOG_IF(FATAL, index != 0) << __func__
                              << ": Negotiation for the source failed, index = " << index;
    return source;
}
::android::status_t StreamAlsa::init(DriverCallbackInterface* /*callback*/) {
    // The stream is usable only if a PCM configuration could be derived.
    return mConfig.has_value() ? ::android::OK : ::android::NO_INIT;
}

::android::status_t StreamAlsa::drain(StreamDescriptor::DrainMode) {
    if (!mIsInput) {
        // Approximate draining of an output stream by sleeping for the time it
        // takes to play out one full buffer.
        static constexpr float kMicrosPerSecond = MICROS_PER_SECOND;
        const size_t delayUs = static_cast<size_t>(
                std::roundf(mBufferSizeFrames * kMicrosPerSecond / mSampleRate));
        usleep(delayUs);
    }
    return ::android::OK;
}

::android::status_t StreamAlsa::flush() {
    return ::android::OK;
}

::android::status_t StreamAlsa::pause() {
    return ::android::OK;
}

::android::status_t StreamAlsa::standby() {
    // Standby fully releases the devices and worker threads; start() recreates them.
    teardownIo();
    return ::android::OK;
}
// Opens an ALSA proxy plus a pipe (sink + source pair) for every matching
// device profile and launches one I/O worker thread per device. Everything is
// built into locals first so that a failure midway leaves the members untouched.
::android::status_t StreamAlsa::start() {
    if (!mAlsaDeviceProxies.empty()) {
        // This is a resume after a pause.
        return ::android::OK;
    }
    decltype(mAlsaDeviceProxies) alsaDeviceProxies;
    decltype(mSources) sources;
    decltype(mSinks) sinks;
    for (const auto& device : getDeviceProfiles()) {
        // Skip devices whose direction does not match the stream's.
        if ((device.direction == PCM_OUT && mIsInput) ||
            (device.direction == PCM_IN && !mIsInput)) {
            continue;
        }
        alsa::DeviceProxy proxy;
        if (device.isExternal) {
            // Always ask alsa configure as required since the configuration should be supported
            // by the connected device. That is guaranteed by `setAudioPortConfig` and
            // `setAudioPatch`.
            proxy = alsa::openProxyForExternalDevice(
                    device, const_cast<struct pcm_config*>(&mConfig.value()),
                    true /*require_exact_match*/);
        } else {
            proxy = alsa::openProxyForAttachedDevice(
                    device, const_cast<struct pcm_config*>(&mConfig.value()), mBufferSizeFrames);
        }
        if (proxy.get() == nullptr) {
            return ::android::NO_INIT;
        }
        alsaDeviceProxies.push_back(std::move(proxy));
        auto sink = makeSink(mIsInput);  // Do not block the writer when it is on our thread.
        if (sink != nullptr) {
            sinks.push_back(sink);
        } else {
            return ::android::NO_INIT;
        }
        if (auto source = makeSource(sink.get()); source != nullptr) {
            sources.push_back(source);
        } else {
            return ::android::NO_INIT;
        }
    }
    if (alsaDeviceProxies.empty()) {
        return ::android::NO_INIT;
    }
    mAlsaDeviceProxies = std::move(alsaDeviceProxies);
    mSources = std::move(sources);
    mSinks = std::move(sinks);
    // The flag must be set before the workers start, since they poll it.
    mIoThreadIsRunning = true;
    for (size_t i = 0; i < mAlsaDeviceProxies.size(); ++i) {
        mIoThreads.emplace_back(mIsInput ? &StreamAlsa::inputIoThread : &StreamAlsa::outputIoThread,
                                this, i);
    }
    return ::android::OK;
}
// Moves one buffer of audio between the client and the pipes served by the I/O
// worker threads. Input: reads from the single pipe and pads with silence on a
// short read. Output: applies the software gain, then fans the buffer out to
// every sink. Reports the largest device latency observed.
::android::status_t StreamAlsa::transfer(void* buffer, size_t frameCount, size_t* actualFrameCount,
                                         int32_t* latencyMs) {
    if (mAlsaDeviceProxies.empty()) {
        LOG(FATAL) << __func__ << ": no opened devices";
        return ::android::NO_INIT;
    }
    const size_t bytesToTransfer = frameCount * mFrameSizeBytes;
    unsigned maxLatency = 0;
    if (mIsInput) {
        const size_t i = 0;  // For the input case, only support a single device.
        LOG(VERBOSE) << __func__ << ": reading from sink " << i;
        ssize_t framesRead = mSources[i]->read(buffer, frameCount);
        LOG_IF(FATAL, framesRead < 0) << "Error reading from the pipe: " << framesRead;
        if (ssize_t framesMissing = static_cast<ssize_t>(frameCount) - framesRead;
            framesMissing > 0) {
            LOG(WARNING) << __func__ << ": incomplete data received, inserting " << framesMissing
                         << " frames of silence";
            // Zero-fill the tail of the buffer (silence for PCM data).
            memset(static_cast<char*>(buffer) + framesRead * mFrameSizeBytes, 0,
                   framesMissing * mFrameSizeBytes);
        }
        maxLatency = proxy_get_latency(mAlsaDeviceProxies[i].get());
    } else {
        alsa::applyGain(buffer, mGain, bytesToTransfer, mConfig.value().format, mConfig->channels);
        for (size_t i = 0; i < mAlsaDeviceProxies.size(); ++i) {
            LOG(VERBOSE) << __func__ << ": writing into sink " << i;
            ssize_t framesWritten = mSinks[i]->write(buffer, frameCount);
            LOG_IF(FATAL, framesWritten < 0) << "Error writing into the pipe: " << framesWritten;
            if (ssize_t framesLost = static_cast<ssize_t>(frameCount) - framesWritten;
                framesLost > 0) {
                LOG(WARNING) << __func__ << ": sink " << i << " incomplete data sent, dropping "
                             << framesLost << " frames";
            }
            maxLatency = std::max(maxLatency, proxy_get_latency(mAlsaDeviceProxies[i].get()));
        }
    }
    // Short transfers are compensated above (silence / drop), so the full
    // count is always reported to the framework.
    *actualFrameCount = frameCount;
    maxLatency = std::min(maxLatency, static_cast<unsigned>(std::numeric_limits<int32_t>::max()));
    *latencyMs = maxLatency;
    return ::android::OK;
}
// Refines the observable frame/time position using hardware counters of the
// first device.
::android::status_t StreamAlsa::refinePosition(StreamDescriptor::Position* position) {
    if (mAlsaDeviceProxies.empty()) {
        LOG(WARNING) << __func__ << ": no opened devices";
        return ::android::NO_INIT;
    }
    // Since the proxy can only count transferred frames since its creation,
    // we override its counter value with ours and let it to correct for buffered frames.
    alsa::resetTransferredFrames(mAlsaDeviceProxies[0], position->frames);
    if (mIsInput) {
        if (int ret = proxy_get_capture_position(mAlsaDeviceProxies[0].get(), &position->frames,
                                                 &position->timeNs);
            ret != 0) {
            LOG(WARNING) << __func__ << ": failed to retrieve capture position: " << ret;
            return ::android::INVALID_OPERATION;
        }
    } else {
        uint64_t hwFrames;
        struct timespec timestamp;
        if (int ret = proxy_get_presentation_position(mAlsaDeviceProxies[0].get(), &hwFrames,
                                                      &timestamp);
            ret == 0) {
            // Keep the unsigned counter representable as int64 before the cast.
            // NOTE(review): this subtracts INT64_MAX rather than wrapping
            // modulo 2^63 — confirm the intended wrap-around semantics.
            if (hwFrames > std::numeric_limits<int64_t>::max()) {
                hwFrames -= std::numeric_limits<int64_t>::max();
            }
            position->frames = static_cast<int64_t>(hwFrames);
            position->timeNs = audio_utils_ns_from_timespec(&timestamp);
        } else {
            LOG(WARNING) << __func__ << ": failed to retrieve presentation position: " << ret;
            return ::android::INVALID_OPERATION;
        }
    }
    return ::android::OK;
}
void StreamAlsa::shutdown() {
    teardownIo();
}

// Stores the gain that transfer() applies in software (see alsa::applyGain);
// no hardware volume control is touched here.
ndk::ScopedAStatus StreamAlsa::setGain(float gain) {
    mGain = gain;
    return ndk::ScopedAStatus::ok();
}
// Capture worker: repeatedly reads one buffer from ALSA and pushes it into the
// pipe for transfer() to consume. Runs until mIoThreadIsRunning is cleared.
void StreamAlsa::inputIoThread(size_t idx) {
#if defined(__ANDROID__)
    setWorkerThreadPriority(pthread_gettid_np(pthread_self()));
    const std::string threadName = (std::string("in_") + std::to_string(idx)).substr(0, 15);
    pthread_setname_np(pthread_self(), threadName.c_str());
#endif
    const size_t bufferSize = mBufferSizeFrames * mFrameSizeBytes;
    std::vector<char> buffer(bufferSize);
    while (mIoThreadIsRunning) {
        if (int ret = proxy_read_with_retries(mAlsaDeviceProxies[idx].get(), &buffer[0], bufferSize,
                                              mReadWriteRetries);
            ret == 0) {
            // Keep writing until the whole buffer has been pushed into the pipe.
            // NOTE(review): on a short write the retry passes the buffer start
            // again instead of advancing by bufferFramesWritten frames —
            // verify whether re-sending the head is intended.
            size_t bufferFramesWritten = 0;
            while (bufferFramesWritten < mBufferSizeFrames) {
                if (!mIoThreadIsRunning) return;
                ssize_t framesWrittenOrError =
                        mSinks[idx]->write(&buffer[0], mBufferSizeFrames - bufferFramesWritten);
                if (framesWrittenOrError >= 0) {
                    bufferFramesWritten += framesWrittenOrError;
                } else {
                    LOG(WARNING) << __func__ << "[" << idx
                                 << "]: Error while writing into the pipe: "
                                 << framesWrittenOrError;
                }
            }
        } else {
            // Errors when the stream is being stopped are expected.
            LOG_IF(WARNING, mIoThreadIsRunning)
                    << __func__ << "[" << idx << "]: Error reading from ALSA: " << ret;
        }
    }
}
// Playback worker: repeatedly pulls frames from the pipe filled by transfer()
// and writes them to ALSA. Runs until mIoThreadIsRunning is cleared.
void StreamAlsa::outputIoThread(size_t idx) {
#if defined(__ANDROID__)
    setWorkerThreadPriority(pthread_gettid_np(pthread_self()));
    const std::string threadName = (std::string("out_") + std::to_string(idx)).substr(0, 15);
    pthread_setname_np(pthread_self(), threadName.c_str());
#endif
    const size_t bufferSize = mBufferSizeFrames * mFrameSizeBytes;
    std::vector<char> buffer(bufferSize);
    while (mIoThreadIsRunning) {
        ssize_t framesReadOrError = mSources[idx]->read(&buffer[0], mBufferSizeFrames);
        if (framesReadOrError > 0) {
            int ret = proxy_write_with_retries(mAlsaDeviceProxies[idx].get(), &buffer[0],
                                               framesReadOrError * mFrameSizeBytes,
                                               mReadWriteRetries);
            // Errors when the stream is being stopped are expected.
            LOG_IF(WARNING, ret != 0 && mIoThreadIsRunning)
                    << __func__ << "[" << idx << "]: Error writing into ALSA: " << ret;
        } else if (framesReadOrError == 0) {
            // MonoPipeReader does not have a blocking read, while use of std::condition_variable
            // requires use of a mutex. For now, just do a 1ms sleep. Consider using a different
            // pipe / ring buffer mechanism.
            if (mIoThreadIsRunning) usleep(1000);
        } else {
            LOG(WARNING) << __func__ << "[" << idx
                         << "]: Error while reading from the pipe: " << framesReadOrError;
        }
    }
}
// Stops the I/O workers and releases all ALSA and pipe resources. Order
// matters: clear the run flag, shut the input pipes down (so a worker blocked
// in a pipe write returns), stop the PCM streams, then join and destroy.
void StreamAlsa::teardownIo() {
    mIoThreadIsRunning = false;
    if (mIsInput) {
        LOG(DEBUG) << __func__ << ": shutting down pipes";
        for (auto& sink : mSinks) {
            sink->shutdown(true);
        }
    }
    LOG(DEBUG) << __func__ << ": stopping PCM streams";
    for (const auto& proxy : mAlsaDeviceProxies) {
        proxy_stop(proxy.get());
    }
    LOG(DEBUG) << __func__ << ": joining threads";
    for (auto& thread : mIoThreads) {
        if (thread.joinable()) thread.join();
    }
    mIoThreads.clear();
    LOG(DEBUG) << __func__ << ": closing PCM devices";
    mAlsaDeviceProxies.clear();
    mSources.clear();
    mSinks.clear();
}
} // namespace aidl::android::hardware::audio::core

480
audio/alsa/Utils.cpp Normal file
View File

@@ -0,0 +1,480 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <map>
#include <set>
#define LOG_TAG "AHAL_AlsaUtils"
#include <Utils.h>
#include <aidl/android/media/audio/common/AudioFormatType.h>
#include <aidl/android/media/audio/common/PcmType.h>
#include <android-base/logging.h>
#include <audio_utils/primitives.h>
#include <cutils/compiler.h>
#include "Utils.h"
#include "core-impl/utils.h"
using aidl::android::hardware::audio::common::getChannelCount;
using aidl::android::media::audio::common::AudioChannelLayout;
using aidl::android::media::audio::common::AudioDeviceAddress;
using aidl::android::media::audio::common::AudioFormatDescription;
using aidl::android::media::audio::common::AudioFormatType;
using aidl::android::media::audio::common::AudioIoFlags;
using aidl::android::media::audio::common::AudioPortExt;
using aidl::android::media::audio::common::PcmType;
namespace aidl::android::hardware::audio::core::alsa {
// Unity gain; gains above this value require clamping after scaling.
const float kUnityGainFloat = 1.0f;

// An empty proxy: no profile, no ALSA device attached.
DeviceProxy::DeviceProxy() : mProfile(nullptr), mProxy(nullptr, alsaProxyDeleter) {}

// Creates a proxy bound to the card/device described by 'deviceProfile'. The
// proxy struct is zero-initialized before use by the alsa_device_proxy API.
DeviceProxy::DeviceProxy(const DeviceProfile& deviceProfile)
    : mProfile(new alsa_device_profile), mProxy(new alsa_device_proxy, alsaProxyDeleter) {
    profile_init(mProfile.get(), deviceProfile.direction);
    mProfile->card = deviceProfile.card;
    mProfile->device = deviceProfile.device;
    memset(mProxy.get(), 0, sizeof(alsa_device_proxy));
}

// Custom unique_ptr deleter: closes the ALSA proxy before freeing its memory.
void DeviceProxy::alsaProxyDeleter(alsa_device_proxy* proxy) {
    if (proxy != nullptr) {
        proxy_close(proxy);
        delete proxy;
    }
}
namespace {
using AudioChannelCountToMaskMap = std::map<unsigned int, AudioChannelLayout>;
using AudioFormatDescToPcmFormatMap = std::map<AudioFormatDescription, enum pcm_format>;
using PcmFormatToAudioFormatDescMap = std::map<enum pcm_format, AudioFormatDescription>;

// Sentinel returned when a channel count has no corresponding mask.
AudioChannelLayout getInvalidChannelLayout() {
    static const AudioChannelLayout invalidChannelLayout =
            AudioChannelLayout::make<AudioChannelLayout::Tag::invalid>(0);
    return invalidChannelLayout;
}

// Builds a channel-count -> channel-mask lookup map from a set of masks.
static AudioChannelCountToMaskMap make_ChannelCountToMaskMap(
        const std::set<AudioChannelLayout>& channelMasks) {
    AudioChannelCountToMaskMap channelMaskToCountMap;
    for (const auto& channelMask : channelMasks) {
        channelMaskToCountMap.emplace(getChannelCount(channelMask), channelMask);
    }
    return channelMaskToCountMap;
}
#define DEFINE_CHANNEL_LAYOUT_MASK(n) \
    AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(AudioChannelLayout::LAYOUT_##n)

// Output positional layouts supported by this module, keyed by channel count.
const AudioChannelCountToMaskMap& getSupportedChannelOutLayoutMap() {
    static const std::set<AudioChannelLayout> supportedOutChannelLayouts = {
            DEFINE_CHANNEL_LAYOUT_MASK(MONO),
            DEFINE_CHANNEL_LAYOUT_MASK(STEREO),
    };
    static const AudioChannelCountToMaskMap outLayouts =
            make_ChannelCountToMaskMap(supportedOutChannelLayouts);
    return outLayouts;
}

// Input positional layouts supported by this module, keyed by channel count.
const AudioChannelCountToMaskMap& getSupportedChannelInLayoutMap() {
    static const std::set<AudioChannelLayout> supportedInChannelLayouts = {
            DEFINE_CHANNEL_LAYOUT_MASK(MONO),
            DEFINE_CHANNEL_LAYOUT_MASK(STEREO),
    };
    static const AudioChannelCountToMaskMap inLayouts =
            make_ChannelCountToMaskMap(supportedInChannelLayouts);
    return inLayouts;
}
#undef DEFINE_CHANNEL_LAYOUT_MASK
#define DEFINE_CHANNEL_INDEX_MASK(n) \
    AudioChannelLayout::make<AudioChannelLayout::Tag::indexMask>(AudioChannelLayout::INDEX_MASK_##n)

// Index-based channel masks for 1..24 channels, keyed by channel count.
const AudioChannelCountToMaskMap& getSupportedChannelIndexLayoutMap() {
    static const std::set<AudioChannelLayout> supportedIndexChannelLayouts = {
            DEFINE_CHANNEL_INDEX_MASK(1),  DEFINE_CHANNEL_INDEX_MASK(2),
            DEFINE_CHANNEL_INDEX_MASK(3),  DEFINE_CHANNEL_INDEX_MASK(4),
            DEFINE_CHANNEL_INDEX_MASK(5),  DEFINE_CHANNEL_INDEX_MASK(6),
            DEFINE_CHANNEL_INDEX_MASK(7),  DEFINE_CHANNEL_INDEX_MASK(8),
            DEFINE_CHANNEL_INDEX_MASK(9),  DEFINE_CHANNEL_INDEX_MASK(10),
            DEFINE_CHANNEL_INDEX_MASK(11), DEFINE_CHANNEL_INDEX_MASK(12),
            DEFINE_CHANNEL_INDEX_MASK(13), DEFINE_CHANNEL_INDEX_MASK(14),
            DEFINE_CHANNEL_INDEX_MASK(15), DEFINE_CHANNEL_INDEX_MASK(16),
            DEFINE_CHANNEL_INDEX_MASK(17), DEFINE_CHANNEL_INDEX_MASK(18),
            DEFINE_CHANNEL_INDEX_MASK(19), DEFINE_CHANNEL_INDEX_MASK(20),
            DEFINE_CHANNEL_INDEX_MASK(21), DEFINE_CHANNEL_INDEX_MASK(22),
            DEFINE_CHANNEL_INDEX_MASK(23), DEFINE_CHANNEL_INDEX_MASK(24),
    };
    static const AudioChannelCountToMaskMap indexLayouts =
            make_ChannelCountToMaskMap(supportedIndexChannelLayouts);
    return indexLayouts;
}
#undef DEFINE_CHANNEL_INDEX_MASK
AudioFormatDescription make_AudioFormatDescription(AudioFormatType type) {
    AudioFormatDescription result;
    result.type = type;
    return result;
}

AudioFormatDescription make_AudioFormatDescription(PcmType pcm) {
    auto result = make_AudioFormatDescription(AudioFormatType::PCM);
    result.pcm = pcm;
    return result;
}

// AIDL PCM format -> tinyalsa pcm_format, for the formats this module supports.
const AudioFormatDescToPcmFormatMap& getAudioFormatDescriptorToPcmFormatMap() {
    static const AudioFormatDescToPcmFormatMap formatDescToPcmFormatMap = {
            {make_AudioFormatDescription(PcmType::INT_16_BIT), PCM_FORMAT_S16_LE},
            {make_AudioFormatDescription(PcmType::FIXED_Q_8_24), PCM_FORMAT_S24_LE},
            {make_AudioFormatDescription(PcmType::INT_24_BIT), PCM_FORMAT_S24_3LE},
            {make_AudioFormatDescription(PcmType::INT_32_BIT), PCM_FORMAT_S32_LE},
            {make_AudioFormatDescription(PcmType::FLOAT_32_BIT), PCM_FORMAT_FLOAT_LE},
    };
    return formatDescToPcmFormatMap;
}

// Builds the inverse (pcm_format -> AIDL) of the map above.
static PcmFormatToAudioFormatDescMap make_PcmFormatToAudioFormatDescMap(
        const AudioFormatDescToPcmFormatMap& formatDescToPcmFormatMap) {
    PcmFormatToAudioFormatDescMap result;
    for (const auto& formatPair : formatDescToPcmFormatMap) {
        result.emplace(formatPair.second, formatPair.first);
    }
    return result;
}

const PcmFormatToAudioFormatDescMap& getPcmFormatToAudioFormatDescMap() {
    static const PcmFormatToAudioFormatDescMap pcmFormatToFormatDescMap =
            make_PcmFormatToAudioFormatDescMap(getAudioFormatDescriptorToPcmFormatMap());
    return pcmFormatToFormatDescMap;
}
// Applies 'gain' to a buffer of 16-bit PCM samples in place, using unsigned
// Q4.12 fixed-point arithmetic (u4_12_from_float). The stereo path processes
// one frame (two interleaved samples) per 32-bit word via mulRL; the mono
// path processes one sample at a time. Saturation (clamp16) is performed only
// when the gain is above unity, since attenuation cannot overflow 16 bits.
void applyGainToInt16Buffer(void* buffer, const size_t bufferSizeBytes, const float gain,
                            int channelCount) {
    const uint16_t unityGainQ4_12 = u4_12_from_float(kUnityGainFloat);
    const uint16_t vl = u4_12_from_float(gain);
    // Duplicate the gain into both halves for the packed stereo multiply.
    const uint32_t vrl = (vl << 16) | vl;
    int numFrames = 0;
    if (channelCount == 2) {
        numFrames = bufferSizeBytes / sizeof(uint32_t);
        if (numFrames == 0) {
            return;
        }
        uint32_t* intBuffer = (uint32_t*)buffer;
        if (CC_UNLIKELY(vl > unityGainQ4_12)) {
            // Amplification: shift back from Q4.12 and saturate each channel.
            do {
                int32_t l = mulRL(1, *intBuffer, vrl) >> 12;
                int32_t r = mulRL(0, *intBuffer, vrl) >> 12;
                l = clamp16(l);
                r = clamp16(r);
                *intBuffer++ = (r << 16) | (l & 0xFFFF);
            } while (--numFrames);
        } else {
            // Attenuation: result always fits in 16 bits, clamping is skipped.
            do {
                int32_t l = mulRL(1, *intBuffer, vrl) >> 12;
                int32_t r = mulRL(0, *intBuffer, vrl) >> 12;
                *intBuffer++ = (r << 16) | (l & 0xFFFF);
            } while (--numFrames);
        }
    } else {
        numFrames = bufferSizeBytes / sizeof(uint16_t);
        if (numFrames == 0) {
            return;
        }
        int16_t* intBuffer = (int16_t*)buffer;
        if (CC_UNLIKELY(vl > unityGainQ4_12)) {
            do {
                int32_t mono = mul(*intBuffer, static_cast<int16_t>(vl)) >> 12;
                *intBuffer++ = clamp16(mono);
            } while (--numFrames);
        } else {
            do {
                int32_t mono = mul(*intBuffer, static_cast<int16_t>(vl)) >> 12;
                *intBuffer++ = static_cast<int16_t>(mono & 0xFFFF);
            } while (--numFrames);
        }
    }
}
// Applies 'gain' in place to a buffer of signed 32-bit PCM samples (also used for
// Q8.23 data widened from 24-bit formats, which shares the same integer limits).
// When boosting above unity the result saturates at the int32 range; attenuation is
// applied without clamping since it cannot overflow.
void applyGainToInt32Buffer(int32_t* typedBuffer, const size_t bufferSizeBytes, const float gain) {
    const int numSamples = bufferSizeBytes / sizeof(int32_t);
    if (numSamples == 0) {
        return;
    }
    if (CC_UNLIKELY(gain > kUnityGainFloat)) {
        // The upper-bound check must be inclusive: INT32_MAX (2^31 - 1) is not exactly
        // representable as a float and rounds up to 2^31, so a product equal to
        // static_cast<float>(INT32_MAX) would overflow int32_t if converted directly
        // (out-of-range float-to-integer conversion is undefined behavior).
        static constexpr float kUpperBound = static_cast<float>(INT32_MAX);
        static constexpr float kLowerBound = static_cast<float>(INT32_MIN);  // exact: -2^31
        for (int i = 0; i < numSamples; ++i) {
            const float multiplied = typedBuffer[i] * gain;
            if (multiplied >= kUpperBound) {
                typedBuffer[i] = INT32_MAX;
            } else if (multiplied <= kLowerBound) {
                typedBuffer[i] = INT32_MIN;
            } else {
                typedBuffer[i] = static_cast<int32_t>(multiplied);
            }
        }
    } else {
        // Attenuation cannot leave the int32 range.
        for (int i = 0; i < numSamples; ++i) {
            typedBuffer[i] = static_cast<int32_t>(typedBuffer[i] * gain);
        }
    }
}
// Applies 'gain' in place to a buffer of float PCM samples. When boosting above
// unity the result is clamped to the nominal [-1.0, 1.0] full-scale range;
// attenuation is applied as-is.
void applyGainToFloatBuffer(float* floatBuffer, const size_t bufferSizeBytes, const float gain) {
    const int sampleCount = bufferSizeBytes / sizeof(float);
    if (CC_UNLIKELY(gain > kUnityGainFloat)) {
        for (int i = 0; i < sampleCount; ++i) {
            floatBuffer[i] = std::clamp(floatBuffer[i] * gain, -kUnityGainFloat, kUnityGainFloat);
        }
    } else {
        for (int i = 0; i < sampleCount; ++i) {
            floatBuffer[i] = floatBuffer[i] * gain;
        }
    }
}
} // namespace
// Prints a device profile as "<card,device>" for logging.
std::ostream& operator<<(std::ostream& os, const DeviceProfile& device) {
    os << "<" << device.card << "," << device.device << ">";
    return os;
}
// Maps a channel count to the corresponding positional (layout) channel mask for the
// given direction, or an invalid layout when the count is not supported.
AudioChannelLayout getChannelLayoutMaskFromChannelCount(unsigned int channelCount, int isInput) {
    const auto& layoutMap =
            isInput ? getSupportedChannelInLayoutMap() : getSupportedChannelOutLayoutMap();
    return findValueOrDefault(layoutMap, channelCount, getInvalidChannelLayout());
}
// Maps a channel count to the corresponding index-based channel mask, or an invalid
// layout when the count is not supported.
AudioChannelLayout getChannelIndexMaskFromChannelCount(unsigned int channelCount) {
    const auto& indexMap = getSupportedChannelIndexLayoutMap();
    return findValueOrDefault(indexMap, channelCount, getInvalidChannelLayout());
}
// Returns the channel count for a layout- or index-based channel mask, or 0 when the
// mask kind is unsupported ('none', 'invalid', 'voiceMask') or the count is not one of
// the supported entries.
unsigned int getChannelCountFromChannelMask(const AudioChannelLayout& channelMask, bool isInput) {
    switch (channelMask.getTag()) {
        case AudioChannelLayout::Tag::layoutMask: {
            // NOTE(review): findKeyOrDefault is given the count computed by
            // getChannelCount(); presumably it validates that count against the map's
            // keys and returns it (0 when absent) — confirm against the helper's
            // definition.
            return findKeyOrDefault(
                    isInput ? getSupportedChannelInLayoutMap() : getSupportedChannelOutLayoutMap(),
                    static_cast<unsigned>(getChannelCount(channelMask)), 0u /*defaultValue*/);
        }
        case AudioChannelLayout::Tag::indexMask: {
            return findKeyOrDefault(getSupportedChannelIndexLayoutMap(),
                                    static_cast<unsigned>(getChannelCount(channelMask)),
                                    0u /*defaultValue*/);
        }
        case AudioChannelLayout::Tag::none:
        case AudioChannelLayout::Tag::invalid:
        case AudioChannelLayout::Tag::voiceMask:
        default:
            return 0;
    }
}
std::vector<AudioChannelLayout> getChannelMasksFromProfile(const alsa_device_profile* profile) {
const bool isInput = profile->direction == PCM_IN;
std::vector<AudioChannelLayout> channels;
for (size_t i = 0; i < AUDIO_PORT_MAX_CHANNEL_MASKS && profile->channel_counts[i] != 0; ++i) {
auto layoutMask =
alsa::getChannelLayoutMaskFromChannelCount(profile->channel_counts[i], isInput);
if (layoutMask.getTag() == AudioChannelLayout::Tag::layoutMask) {
channels.push_back(layoutMask);
}
auto indexMask = alsa::getChannelIndexMaskFromChannelCount(profile->channel_counts[i]);
if (indexMask.getTag() == AudioChannelLayout::Tag::indexMask) {
channels.push_back(indexMask);
}
}
return channels;
}
// Extracts an ALSA device profile from an AIDL AudioDevice. Returns nullopt (with a
// logged error) when the address is not an ALSA address or is malformed; otherwise
// fills in card/device, the PCM direction, and whether the device is external (i.e.
// its type carries a non-empty connection).
std::optional<DeviceProfile> getDeviceProfile(
        const ::aidl::android::media::audio::common::AudioDevice& audioDevice, bool isInput) {
    if (audioDevice.address.getTag() != AudioDeviceAddress::Tag::alsa) {
        LOG(ERROR) << __func__ << ": not alsa address: " << audioDevice.toString();
        return std::nullopt;
    }
    const auto& alsaAddress = audioDevice.address.get<AudioDeviceAddress::Tag::alsa>();
    if (alsaAddress.size() != 2 || alsaAddress[0] < 0 || alsaAddress[1] < 0) {
        LOG(ERROR) << __func__
                   << ": malformed alsa address: " << ::android::internal::ToString(alsaAddress);
        return std::nullopt;
    }
    DeviceProfile profile;
    profile.card = alsaAddress[0];
    profile.device = alsaAddress[1];
    profile.direction = isInput ? PCM_IN : PCM_OUT;
    profile.isExternal = !audioDevice.type.connection.empty();
    return profile;
}
// Convenience overload: extracts the ALSA device profile from an AIDL device port.
// Returns nullopt (with a logged error) when the port is not a device port.
std::optional<DeviceProfile> getDeviceProfile(
        const ::aidl::android::media::audio::common::AudioPort& audioPort) {
    if (audioPort.ext.getTag() != AudioPortExt::Tag::device) {
        LOG(ERROR) << __func__ << ": port id " << audioPort.id << " is not a device port";
        return std::nullopt;
    }
    const bool isInput = audioPort.flags.getTag() == AudioIoFlags::input;
    return getDeviceProfile(audioPort.ext.get<AudioPortExt::Tag::device>().device, isInput);
}
std::optional<struct pcm_config> getPcmConfig(const StreamContext& context, bool isInput) {
struct pcm_config config;
config.channels = alsa::getChannelCountFromChannelMask(context.getChannelLayout(), isInput);
if (config.channels == 0) {
LOG(ERROR) << __func__ << ": invalid channel=" << context.getChannelLayout().toString();
return std::nullopt;
}
config.format = alsa::aidl2c_AudioFormatDescription_pcm_format(context.getFormat());
if (config.format == PCM_FORMAT_INVALID) {
LOG(ERROR) << __func__ << ": invalid format=" << context.getFormat().toString();
return std::nullopt;
}
config.rate = context.getSampleRate();
if (config.rate == 0) {
LOG(ERROR) << __func__ << ": invalid sample rate=" << config.rate;
return std::nullopt;
}
return config;
}
// Collects the sample rates advertised by an ALSA device profile. The profile's list
// is zero-terminated and bounded by both the profile capacity and the framework port
// limit; the bound is hoisted out of the loop.
std::vector<int> getSampleRatesFromProfile(const alsa_device_profile* profile) {
    const int maxRates = std::min(MAX_PROFILE_SAMPLE_RATES, AUDIO_PORT_MAX_SAMPLING_RATES);
    std::vector<int> sampleRates;
    for (int i = 0; i < maxRates; ++i) {
        if (profile->sample_rates[i] == 0) {
            break;  // zero terminates the list
        }
        sampleRates.push_back(profile->sample_rates[i]);
    }
    return sampleRates;
}
// Opens an ALSA proxy for a built-in (attached) device. Aborts (LOG(FATAL)) when
// called for an external device or when the built-in profile cannot be initialized;
// returns a "null" proxy when the device cannot be opened.
DeviceProxy openProxyForAttachedDevice(const DeviceProfile& deviceProfile,
                                       struct pcm_config* pcmConfig, size_t bufferFrameCount) {
    if (deviceProfile.isExternal) {
        LOG(FATAL) << __func__ << ": called for an external device, address=" << deviceProfile;
    }
    DeviceProxy proxy(deviceProfile);
    if (!profile_fill_builtin_device_info(proxy.getProfile(), pcmConfig, bufferFrameCount)) {
        LOG(FATAL) << __func__ << ": failed to init for built-in device, address=" << deviceProfile;
    }
    const int prepareError = proxy_prepare_from_default_config(proxy.get(), proxy.getProfile());
    if (prepareError != 0) {
        // Unreachable after LOG(FATAL) aborts, kept for safety.
        LOG(FATAL) << __func__ << ": fail to prepare for device address=" << deviceProfile
                   << " error=" << prepareError;
        return DeviceProxy();
    }
    const int openError = proxy_open(proxy.get());
    if (openError != 0) {
        LOG(ERROR) << __func__ << ": failed to open device, address=" << deviceProfile
                   << " error=" << openError;
        return DeviceProxy();
    }
    return proxy;
}
// Opens an ALSA proxy for an external (hot-pluggable) device. Aborts (LOG(FATAL))
// when called for an attached device; returns a "null" proxy when the device info
// cannot be read, the configuration cannot be prepared, or the device fails to open.
DeviceProxy openProxyForExternalDevice(const DeviceProfile& deviceProfile,
                                       struct pcm_config* pcmConfig, bool requireExactMatch) {
    if (!deviceProfile.isExternal) {
        LOG(FATAL) << __func__ << ": called for an attached device, address=" << deviceProfile;
    }
    DeviceProxy proxy = readAlsaDeviceInfo(deviceProfile);
    if (proxy.get() == nullptr) {
        return proxy;
    }
    const int prepareError =
            proxy_prepare(proxy.get(), proxy.getProfile(), pcmConfig, requireExactMatch);
    if (prepareError != 0) {
        LOG(ERROR) << __func__ << ": fail to prepare for device address=" << deviceProfile
                   << " error=" << prepareError;
        return DeviceProxy();
    }
    const int openError = proxy_open(proxy.get());
    if (openError != 0) {
        LOG(ERROR) << __func__ << ": failed to open device, address=" << deviceProfile
                   << " error=" << openError;
        return DeviceProxy();
    }
    return proxy;
}
// Reads the ALSA device info into a new proxy for the given profile. Returns a
// "null" proxy (get() == nullptr) when the info cannot be read.
DeviceProxy readAlsaDeviceInfo(const DeviceProfile& deviceProfile) {
    DeviceProxy proxy(deviceProfile);
    if (profile_read_device_info(proxy.getProfile())) {
        return proxy;
    }
    LOG(ERROR) << __func__ << ": unable to read device info, device address=" << deviceProfile;
    return DeviceProxy();
}
// Sets the transferred-frames counter on a proxy; a "null" proxy is a no-op.
void resetTransferredFrames(DeviceProxy& proxy, uint64_t frames) {
    if (alsa_device_proxy* rawProxy = proxy.get(); rawProxy != nullptr) {
        rawProxy->transferred = frames;
    }
}
// Converts a tinyalsa PCM format into its AIDL description; returns a
// default-constructed description when the format is unknown.
AudioFormatDescription c2aidl_pcm_format_AudioFormatDescription(enum pcm_format legacy) {
    const auto& inverseMap = getPcmFormatToAudioFormatDescMap();
    return findValueOrDefault(inverseMap, legacy, AudioFormatDescription());
}
// Converts an AIDL format description into a tinyalsa PCM format; returns
// PCM_FORMAT_INVALID when the description is unknown.
pcm_format aidl2c_AudioFormatDescription_pcm_format(const AudioFormatDescription& aidl) {
    const auto& forwardMap = getAudioFormatDescriptorToPcmFormatMap();
    return findValueOrDefault(forwardMap, aidl, PCM_FORMAT_INVALID);
}
// Applies 'gain' in place to an audio buffer of the given PCM format and channel
// count. Only mono and stereo buffers and formats present in the format map are
// supported; unsupported combinations are logged and the buffer is left untouched.
// A gain within 1e-6 of unity is treated as a no-op.
void applyGain(void* buffer, float gain, size_t bufferSizeBytes, enum pcm_format pcmFormat,
               int channelCount) {
    if (channelCount != 1 && channelCount != 2) {
        LOG(WARNING) << __func__ << ": unsupported channel count " << channelCount;
        return;
    }
    if (!getPcmFormatToAudioFormatDescMap().contains(pcmFormat)) {
        LOG(WARNING) << __func__ << ": unsupported pcm format " << pcmFormat;
        return;
    }
    // Unity gain: nothing to do.
    if (std::abs(gain - kUnityGainFloat) < 1e-6) {
        return;
    }
    switch (pcmFormat) {
        case PCM_FORMAT_S16_LE:
            applyGainToInt16Buffer(buffer, bufferSizeBytes, gain, channelCount);
            break;
        case PCM_FORMAT_FLOAT_LE: {
            float* floatBuffer = (float*)buffer;
            applyGainToFloatBuffer(floatBuffer, bufferSizeBytes, gain);
        } break;
        case PCM_FORMAT_S24_LE:
            // PCM_FORMAT_S24_LE buffer is composed of signed fixed-point 32-bit Q8.23 data with
            // min and max limits of the same bit representation as min and max limits of
            // PCM_FORMAT_S32_LE buffer.
        case PCM_FORMAT_S32_LE: {
            int32_t* typedBuffer = (int32_t*)buffer;
            applyGainToInt32Buffer(typedBuffer, bufferSizeBytes, gain);
        } break;
        case PCM_FORMAT_S24_3LE: {
            // Packed 24-bit samples: widen into a temporary int32 buffer, apply the
            // gain there, then pack the result back in place.
            int numSamples = bufferSizeBytes / (sizeof(uint8_t) * 3);
            if (numSamples == 0) {
                return;
            }
            std::unique_ptr<int32_t[]> typedBuffer(new int32_t[numSamples]);
            memcpy_to_i32_from_p24(typedBuffer.get(), (uint8_t*)buffer, numSamples);
            applyGainToInt32Buffer(typedBuffer.get(), numSamples * sizeof(int32_t), gain);
            memcpy_to_p24_from_i32((uint8_t*)buffer, typedBuffer.get(), numSamples);
        } break;
        default:
            // Unreachable: pcmFormat was validated against the format map above.
            LOG(FATAL) << __func__ << ": unsupported pcm format " << pcmFormat;
            break;
    }
}
} // namespace aidl::android::hardware::audio::core::alsa

90
audio/alsa/Utils.h Normal file
View File

@@ -0,0 +1,90 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <functional>
#include <iostream>
#include <memory>
#include <optional>
#include <vector>
#include <aidl/android/media/audio/common/AudioChannelLayout.h>
#include <aidl/android/media/audio/common/AudioFormatDescription.h>
#include <aidl/android/media/audio/common/AudioPort.h>
#include "core-impl/Stream.h"
extern "C" {
#include <tinyalsa/pcm.h>
#include "alsa_device_profile.h"
#include "alsa_device_proxy.h"
}
namespace aidl::android::hardware::audio::core::alsa {
// Identifies an ALSA PCM device and how the HAL uses it.
struct DeviceProfile {
    int card;        // ALSA card number
    int device;      // ALSA device number on the card
    int direction; /* PCM_OUT or PCM_IN */
    bool isExternal;  // true when the AIDL device type has a non-empty connection
                      // (i.e. a hot-pluggable device such as USB)
};
// Prints the profile as "<card,device>" for logging.
std::ostream& operator<<(std::ostream& os, const DeviceProfile& device);
// RAII wrapper owning a libalsautils 'alsa_device_proxy' and its associated
// 'alsa_device_profile'. A default-constructed instance is a "null" proxy:
// get() returns nullptr and it owns no device.
class DeviceProxy {
  public:
    DeviceProxy();  // Constructs a "null" proxy.
    explicit DeviceProxy(const DeviceProfile& deviceProfile);
    // Accessors return raw, non-owning pointers; lifetime is tied to this object.
    alsa_device_profile* getProfile() const { return mProfile.get(); }
    alsa_device_proxy* get() const { return mProxy.get(); }

  private:
    // Custom deleter used so the proxy is properly closed/released on destruction.
    static void alsaProxyDeleter(alsa_device_proxy* proxy);
    using AlsaProxy = std::unique_ptr<alsa_device_proxy, decltype(alsaProxyDeleter)*>;
    std::unique_ptr<alsa_device_profile> mProfile;
    AlsaProxy mProxy;
};
// Applies 'gain' in place to an audio buffer of the given PCM format/channel count.
// NOTE(review): the definition names this parameter 'bufferSizeBytes'; the two
// should be aligned.
void applyGain(void* buffer, float gain, size_t bytesToTransfer, enum pcm_format pcmFormat,
               int channelCount);
// Maps a channel count to a positional (layout) channel mask; invalid layout when
// the count is not supported for the given direction.
::aidl::android::media::audio::common::AudioChannelLayout getChannelLayoutMaskFromChannelCount(
        unsigned int channelCount, int isInput);
// Maps a channel count to an index-based channel mask; invalid layout when unsupported.
::aidl::android::media::audio::common::AudioChannelLayout getChannelIndexMaskFromChannelCount(
        unsigned int channelCount);
// Returns the channel count of a layout- or index-based mask, 0 when unsupported.
unsigned int getChannelCountFromChannelMask(
        const ::aidl::android::media::audio::common::AudioChannelLayout& channelMask, bool isInput);
// Builds the list of layout and index channel masks supported by an ALSA profile.
std::vector<::aidl::android::media::audio::common::AudioChannelLayout> getChannelMasksFromProfile(
        const alsa_device_profile* profile);
// Extracts a DeviceProfile from an AIDL device; nullopt on a non-ALSA or malformed address.
std::optional<DeviceProfile> getDeviceProfile(
        const ::aidl::android::media::audio::common::AudioDevice& audioDevice, bool isInput);
// Same as above, starting from a device port; nullopt when the port is not a device port.
std::optional<DeviceProfile> getDeviceProfile(
        const ::aidl::android::media::audio::common::AudioPort& audioPort);
// Builds a pcm_config from a stream context; nullopt on unsupported channel mask,
// format, or sample rate.
std::optional<struct pcm_config> getPcmConfig(const StreamContext& context, bool isInput);
// Collects the non-zero sample rates advertised by an ALSA device profile.
std::vector<int> getSampleRatesFromProfile(const alsa_device_profile* profile);
// Opens a proxy for a built-in (attached) device; aborts if called for an external one.
DeviceProxy openProxyForAttachedDevice(const DeviceProfile& deviceProfile,
                                       struct pcm_config* pcmConfig, size_t bufferFrameCount);
// Opens a proxy for an external device; aborts if called for an attached one.
DeviceProxy openProxyForExternalDevice(const DeviceProfile& deviceProfile,
                                       struct pcm_config* pcmConfig, bool requireExactMatch);
// Reads the ALSA device info into a new proxy; returns a "null" proxy on failure.
DeviceProxy readAlsaDeviceInfo(const DeviceProfile& deviceProfile);
// Sets the transferred-frames counter of a non-null proxy.
void resetTransferredFrames(DeviceProxy& proxy, uint64_t frames);
// Converts a tinyalsa PCM format to its AIDL description (default-constructed if unknown).
::aidl::android::media::audio::common::AudioFormatDescription
c2aidl_pcm_format_AudioFormatDescription(enum pcm_format legacy);
// Converts an AIDL format description to a tinyalsa PCM format (PCM_FORMAT_INVALID if unknown).
pcm_format aidl2c_AudioFormatDescription_pcm_format(
        const ::aidl::android::media::audio::common::AudioFormatDescription& aidl);
} // namespace aidl::android::hardware::audio::core::alsa

View File

@@ -0,0 +1,24 @@
service vendor.audio-hal-aidl /apex/com.android.hardware.audio/bin/hw/android.hardware.audio.service-aidl.example
class hal
user audioserver
# media gid needed for /dev/fm (radio) and for /data/misc/media (tee)
group audio camera drmrpc inet media mediadrm net_bt net_bt_admin net_bw_acct wakelock context_hub
capabilities BLOCK_SUSPEND SYS_NICE
# setting RLIMIT_RTPRIO allows binder RT priority inheritance
rlimit rtprio 10 10
ioprio rt 4
task_profiles ProcessCapacityHigh HighPerformance
onrestart restart audioserver
service vendor.audio-effect-hal-aidl /apex/com.android.hardware.audio/bin/hw/android.hardware.audio.effect.service-aidl.example
class hal
user audioserver
# media gid needed for /dev/fm (radio) and for /data/misc/media (tee)
group audio media
capabilities BLOCK_SUSPEND
# setting RLIMIT_RTPRIO allows binder RT priority inheritance
rlimit rtprio 10 10
ioprio rt 4
task_profiles ProcessCapacityHigh HighPerformance
onrestart restart audioserver

View File

@@ -0,0 +1,39 @@
<manifest version="1.0" type="device">
<hal format="aidl">
<name>android.hardware.audio.core</name>
<version>3</version>
<fqname>IModule/default</fqname>
</hal>
<hal format="aidl">
<name>android.hardware.audio.core</name>
<version>3</version>
<fqname>IModule/r_submix</fqname>
</hal>
<hal format="aidl">
<name>android.hardware.audio.core</name>
<version>3</version>
<fqname>IModule/bluetooth</fqname>
</hal>
<hal format="aidl">
<name>android.hardware.audio.core</name>
<version>3</version>
<fqname>IConfig/default</fqname>
</hal>
<!-- Uncomment when these modules present in the configuration
<hal format="aidl">
<name>android.hardware.audio.core</name>
<version>3</version>
<fqname>IModule/stub</fqname>
</hal>
<hal format="aidl">
<name>android.hardware.audio.core</name>
<version>3</version>
<fqname>IModule/usb</fqname>
</hal>
-->
<hal format="aidl">
<name>android.hardware.audio.effect</name>
<version>3</version>
<fqname>IFactory/default</fqname>
</hal>
</manifest>

View File

@@ -0,0 +1,58 @@
package {
default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "hardware_interfaces_license"
// to get the below license kinds:
// SPDX-license-identifier-Apache-2.0
default_applicable_licenses: ["hardware_interfaces_license"],
}
apex {
name: "com.android.hardware.audio",
manifest: "manifest.json",
file_contexts: "file_contexts",
key: "com.android.hardware.key",
certificate: ":com.android.hardware.certificate",
updatable: false,
vendor: true,
binaries: [
"android.hardware.audio.service-aidl.example",
"android.hardware.audio.effect.service-aidl.example",
],
native_shared_libs: [
"libaecsw",
"libagc1sw",
"libagc2sw",
"libbassboostsw",
"libbundleaidl",
"libdownmixaidl",
"libdynamicsprocessingaidl",
"libenvreverbsw",
"libequalizersw",
"libextensioneffect",
"libhapticgeneratoraidl",
"libloudnessenhanceraidl",
"libnssw",
"libpreprocessingaidl",
"libpresetreverbsw",
"libreverbaidl",
"libspatializersw",
"libvirtualizersw",
"libvisualizeraidl",
"libvolumesw",
],
prebuilts: [
"android.hardware.audio.service-aidl.example.rc",
"android.hardware.audio.service-aidl.xml",
"android.hardware.bluetooth.audio.xml",
],
required: [
"aidl_audio_set_configurations_bfbs",
"aidl_default_audio_set_configurations_json",
"aidl_audio_set_scenarios_bfbs",
"aidl_default_audio_set_scenarios_json",
"hfp_codec_capabilities_xml",
],
}

View File

@@ -0,0 +1,4 @@
(/.*)? u:object_r:vendor_file:s0
/etc(/.*)? u:object_r:vendor_configs_file:s0
/bin/hw/android\.hardware\.audio\.service-aidl\.example u:object_r:hal_audio_default_exec:s0
/bin/hw/android\.hardware\.audio\.effect\.service-aidl\.example u:object_r:hal_audio_default_exec:s0

View File

@@ -0,0 +1,4 @@
{
"name": "com.android.hardware.audio",
"version": 1
}

View File

@@ -0,0 +1,178 @@
<?xml version="1.0" encoding="UTF-8"?>
<audio_effects_conf version="2.0" xmlns="http://schemas.android.com/audio/audio_effects_conf/v2_0">
<!-- Overview.
This example config file was copied from an existing one: frameworks/av/media/libeffects/data/
audio_effects.xml, with effect library names updated to the AIDL libraries we currently have.
All "library" attributes in an "effect" element must match a "library" element with the
same value of the "name" attribute.
All "effect" attributes in "preprocess" and "postprocess" element must match an "effect"
element with the same value of the "name" attribute.
The AIDL EffectFactory relies on the "name" attribute in the "effect" element to identify the
effect type, so it is necessary to have a mapping from name to effect type UUID. Make
sure to either use an existing effect name as the key in
::android::hardware::audio::effect::kUuidNameTypeMap, or add a new {name, typeUUID} map
item to the kUuidNameTypeMap.
An existing audio_effects.xml should keep working without any change as long as:
1. "path" attribute of "library" element matches with the actual effect library name.
2. "name" attribute of "effect" and "effectProxy" element correctly added as key of
kUuidNameTypeMap, with value matches Identity.type in Descriptor.aidl.
3. "uuid" attribute of "effect" element matches Identity.uuid in Descriptor.aidl.
4. "uuid" attribute of "effectProxy" element matches Identity.proxy in Descriptor.aidl.
-->
<!-- List of effect libraries to load.
Each library element must contain a "name" attribute and a "path" attribute giving the
name of a library .so file on the target device.
-->
<libraries>
<library name="aecsw" path="libaecsw.so"/>
<library name="agc1sw" path="libagc1sw.so"/>
<library name="agc2sw" path="libagc2sw.so"/>
<library name="bassboostsw" path="libbassboostsw.so"/>
<library name="bundle" path="libbundleaidl.so"/>
<library name="downmix" path="libdownmixaidl.so"/>
<library name="dynamics_processing" path="libdynamicsprocessingaidl.so"/>
<library name="equalizersw" path="libequalizersw.so"/>
<library name="erasersw" path="liberasersw.so"/>
<library name="haptic_generator" path="libhapticgeneratoraidl.so"/>
<library name="loudness_enhancer" path="libloudnessenhanceraidl.so"/>
<library name="nssw" path="libnssw.so"/>
<library name="env_reverbsw" path="libenvreverbsw.so"/>
<library name="pre_processing" path="libpreprocessingaidl.so"/>
<library name="preset_reverbsw" path="libpresetreverbsw.so"/>
<library name="reverb" path="libreverbaidl.so"/>
<library name="virtualizersw" path="libvirtualizersw.so"/>
<library name="visualizer" path="libvisualizeraidl.so"/>
<library name="volumesw" path="libvolumesw.so"/>
<library name="extensioneffect" path="libextensioneffect.so"/>
<library name="spatializersw" path="libspatializersw.so"/>
</libraries>
<!-- list of effects to load.
Each "effect" element must contain a "name", "library" and a "uuid" attribute, an optional
"type" attribute can be used to add any customized effect type.
The value of the "library" attribute must correspond to the name of one library element in
the "libraries" element.
The "name" attribute is used to specify the effect type, and should map to a key of
aidl::android::hardware::audio::effect::kUuidNameTypeMap.
The "uuid" attribute is the implementation specific UUID as specified by the effect vendor.
Effect proxies can be supported with the "effectProxy" element; each sub-element should
contain "library" and "uuid" attributes, and all other attributes are ignored. The framework
side uses the result of IFactory.queryEffects() to decide which effect implementation should
be part of the proxy and which should not.
Only "name", "library", "uuid", and "type" attributes in "effects" element are meaningful
and parsed out by EffectConfig class, all other attributes are ignored.
Only "name" and "uuid" attributes in "effectProxy" element are meaningful and parsed out
by EffectConfig class, all other attributes are ignored.
-->
<effects>
<effect name="automatic_gain_control_v2" library="pre_processing" uuid="89f38e65-d4d2-4d64-ad0e-2b3e799ea886"/>
<effect name="bassboost" library="bundle" uuid="8631f300-72e2-11df-b57e-0002a5d5c51b"/>
<effect name="downmix" library="downmix" uuid="93f04452-e4fe-41cc-91f9-e475b6d1d69f"/>
<effect name="dynamics_processing" library="dynamics_processing" uuid="e0e6539b-1781-7261-676f-6d7573696340"/>
<effect name="eraser" library="erasersw" uuid="fa81ab46-588b-11ed-9b6a-0242ac120002"/>
<effect name="haptic_generator" library="haptic_generator" uuid="97c4acd1-8b82-4f2f-832e-c2fe5d7a9931"/>
<effect name="loudness_enhancer" library="loudness_enhancer" uuid="fa415329-2034-4bea-b5dc-5b381c8d1e2c"/>
<effect name="reverb_env_aux" library="reverb" uuid="4a387fc0-8ab3-11df-8bad-0002a5d5c51b"/>
<effect name="reverb_env_ins" library="reverb" uuid="c7a511a0-a3bb-11df-860e-0002a5d5c51b"/>
<effect name="reverb_pre_aux" library="reverb" uuid="f29a1400-a3bb-11df-8ddc-0002a5d5c51b"/>
<effect name="reverb_pre_ins" library="reverb" uuid="172cdf00-a3bc-11df-a72f-0002a5d5c51b"/>
<effect name="virtualizer" library="bundle" uuid="1d4033c0-8557-11df-9f2d-0002a5d5c51b"/>
<effect name="visualizer" library="visualizer" uuid="d069d9e0-8329-11df-9168-0002a5d5c51b"/>
<effect name="volume" library="bundle" uuid="119341a0-8469-11df-81f9-0002a5d5c51b"/>
<effect name="equalizer" library="bundle" uuid="ce772f20-847d-11df-bb17-0002a5d5c51b"/>
<effect name="extension_effect" library="extensioneffect" uuid="fa81dd00-588b-11ed-9b6a-0242ac120002" type="fa81de0e-588b-11ed-9b6a-0242ac120002"/>
<effect name="acoustic_echo_canceler" library="pre_processing" uuid="bb392ec0-8d4d-11e0-a896-0002a5d5c51b"/>
<effect name="noise_suppression" library="pre_processing" uuid="c06c8400-8e06-11e0-9cb6-0002a5d5c51b"/>
</effects>
<preprocess>
<stream type="voice_communication">
<apply effect="acoustic_echo_canceler"/>
<apply effect="noise_suppression"/>
</stream>
</preprocess>
<!-- Audio pre processor configurations.
The pre processor configuration is described in a "preprocess" element and consists in a
list of elements each describing pre processor settings for a given use case or "stream".
Each stream element has a "type" attribute corresponding to the input source used.
Valid types are these defined in system/hardware/interfaces/media/aidl/android/media/audio/
common/AudioSource.aidl.
Each "stream" element contains a list of "apply" elements indicating one effect to apply.
The effect to apply is designated by its name in the "effects" elements.
If more than one effect applies to one stream, the audio framework will apply them
in the same sequence as they are listed in the "stream" element.
<preprocess>
<stream type="voice_communication">
<apply effect="aec"/>
<apply effect="ns"/>
</stream>
</preprocess>
-->
<!-- Audio post processor configurations.
The post processor configuration is described in a "postprocess" element and consists in a
list of elements each describing post processor settings for a given use case or "stream".
Each stream element has a "type" attribute corresponding to the stream type used.
Valid types are these defined in system/hardware/interfaces/media/aidl/android/media/audio/
common/AudioStreamType.aidl.
Each "stream" element contains a list of "apply" elements indicating one effect to apply.
The effect to apply is designated by its name in the "effects" elements.
If more than one effect applies to one stream, the audio framework will apply them
in the same sequence as they are listed in the "stream" element.
<postprocess>
<stream type="music">
<apply effect="music_post_proc"/>
</stream>
<stream type="voice_call">
<apply effect="voice_post_proc"/>
</stream>
<stream type="notification">
<apply effect="notification_post_proc"/>
</stream>
</postprocess>
-->
<!-- Device pre/post processor configurations.
The device pre/post processor configuration is described in a deviceEffects element and
consists in a list of elements each describing pre/post processor settings for a given
device.
Each device element has a "type" attribute corresponding to the device type (e.g.
speaker, bus), an "address" attribute corresponding to the device address and contains a
list of "apply" elements indicating one effect to apply.
If the device is a source, only pre processing effects are expected, if the
device is a sink, only post processing effects are expected.
The effect to apply is designated by its name in the "effects" elements.
The effect will be enabled by default and the audio framework will automatically add
and activate the effect if the given port is involved in an audio patch.
If the patch is "HW", the effect must be HW accelerated.
Note:
-Devices are not expected to be always attached; they may be loaded dynamically. As the device
effect manager is getting called on any audio patch operation, it will ensure if the given
device is involved in an audio patch and attach the requested effect.
-Address is optional. If not set, the match to instantiate the device effect will be done
using the given type and device (of this type) with empty address only.
<deviceEffects>
<device type="AUDIO_DEVICE_OUT_BUS" address="BUS00_USAGE_MAIN">
<apply effect="equalizer"/>
</device>
<device type="AUDIO_DEVICE_OUT_BUS" address="BUS04_USAGE_VOICE">
<apply effect="volume"/>
</device>
<device type="AUDIO_DEVICE_IN_BUILTIN_MIC" address="bottom">
<apply effect="agc"/>
</device>
</deviceEffects>
-->
</audio_effects_conf>

View File

@@ -0,0 +1,40 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package {
default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "hardware_interfaces_license"
// to get the below license kinds:
// SPDX-license-identifier-Apache-2.0
default_applicable_licenses: ["hardware_interfaces_license"],
}
cc_library_shared {
name: "libagc1sw",
defaults: [
"aidlaudioeffectservice_defaults",
],
srcs: [
"AutomaticGainControlV1Sw.cpp",
":effectCommonFile",
],
relative_install_path: "soundfx",
visibility: [
"//hardware/interfaces/audio/aidl/default:__subpackages__",
],
}

View File

@@ -0,0 +1,224 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define LOG_TAG "AHAL_AutomaticGainControlV1Sw"
#include <android-base/logging.h>
#include <system/audio_effects/effect_uuid.h>
#include "AutomaticGainControlV1Sw.h"
using aidl::android::hardware::audio::effect::AutomaticGainControlV1Sw;
using aidl::android::hardware::audio::effect::Descriptor;
using aidl::android::hardware::audio::effect::getEffectImplUuidAutomaticGainControlV1Sw;
using aidl::android::hardware::audio::effect::getEffectTypeUuidAutomaticGainControlV1;
using aidl::android::hardware::audio::effect::IEffect;
using aidl::android::media::audio::common::AudioUuid;
// Factory entry point: instantiates the AGC1 software effect. Rejects any
// implementation UUID other than this effect's, and a null output pointer.
extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid,
                                           std::shared_ptr<IEffect>* instanceSpp) {
    if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidAutomaticGainControlV1Sw()) {
        LOG(ERROR) << __func__ << "uuid not supported";
        return EX_ILLEGAL_ARGUMENT;
    }
    if (!instanceSpp) {
        LOG(ERROR) << __func__ << " invalid input parameter!";
        return EX_ILLEGAL_ARGUMENT;
    }
    *instanceSpp = ndk::SharedRefBase::make<AutomaticGainControlV1Sw>();
    LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
    return EX_NONE;
}
// Factory entry point: returns the static descriptor for this implementation UUID.
extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) {
    if (in_impl_uuid != nullptr && *in_impl_uuid == getEffectImplUuidAutomaticGainControlV1Sw()) {
        *_aidl_return = AutomaticGainControlV1Sw::kDescriptor;
        return EX_NONE;
    }
    LOG(ERROR) << __func__ << "uuid not supported";
    return EX_ILLEGAL_ARGUMENT;
}
namespace aidl::android::hardware::audio::effect {
// Effect name reported to clients through the descriptor.
const std::string AutomaticGainControlV1Sw::kEffectName = "AutomaticGainControlV1Sw";
// Accepted parameter ranges; setParameterSpecific rejects values outside them.
// NOTE(review): the units of the raw bounds (-3100..0 and 0..9000) are not defined
// here — confirm against AutomaticGainControlV1.aidl.
const std::vector<Range::AutomaticGainControlV1Range> AutomaticGainControlV1Sw::kRanges = {
        MAKE_RANGE(AutomaticGainControlV1, targetPeakLevelDbFs, -3100, 0),
        MAKE_RANGE(AutomaticGainControlV1, maxCompressionGainDb, 0, 9000)};
// Capability advertised in the descriptor, wrapping the ranges above.
const Capability AutomaticGainControlV1Sw::kCapability = {
        .range = AutomaticGainControlV1Sw::kRanges};
// Static descriptor: type/implementation UUIDs, insertion flags, and capability.
const Descriptor AutomaticGainControlV1Sw::kDescriptor = {
        .common = {.id = {.type = getEffectTypeUuidAutomaticGainControlV1(),
                          .uuid = getEffectImplUuidAutomaticGainControlV1Sw(),
                          .proxy = std::nullopt},
                   .flags = {.type = Flags::Type::INSERT,
                             .insert = Flags::Insert::FIRST,
                             .volume = Flags::Volume::CTRL},
                   .name = AutomaticGainControlV1Sw::kEffectName,
                   .implementor = "The Android Open Source Project"},
        .capability = AutomaticGainControlV1Sw::kCapability};
// Returns the static descriptor of this effect implementation.
ndk::ScopedAStatus AutomaticGainControlV1Sw::getDescriptor(Descriptor* _aidl_return) {
    LOG(DEBUG) << __func__ << kDescriptor.toString();
    *_aidl_return = kDescriptor;
    return ndk::ScopedAStatus::ok();
}
// Sets one AGC1 parameter carried in 'specific'. Validates that the payload is an
// AutomaticGainControlV1 union, that a context exists, and that the value is within
// the ranges declared in kRanges, then forwards the value to the context.
ndk::ScopedAStatus AutomaticGainControlV1Sw::setParameterSpecific(
        const Parameter::Specific& specific) {
    RETURN_IF(Parameter::Specific::automaticGainControlV1 != specific.getTag(), EX_ILLEGAL_ARGUMENT,
              "EffectNotSupported");
    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
    auto& param = specific.get<Parameter::Specific::automaticGainControlV1>();
    // Reject values outside the advertised capability ranges.
    RETURN_IF(!inRange(param, kRanges), EX_ILLEGAL_ARGUMENT, "outOfRange");
    auto tag = param.getTag();
    switch (tag) {
        case AutomaticGainControlV1::targetPeakLevelDbFs: {
            RETURN_IF(mContext->setTargetPeakLevel(
                              param.get<AutomaticGainControlV1::targetPeakLevelDbFs>()) !=
                              RetCode::SUCCESS,
                      EX_ILLEGAL_ARGUMENT, "targetPeakLevelNotSupported");
            return ndk::ScopedAStatus::ok();
        }
        case AutomaticGainControlV1::maxCompressionGainDb: {
            RETURN_IF(mContext->setMaxCompressionGain(
                              param.get<AutomaticGainControlV1::maxCompressionGainDb>()) !=
                              RetCode::SUCCESS,
                      EX_ILLEGAL_ARGUMENT, "maxCompressionGainNotSupported");
            return ndk::ScopedAStatus::ok();
        }
        case AutomaticGainControlV1::enableLimiter: {
            RETURN_IF(
                    mContext->setEnableLimiter(
                            param.get<AutomaticGainControlV1::enableLimiter>()) != RetCode::SUCCESS,
                    EX_ILLEGAL_ARGUMENT, "enableLimiterNotSupported");
            return ndk::ScopedAStatus::ok();
        }
        default: {
            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
                    EX_ILLEGAL_ARGUMENT, "AutomaticGainControlV1TagNotSupported");
        }
    }
}
// Retrieves one AGC1 parameter identified by 'id' into 'specific'. The outer id tag
// must be automaticGainControlV1Tag; the inner id selects the concrete parameter.
ndk::ScopedAStatus AutomaticGainControlV1Sw::getParameterSpecific(const Parameter::Id& id,
                                                                  Parameter::Specific* specific) {
    auto tag = id.getTag();
    RETURN_IF(Parameter::Id::automaticGainControlV1Tag != tag, EX_ILLEGAL_ARGUMENT, "wrongIdTag");
    auto specificId = id.get<Parameter::Id::automaticGainControlV1Tag>();
    auto specificIdTag = specificId.getTag();
    switch (specificIdTag) {
        case AutomaticGainControlV1::Id::commonTag:
            return getParameterAutomaticGainControlV1(
                    specificId.get<AutomaticGainControlV1::Id::commonTag>(), specific);
        default:
            // Log the inner tag: 'tag' was already verified above to be
            // automaticGainControlV1Tag, so 'specificIdTag' is the value that is
            // actually unsupported here (the original logged the wrong variable).
            LOG(ERROR) << __func__ << " unsupported tag: " << toString(specificIdTag);
            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
                    EX_ILLEGAL_ARGUMENT, "AutomaticGainControlV1TagNotSupported");
    }
}
// Reads the value selected by `tag` out of the software context and wraps it
// in the Parameter::Specific union. Fails with EX_NULL_POINTER if no context
// exists and EX_ILLEGAL_ARGUMENT for an unknown tag.
ndk::ScopedAStatus AutomaticGainControlV1Sw::getParameterAutomaticGainControlV1(
        const AutomaticGainControlV1::Tag& tag, Parameter::Specific* specific) {
    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
    AutomaticGainControlV1 result;
    switch (tag) {
        case AutomaticGainControlV1::targetPeakLevelDbFs:
            result.set<AutomaticGainControlV1::targetPeakLevelDbFs>(
                    mContext->getTargetPeakLevel());
            break;
        case AutomaticGainControlV1::maxCompressionGainDb:
            result.set<AutomaticGainControlV1::maxCompressionGainDb>(
                    mContext->getMaxCompressionGain());
            break;
        case AutomaticGainControlV1::enableLimiter:
            result.set<AutomaticGainControlV1::enableLimiter>(mContext->getEnableLimiter());
            break;
        default:
            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
                    EX_ILLEGAL_ARGUMENT, "AutomaticGainControlV1TagNotSupported");
    }
    specific->set<Parameter::Specific::automaticGainControlV1>(result);
    return ndk::ScopedAStatus::ok();
}
// Lazily creates the effect context; if one already exists it is reused so
// repeated calls return the same instance.
std::shared_ptr<EffectContext> AutomaticGainControlV1Sw::createContext(
        const Parameter::Common& common) {
    if (!mContext) {
        mContext =
                std::make_shared<AutomaticGainControlV1SwContext>(1 /* statusFmqDepth */, common);
    } else {
        LOG(DEBUG) << __func__ << " context already exist";
    }
    return mContext;
}
// Drops the context reference. Releasing when no context exists is a no-op;
// this call always reports success.
RetCode AutomaticGainControlV1Sw::releaseContext() {
    mContext.reset();
    return RetCode::SUCCESS;
}
// Processing method running in EffectWorker thread.
// Placeholder implementation: copies input to output unchanged.
IEffect::Status AutomaticGainControlV1Sw::effectProcessImpl(float* in, float* out, int samples) {
    // TODO: get data buffer and process.
    LOG(DEBUG) << __func__ << " in " << in << " out " << out << " samples " << samples;
    for (int i = 0; i < samples; ++i) {
        out[i] = in[i];
    }
    return {STATUS_OK, samples, samples};
}
// ---- AutomaticGainControlV1SwContext accessors ----
// Trivial software stubs: each setter only records the requested value (range
// validation is done by the caller against kRanges) and always succeeds.

RetCode AutomaticGainControlV1SwContext::setTargetPeakLevel(int targetPeakLevel) {
    mTargetPeakLevel = targetPeakLevel;
    return RetCode::SUCCESS;
}

int AutomaticGainControlV1SwContext::getTargetPeakLevel() {
    return mTargetPeakLevel;
}

RetCode AutomaticGainControlV1SwContext::setMaxCompressionGain(int maxCompressionGain) {
    mMaxCompressionGain = maxCompressionGain;
    return RetCode::SUCCESS;
}

int AutomaticGainControlV1SwContext::getMaxCompressionGain() {
    return mMaxCompressionGain;
}

RetCode AutomaticGainControlV1SwContext::setEnableLimiter(bool enableLimiter) {
    mEnableLimiter = enableLimiter;
    return RetCode::SUCCESS;
}

bool AutomaticGainControlV1SwContext::getEnableLimiter() {
    return mEnableLimiter;
}
} // namespace aidl::android::hardware::audio::effect

View File

@@ -0,0 +1,76 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include "effect-impl/EffectImpl.h"
namespace aidl::android::hardware::audio::effect {
// Holds the mutable state of the software AGC-V1 effect instance.
// Setters only store the values; no actual gain processing is applied
// (see AutomaticGainControlV1Sw::effectProcessImpl, which is a pass-through).
class AutomaticGainControlV1SwContext final : public EffectContext {
  public:
    AutomaticGainControlV1SwContext(int statusDepth, const Parameter::Common& common)
        : EffectContext(statusDepth, common) {
        LOG(DEBUG) << __func__;
    }
    RetCode setTargetPeakLevel(int targetPeakLevel);
    int getTargetPeakLevel();
    RetCode setMaxCompressionGain(int maxCompressionGainDb);
    int getMaxCompressionGain();
    RetCode setEnableLimiter(bool enableLimiter);
    bool getEnableLimiter();

  private:
    // Last values accepted by the setters; defaults used until set.
    int mTargetPeakLevel = 0;
    int mMaxCompressionGain = 0;
    bool mEnableLimiter = false;
};
// Software (reference) implementation of the AGC-V1 effect.
// Parameter handling is fully implemented; audio processing is a pass-through
// placeholder. Methods annotated REQUIRES(mImplMutex) are called by EffectImpl
// with the implementation mutex held.
class AutomaticGainControlV1Sw final : public EffectImpl {
  public:
    static const std::string kEffectName;
    static const bool kStrengthSupported;
    static const Capability kCapability;
    static const Descriptor kDescriptor;
    AutomaticGainControlV1Sw() { LOG(DEBUG) << __func__; }
    ~AutomaticGainControlV1Sw() {
        // Release worker/context resources before destruction.
        cleanUp();
        LOG(DEBUG) << __func__;
    }
    ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
            REQUIRES(mImplMutex) override;
    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific)
            REQUIRES(mImplMutex) override;
    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common)
            REQUIRES(mImplMutex) override;
    RetCode releaseContext() REQUIRES(mImplMutex) override;
    std::string getEffectName() override { return kEffectName; };
    IEffect::Status effectProcessImpl(float* in, float* out, int samples)
            REQUIRES(mImplMutex) override;

  private:
    // Valid ranges used to reject out-of-range parameter values.
    static const std::vector<Range::AutomaticGainControlV1Range> kRanges;
    std::shared_ptr<AutomaticGainControlV1SwContext> mContext GUARDED_BY(mImplMutex);
    // Helper for getParameterSpecific(): reads one value selected by `tag`.
    ndk::ScopedAStatus getParameterAutomaticGainControlV1(const AutomaticGainControlV1::Tag& tag,
                                                          Parameter::Specific* specific)
            REQUIRES(mImplMutex);
};
} // namespace aidl::android::hardware::audio::effect

View File

@@ -0,0 +1,40 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package {
    default_team: "trendy_team_android_media_audio_framework",
    // See: http://go/android-license-faq
    // A large-scale-change added 'default_applicable_licenses' to import
    // all of the 'license_kinds' from "hardware_interfaces_license"
    // to get the below license kinds:
    // SPDX-license-identifier-Apache-2.0
    default_applicable_licenses: ["hardware_interfaces_license"],
}

// Software (reference) implementation of the AGC-V2 audio effect, installed
// under the "soundfx" directory and loaded by the AIDL effect service.
cc_library_shared {
    name: "libagc2sw",
    defaults: [
        "aidlaudioeffectservice_defaults",
    ],
    srcs: [
        "AutomaticGainControlV2Sw.cpp",
        ":effectCommonFile",
    ],
    relative_install_path: "soundfx",
    visibility: [
        "//hardware/interfaces/audio/aidl/default:__subpackages__",
    ],
}

View File

@@ -0,0 +1,228 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <algorithm>
#include <cstddef>
#include <memory>
#define LOG_TAG "AHAL_AutomaticGainControlV2Sw"
#include <android-base/logging.h>
#include <fmq/AidlMessageQueue.h>
#include <system/audio_effects/effect_uuid.h>
#include "AutomaticGainControlV2Sw.h"
using aidl::android::hardware::audio::effect::AutomaticGainControlV2Sw;
using aidl::android::hardware::audio::effect::Descriptor;
using aidl::android::hardware::audio::effect::getEffectImplUuidAutomaticGainControlV2Sw;
using aidl::android::hardware::audio::effect::getEffectTypeUuidAutomaticGainControlV2;
using aidl::android::hardware::audio::effect::IEffect;
using aidl::android::media::audio::common::AudioUuid;
// Factory entry point: instantiates the software AGC-V2 effect when the
// requested implementation UUID matches this library.
extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid,
                                           std::shared_ptr<IEffect>* instanceSpp) {
    if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidAutomaticGainControlV2Sw()) {
        // Add the missing separator so the log is not "createEffectuuid not supported".
        LOG(ERROR) << __func__ << ": uuid not supported";
        return EX_ILLEGAL_ARGUMENT;
    }
    if (instanceSpp) {
        *instanceSpp = ndk::SharedRefBase::make<AutomaticGainControlV2Sw>();
        LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
        return EX_NONE;
    } else {
        LOG(ERROR) << __func__ << " invalid input parameter!";
        return EX_ILLEGAL_ARGUMENT;
    }
}
// Factory entry point: returns this effect's static descriptor without
// creating an instance. Fails if the UUID does not match this library.
extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) {
    if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidAutomaticGainControlV2Sw()) {
        // Add the missing separator so the log is not "queryEffectuuid not supported".
        LOG(ERROR) << __func__ << ": uuid not supported";
        return EX_ILLEGAL_ARGUMENT;
    }
    *_aidl_return = AutomaticGainControlV2Sw::kDescriptor;
    return EX_NONE;
}
namespace aidl::android::hardware::audio::effect {
// Name reported in the effect descriptor.
const std::string AutomaticGainControlV2Sw::kEffectName = "AutomaticGainControlV2Sw";
// Accepted ranges for the settable parameters; used by inRange() in
// setParameterSpecific() to reject out-of-range values.
const std::vector<Range::AutomaticGainControlV2Range> AutomaticGainControlV2Sw::kRanges = {
        MAKE_RANGE(AutomaticGainControlV2, fixedDigitalGainMb, 0, 50000),
        MAKE_RANGE(AutomaticGainControlV2, saturationMarginMb, 0, 10000)};
const Capability AutomaticGainControlV2Sw::kCapability = {
        .range = AutomaticGainControlV2Sw::kRanges};
// Static descriptor returned by getDescriptor() and the queryEffect() entry point.
const Descriptor AutomaticGainControlV2Sw::kDescriptor = {
        .common = {.id = {.type = getEffectTypeUuidAutomaticGainControlV2(),
                          .uuid = getEffectImplUuidAutomaticGainControlV2Sw(),
                          .proxy = std::nullopt},
                   .flags = {.type = Flags::Type::INSERT,
                             .insert = Flags::Insert::FIRST,
                             .volume = Flags::Volume::CTRL},
                   .name = AutomaticGainControlV2Sw::kEffectName,
                   .implementor = "The Android Open Source Project"},
        .capability = AutomaticGainControlV2Sw::kCapability};
// Copies the static descriptor of this software AGC-V2 effect into the
// caller-provided out-parameter. Always succeeds.
ndk::ScopedAStatus AutomaticGainControlV2Sw::getDescriptor(Descriptor* _aidl_return) {
    *_aidl_return = kDescriptor;
    LOG(DEBUG) << __func__ << _aidl_return->toString();
    return ndk::ScopedAStatus::ok();
}
// Applies one AGC-V2 parameter to the software context.
// Validates the union tag, the context pointer, and the value against
// kRanges before forwarding the value; returns EX_ILLEGAL_ARGUMENT on any
// validation or context failure.
ndk::ScopedAStatus AutomaticGainControlV2Sw::setParameterSpecific(
        const Parameter::Specific& specific) {
    RETURN_IF(Parameter::Specific::automaticGainControlV2 != specific.getTag(), EX_ILLEGAL_ARGUMENT,
              "EffectNotSupported");
    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
    auto& agcParam = specific.get<Parameter::Specific::automaticGainControlV2>();
    RETURN_IF(!inRange(agcParam, kRanges), EX_ILLEGAL_ARGUMENT, "outOfRange");
    const auto paramTag = agcParam.getTag();
    switch (paramTag) {
        case AutomaticGainControlV2::fixedDigitalGainMb: {
            const auto gain = agcParam.get<AutomaticGainControlV2::fixedDigitalGainMb>();
            RETURN_IF(mContext->setDigitalGain(gain) != RetCode::SUCCESS, EX_ILLEGAL_ARGUMENT,
                      "digitalGainNotSupported");
            return ndk::ScopedAStatus::ok();
        }
        case AutomaticGainControlV2::levelEstimator: {
            const auto estimator = agcParam.get<AutomaticGainControlV2::levelEstimator>();
            RETURN_IF(mContext->setLevelEstimator(estimator) != RetCode::SUCCESS,
                      EX_ILLEGAL_ARGUMENT, "levelEstimatorNotSupported");
            return ndk::ScopedAStatus::ok();
        }
        case AutomaticGainControlV2::saturationMarginMb: {
            const auto margin = agcParam.get<AutomaticGainControlV2::saturationMarginMb>();
            RETURN_IF(mContext->setSaturationMargin(margin) != RetCode::SUCCESS,
                      EX_ILLEGAL_ARGUMENT, "saturationMarginNotSupported");
            return ndk::ScopedAStatus::ok();
        }
        default: {
            LOG(ERROR) << __func__ << " unsupported tag: " << toString(paramTag);
            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
                    EX_ILLEGAL_ARGUMENT, "AutomaticGainControlV2TagNotSupported");
        }
    }
}
// Fetches the AGC-V2 parameter identified by `id` into `specific`.
// The outer Parameter::Id tag must be automaticGainControlV2Tag; the inner
// id selects which value to read via getParameterAutomaticGainControlV2().
ndk::ScopedAStatus AutomaticGainControlV2Sw::getParameterSpecific(const Parameter::Id& id,
                                                                  Parameter::Specific* specific) {
    auto tag = id.getTag();
    RETURN_IF(Parameter::Id::automaticGainControlV2Tag != tag, EX_ILLEGAL_ARGUMENT, "wrongIdTag");
    auto specificId = id.get<Parameter::Id::automaticGainControlV2Tag>();
    auto specificIdTag = specificId.getTag();
    switch (specificIdTag) {
        case AutomaticGainControlV2::Id::commonTag:
            return getParameterAutomaticGainControlV2(
                    specificId.get<AutomaticGainControlV2::Id::commonTag>(), specific);
        default:
            // Log the inner (rejected) tag; the outer tag was already verified
            // above and would always print "automaticGainControlV2Tag".
            LOG(ERROR) << __func__ << " unsupported tag: " << toString(specificIdTag);
            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
                    EX_ILLEGAL_ARGUMENT, "AutomaticGainControlV2TagNotSupported");
    }
}
// Reads the value selected by `tag` out of the software context and wraps it
// in the Parameter::Specific union. Fails with EX_NULL_POINTER if no context
// exists and EX_ILLEGAL_ARGUMENT for an unknown tag.
ndk::ScopedAStatus AutomaticGainControlV2Sw::getParameterAutomaticGainControlV2(
        const AutomaticGainControlV2::Tag& tag, Parameter::Specific* specific) {
    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
    AutomaticGainControlV2 result;
    switch (tag) {
        case AutomaticGainControlV2::fixedDigitalGainMb:
            result.set<AutomaticGainControlV2::fixedDigitalGainMb>(mContext->getDigitalGain());
            break;
        case AutomaticGainControlV2::levelEstimator:
            result.set<AutomaticGainControlV2::levelEstimator>(mContext->getLevelEstimator());
            break;
        case AutomaticGainControlV2::saturationMarginMb:
            result.set<AutomaticGainControlV2::saturationMarginMb>(
                    mContext->getSaturationMargin());
            break;
        default:
            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
                    EX_ILLEGAL_ARGUMENT, "AutomaticGainControlV2TagNotSupported");
    }
    specific->set<Parameter::Specific::automaticGainControlV2>(result);
    return ndk::ScopedAStatus::ok();
}
// Lazily creates the effect context; if one already exists it is reused so
// repeated calls return the same instance.
std::shared_ptr<EffectContext> AutomaticGainControlV2Sw::createContext(
        const Parameter::Common& common) {
    if (!mContext) {
        mContext =
                std::make_shared<AutomaticGainControlV2SwContext>(1 /* statusFmqDepth */, common);
    } else {
        LOG(DEBUG) << __func__ << " context already exist";
    }
    return mContext;
}
// Drops the context reference. Releasing when no context exists is a no-op;
// this call always reports success.
RetCode AutomaticGainControlV2Sw::releaseContext() {
    mContext.reset();
    return RetCode::SUCCESS;
}
// Processing method running in EffectWorker thread.
// Placeholder implementation: copies input to output unchanged.
IEffect::Status AutomaticGainControlV2Sw::effectProcessImpl(float* in, float* out, int samples) {
    // TODO: get data buffer and process.
    LOG(DEBUG) << __func__ << " in " << in << " out " << out << " samples " << samples;
    for (int i = 0; i < samples; ++i) {
        out[i] = in[i];
    }
    return {STATUS_OK, samples, samples};
}
// ---- AutomaticGainControlV2SwContext accessors ----
// Trivial software stubs: each setter only records the requested value (range
// validation is done by the caller against kRanges) and always succeeds.

RetCode AutomaticGainControlV2SwContext::setDigitalGain(int gain) {
    mDigitalGain = gain;
    return RetCode::SUCCESS;
}

int AutomaticGainControlV2SwContext::getDigitalGain() {
    return mDigitalGain;
}

RetCode AutomaticGainControlV2SwContext::setLevelEstimator(
        AutomaticGainControlV2::LevelEstimator levelEstimator) {
    mLevelEstimator = levelEstimator;
    return RetCode::SUCCESS;
}

AutomaticGainControlV2::LevelEstimator AutomaticGainControlV2SwContext::getLevelEstimator() {
    return mLevelEstimator;
}

RetCode AutomaticGainControlV2SwContext::setSaturationMargin(int margin) {
    mSaturationMargin = margin;
    return RetCode::SUCCESS;
}

int AutomaticGainControlV2SwContext::getSaturationMargin() {
    return mSaturationMargin;
}
} // namespace aidl::android::hardware::audio::effect

View File

@@ -0,0 +1,82 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <aidl/android/hardware/audio/effect/BnEffect.h>
#include <fmq/AidlMessageQueue.h>
#include <cstdlib>
#include <memory>
#include "effect-impl/EffectImpl.h"
namespace aidl::android::hardware::audio::effect {
// Holds the mutable state of the software AGC-V2 effect instance.
// Setters only store the values; no actual gain processing is applied
// (see AutomaticGainControlV2Sw::effectProcessImpl, which is a pass-through).
class AutomaticGainControlV2SwContext final : public EffectContext {
  public:
    AutomaticGainControlV2SwContext(int statusDepth, const Parameter::Common& common)
        : EffectContext(statusDepth, common) {
        LOG(DEBUG) << __func__;
    }
    RetCode setDigitalGain(int gain);
    int getDigitalGain();
    RetCode setLevelEstimator(AutomaticGainControlV2::LevelEstimator levelEstimator);
    AutomaticGainControlV2::LevelEstimator getLevelEstimator();
    RetCode setSaturationMargin(int margin);
    int getSaturationMargin();

  private:
    // Last values accepted by the setters; defaults used until set.
    int mDigitalGain = 0;
    AutomaticGainControlV2::LevelEstimator mLevelEstimator =
            AutomaticGainControlV2::LevelEstimator::RMS;
    int mSaturationMargin = 0;
};
// Software (reference) implementation of the AGC-V2 effect.
// Parameter handling is fully implemented; audio processing is a pass-through
// placeholder. Methods annotated REQUIRES(mImplMutex) are called by EffectImpl
// with the implementation mutex held.
class AutomaticGainControlV2Sw final : public EffectImpl {
  public:
    static const std::string kEffectName;
    static const bool kStrengthSupported;
    static const Capability kCapability;
    static const Descriptor kDescriptor;
    AutomaticGainControlV2Sw() { LOG(DEBUG) << __func__; }
    ~AutomaticGainControlV2Sw() {
        // Release worker/context resources before destruction.
        cleanUp();
        LOG(DEBUG) << __func__;
    }
    ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
            REQUIRES(mImplMutex) override;
    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific)
            REQUIRES(mImplMutex) override;
    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common)
            REQUIRES(mImplMutex) override;
    RetCode releaseContext() REQUIRES(mImplMutex) override;
    std::string getEffectName() override { return kEffectName; };
    IEffect::Status effectProcessImpl(float* in, float* out, int samples)
            REQUIRES(mImplMutex) override;

  private:
    // Valid ranges used to reject out-of-range parameter values.
    static const std::vector<Range::AutomaticGainControlV2Range> kRanges;
    std::shared_ptr<AutomaticGainControlV2SwContext> mContext GUARDED_BY(mImplMutex);
    // Helper for getParameterSpecific(): reads one value selected by `tag`.
    ndk::ScopedAStatus getParameterAutomaticGainControlV2(const AutomaticGainControlV2::Tag& tag,
                                                          Parameter::Specific* specific)
            REQUIRES(mImplMutex);
};
} // namespace aidl::android::hardware::audio::effect

View File

@@ -0,0 +1,40 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package {
    default_team: "trendy_team_android_media_audio_framework",
    // See: http://go/android-license-faq
    // A large-scale-change added 'default_applicable_licenses' to import
    // all of the 'license_kinds' from "hardware_interfaces_license"
    // to get the below license kinds:
    // SPDX-license-identifier-Apache-2.0
    default_applicable_licenses: ["hardware_interfaces_license"],
}

// Software (reference) implementation of the BassBoost audio effect, installed
// under the "soundfx" directory and loaded by the AIDL effect service.
cc_library_shared {
    name: "libbassboostsw",
    defaults: [
        "aidlaudioeffectservice_defaults",
    ],
    srcs: [
        "BassBoostSw.cpp",
        ":effectCommonFile",
    ],
    relative_install_path: "soundfx",
    visibility: [
        "//hardware/interfaces/audio/aidl/default:__subpackages__",
    ],
}

View File

@@ -0,0 +1,176 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <algorithm>
#include <cstddef>
#include <memory>
#define LOG_TAG "AHAL_BassBoostSw"
#include <android-base/logging.h>
#include <fmq/AidlMessageQueue.h>
#include <system/audio_effects/effect_uuid.h>
#include "BassBoostSw.h"
using aidl::android::hardware::audio::effect::BassBoostSw;
using aidl::android::hardware::audio::effect::Descriptor;
using aidl::android::hardware::audio::effect::getEffectImplUuidBassBoostProxy;
using aidl::android::hardware::audio::effect::getEffectImplUuidBassBoostSw;
using aidl::android::hardware::audio::effect::getEffectTypeUuidBassBoost;
using aidl::android::hardware::audio::effect::IEffect;
using aidl::android::hardware::audio::effect::State;
using aidl::android::media::audio::common::AudioUuid;
// Factory entry point: instantiates the software BassBoost effect when the
// requested implementation UUID matches this library.
extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid,
                                           std::shared_ptr<IEffect>* instanceSpp) {
    if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidBassBoostSw()) {
        // Add the missing separator so the log is not "createEffectuuid not supported".
        LOG(ERROR) << __func__ << ": uuid not supported";
        return EX_ILLEGAL_ARGUMENT;
    }
    if (instanceSpp) {
        *instanceSpp = ndk::SharedRefBase::make<BassBoostSw>();
        LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
        return EX_NONE;
    } else {
        LOG(ERROR) << __func__ << " invalid input parameter!";
        return EX_ILLEGAL_ARGUMENT;
    }
}
// Factory entry point: returns this effect's static descriptor without
// creating an instance. Fails if the UUID does not match this library.
extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) {
    if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidBassBoostSw()) {
        // Add the missing separator so the log is not "queryEffectuuid not supported".
        LOG(ERROR) << __func__ << ": uuid not supported";
        return EX_ILLEGAL_ARGUMENT;
    }
    *_aidl_return = BassBoostSw::kDescriptor;
    return EX_NONE;
}
namespace aidl::android::hardware::audio::effect {
// Name reported in the effect descriptor.
const std::string BassBoostSw::kEffectName = "BassBoostSw";
// Accepted range for BassBoost::strengthPm; used by inRange() in
// setParameterSpecific() to reject out-of-range values.
const std::vector<Range::BassBoostRange> BassBoostSw::kRanges = {
        MAKE_RANGE(BassBoost, strengthPm, 0, 1000)};
const Capability BassBoostSw::kCapability = {.range = {BassBoostSw::kRanges}};
// Static descriptor returned by getDescriptor() and the queryEffect() entry point.
const Descriptor BassBoostSw::kDescriptor = {
        .common = {.id = {.type = getEffectTypeUuidBassBoost(),
                          .uuid = getEffectImplUuidBassBoostSw()},
                   .flags = {.type = Flags::Type::INSERT,
                             .insert = Flags::Insert::FIRST,
                             .volume = Flags::Volume::CTRL},
                   .name = BassBoostSw::kEffectName,
                   .implementor = "The Android Open Source Project"},
        .capability = BassBoostSw::kCapability};
// Copies the static descriptor of this software BassBoost effect into the
// caller-provided out-parameter. Always succeeds.
ndk::ScopedAStatus BassBoostSw::getDescriptor(Descriptor* _aidl_return) {
    *_aidl_return = kDescriptor;
    LOG(DEBUG) << __func__ << _aidl_return->toString();
    return ndk::ScopedAStatus::ok();
}
// Applies one BassBoost parameter (currently only strengthPm) to the software
// context after validating the union tag, the context, and the value range.
ndk::ScopedAStatus BassBoostSw::setParameterSpecific(const Parameter::Specific& specific) {
    RETURN_IF(Parameter::Specific::bassBoost != specific.getTag(), EX_ILLEGAL_ARGUMENT,
              "EffectNotSupported");
    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
    auto& bbParam = specific.get<Parameter::Specific::bassBoost>();
    RETURN_IF(!inRange(bbParam, kRanges), EX_ILLEGAL_ARGUMENT, "outOfRange");
    const auto paramTag = bbParam.getTag();
    if (paramTag == BassBoost::strengthPm) {
        RETURN_IF(mContext->setBbStrengthPm(bbParam.get<BassBoost::strengthPm>()) !=
                          RetCode::SUCCESS,
                  EX_ILLEGAL_ARGUMENT, "strengthPmNotSupported");
        return ndk::ScopedAStatus::ok();
    }
    LOG(ERROR) << __func__ << " unsupported tag: " << toString(paramTag);
    return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
                                                            "BassBoostTagNotSupported");
}
// Fetches the BassBoost parameter identified by `id` into `specific`.
// The outer Parameter::Id tag must be bassBoostTag; the inner id selects
// which value to read via getParameterBassBoost().
ndk::ScopedAStatus BassBoostSw::getParameterSpecific(const Parameter::Id& id,
                                                     Parameter::Specific* specific) {
    auto tag = id.getTag();
    RETURN_IF(Parameter::Id::bassBoostTag != tag, EX_ILLEGAL_ARGUMENT, "wrongIdTag");
    auto bbId = id.get<Parameter::Id::bassBoostTag>();
    auto bbIdTag = bbId.getTag();
    switch (bbIdTag) {
        case BassBoost::Id::commonTag:
            return getParameterBassBoost(bbId.get<BassBoost::Id::commonTag>(), specific);
        default:
            // Log the inner (rejected) tag; the outer tag was already verified
            // above and would always print "bassBoostTag".
            LOG(ERROR) << __func__ << " unsupported tag: " << toString(bbIdTag);
            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
                                                                    "BassBoostTagNotSupported");
    }
}
// Reads the value selected by `tag` out of the software context and wraps it
// in the Parameter::Specific union. Fails with EX_NULL_POINTER if no context
// exists and EX_ILLEGAL_ARGUMENT for an unknown tag.
ndk::ScopedAStatus BassBoostSw::getParameterBassBoost(const BassBoost::Tag& tag,
                                                      Parameter::Specific* specific) {
    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
    BassBoost result;
    if (tag == BassBoost::strengthPm) {
        result.set<BassBoost::strengthPm>(mContext->getBbStrengthPm());
    } else {
        LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
        return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
                                                                "BassBoostTagNotSupported");
    }
    specific->set<Parameter::Specific::bassBoost>(result);
    return ndk::ScopedAStatus::ok();
}
// Lazily creates the effect context; if one already exists it is reused so
// repeated calls return the same instance.
std::shared_ptr<EffectContext> BassBoostSw::createContext(const Parameter::Common& common) {
    if (!mContext) {
        mContext = std::make_shared<BassBoostSwContext>(1 /* statusFmqDepth */, common);
    } else {
        LOG(DEBUG) << __func__ << " context already exist";
    }
    return mContext;
}
// Drops the context reference. Releasing when no context exists is a no-op;
// this call always reports success.
RetCode BassBoostSw::releaseContext() {
    mContext.reset();
    return RetCode::SUCCESS;
}
// Processing method running in EffectWorker thread.
// Placeholder implementation: copies input to output unchanged.
IEffect::Status BassBoostSw::effectProcessImpl(float* in, float* out, int samples) {
    // TODO: get data buffer and process.
    LOG(DEBUG) << __func__ << " in " << in << " out " << out << " samples " << samples;
    for (int i = 0; i < samples; ++i) {
        out[i] = in[i];
    }
    return {STATUS_OK, samples, samples};
}
// Records the requested boost strength. Range validation is done by the
// caller against kRanges, so this software stub always succeeds.
RetCode BassBoostSwContext::setBbStrengthPm(int strength) {
    mStrength = strength;
    return RetCode::SUCCESS;
}
} // namespace aidl::android::hardware::audio::effect

View File

@@ -0,0 +1,73 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <aidl/android/hardware/audio/effect/BnEffect.h>
#include <fmq/AidlMessageQueue.h>
#include <cstdlib>
#include <memory>
#include "effect-impl/EffectImpl.h"
namespace aidl::android::hardware::audio::effect {
// Holds the mutable state of the software BassBoost effect instance.
// The setter only stores the value; no actual boost is applied
// (see BassBoostSw::effectProcessImpl, which is a pass-through).
class BassBoostSwContext final : public EffectContext {
  public:
    BassBoostSwContext(int statusDepth, const Parameter::Common& common)
        : EffectContext(statusDepth, common) {
        LOG(DEBUG) << __func__;
    }
    RetCode setBbStrengthPm(int strength);
    int getBbStrengthPm() const { return mStrength; }

  private:
    // Last strength accepted by the setter; 0 until set.
    int mStrength = 0;
};
// Software (reference) implementation of the BassBoost effect.
// Parameter handling is fully implemented; audio processing is a pass-through
// placeholder. Methods annotated REQUIRES(mImplMutex) are called by EffectImpl
// with the implementation mutex held.
class BassBoostSw final : public EffectImpl {
  public:
    static const std::string kEffectName;
    static const Capability kCapability;
    static const Descriptor kDescriptor;
    BassBoostSw() { LOG(DEBUG) << __func__; }
    ~BassBoostSw() {
        // Release worker/context resources before destruction.
        cleanUp();
        LOG(DEBUG) << __func__;
    }
    ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
            REQUIRES(mImplMutex) override;
    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific)
            REQUIRES(mImplMutex) override;
    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common)
            REQUIRES(mImplMutex) override;
    RetCode releaseContext() REQUIRES(mImplMutex) override;
    std::string getEffectName() override { return kEffectName; };
    IEffect::Status effectProcessImpl(float* in, float* out, int samples)
            REQUIRES(mImplMutex) override;

  private:
    // Valid range used to reject out-of-range strength values.
    static const std::vector<Range::BassBoostRange> kRanges;
    std::shared_ptr<BassBoostSwContext> mContext GUARDED_BY(mImplMutex);
    // Helper for getParameterSpecific(): reads one value selected by `tag`.
    ndk::ScopedAStatus getParameterBassBoost(const BassBoost::Tag& tag,
                                             Parameter::Specific* specific) REQUIRES(mImplMutex);
};
} // namespace aidl::android::hardware::audio::effect

View File

@@ -0,0 +1,578 @@
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define LOG_TAG "AHAL_BluetoothAudioPort"
#include <android-base/logging.h>
#include <android-base/stringprintf.h>
#include <audio_utils/primitives.h>
#include <log/log.h>
#include "BluetoothAudioSessionControl.h"
#include "core-impl/DevicePortProxy.h"
using aidl::android::hardware::audio::common::SinkMetadata;
using aidl::android::hardware::audio::common::SourceMetadata;
using aidl::android::hardware::bluetooth::audio::AudioConfiguration;
using aidl::android::hardware::bluetooth::audio::BluetoothAudioSessionControl;
using aidl::android::hardware::bluetooth::audio::BluetoothAudioStatus;
using aidl::android::hardware::bluetooth::audio::ChannelMode;
using aidl::android::hardware::bluetooth::audio::PcmConfiguration;
using aidl::android::hardware::bluetooth::audio::PortStatusCallbacks;
using aidl::android::hardware::bluetooth::audio::PresentationPosition;
using aidl::android::hardware::bluetooth::audio::SessionType;
using aidl::android::media::audio::common::AudioDeviceDescription;
using aidl::android::media::audio::common::AudioDeviceType;
using android::base::StringPrintf;
namespace android::bluetooth::audio::aidl {
namespace {
// The maximum time, in milliseconds, to wait in std::condition_variable::wait_for()
// before a requested state transition is treated as timed out.
constexpr unsigned int kMaxWaitingTimeMs = 4500;
}  // namespace
// Pretty-prints a BluetoothStreamState for logging; unknown values are
// rendered as their hexadecimal numeric value.
std::ostream& operator<<(std::ostream& os, const BluetoothStreamState& state) {
    switch (state) {
        case BluetoothStreamState::DISABLED:
            return os << "DISABLED";
        case BluetoothStreamState::STANDBY:
            return os << "STANDBY";
        case BluetoothStreamState::STARTING:
            return os << "STARTING";
        case BluetoothStreamState::STARTED:
            return os << "STARTED";
        case BluetoothStreamState::SUSPENDING:
            return os << "SUSPENDING";
        case BluetoothStreamState::UNKNOWN:
            return os << "UNKNOWN";
        default:
            // Scoped enums do not implicitly convert when passed through a
            // printf-style varargs call; cast explicitly so "%#hhx" receives a
            // plain integral argument of the expected size.
            return os << android::base::StringPrintf("%#hhx",
                                                     static_cast<unsigned char>(state));
    }
}
// Starts life detached from any Bluetooth audio session: undefined observer
// cookie, DISABLED stream state, and unknown session type until registerPort()
// succeeds.
BluetoothAudioPortAidl::BluetoothAudioPortAidl()
    : mCookie(::aidl::android::hardware::bluetooth::audio::kObserversCookieUndefined),
      mState(BluetoothStreamState::DISABLED),
      mSessionType(SessionType::UNKNOWN) {}

// Tear down the session callback registration together with the object.
BluetoothAudioPortAidl::~BluetoothAudioPortAidl() {
    unregisterPort();
}
// Registers this port with the Bluetooth audio session matching `description`.
// On success the port receives control-result and session-changed callbacks
// keyed by mCookie and transitions to STANDBY. Returns false if the port is
// already in use, the device is unsupported, or registration fails.
bool BluetoothAudioPortAidl::registerPort(const AudioDeviceDescription& description) {
    if (inUse()) {
        LOG(ERROR) << __func__ << debugMessage() << " already in use";
        return false;
    }
    // Map the audio device description to a Bluetooth session type first;
    // an unsupported device aborts registration.
    if (!initSessionType(description)) return false;
    auto control_result_cb = [port = this](uint16_t cookie, bool start_resp,
                                           const BluetoothAudioStatus& status) {
        (void)start_resp;
        port->controlResultHandler(cookie, status);
    };
    auto session_changed_cb = [port = this](uint16_t cookie) {
        port->sessionChangedHandler(cookie);
    };
    // TODO: Add audio_config_changed_cb
    PortStatusCallbacks cbacks = {
            .control_result_cb_ = control_result_cb,
            .session_changed_cb_ = session_changed_cb,
    };
    mCookie = BluetoothAudioSessionControl::RegisterControlResultCback(mSessionType, cbacks);
    // An undefined cookie signals that the session rejected the registration.
    auto isOk = (mCookie != ::aidl::android::hardware::bluetooth::audio::kObserversCookieUndefined);
    if (isOk) {
        // Registration succeeded: the stream is now ready in STANDBY.
        std::lock_guard guard(mCvMutex);
        mState = BluetoothStreamState::STANDBY;
    }
    LOG(DEBUG) << __func__ << debugMessage();
    return isOk;
}
bool BluetoothAudioPortAidl::initSessionType(const AudioDeviceDescription& description) {
if (description.connection == AudioDeviceDescription::CONNECTION_BT_A2DP &&
(description.type == AudioDeviceType::OUT_DEVICE ||
description.type == AudioDeviceType::OUT_HEADPHONE ||
description.type == AudioDeviceType::OUT_SPEAKER)) {
LOG(VERBOSE) << __func__
<< ": device=AUDIO_DEVICE_OUT_BLUETOOTH_A2DP (HEADPHONES/SPEAKER) ("
<< description.toString() << ")";
mSessionType = SessionType::A2DP_SOFTWARE_ENCODING_DATAPATH;
} else if (description.connection == AudioDeviceDescription::CONNECTION_WIRELESS &&
description.type == AudioDeviceType::OUT_HEARING_AID) {
LOG(VERBOSE) << __func__ << ": device=AUDIO_DEVICE_OUT_HEARING_AID (MEDIA/VOICE) ("
<< description.toString() << ")";
mSessionType = SessionType::HEARING_AID_SOFTWARE_ENCODING_DATAPATH;
} else if (description.connection == AudioDeviceDescription::CONNECTION_BT_LE &&
description.type == AudioDeviceType::OUT_HEADSET) {
LOG(VERBOSE) << __func__ << ": device=AUDIO_DEVICE_OUT_BLE_HEADSET (MEDIA/VOICE) ("
<< description.toString() << ")";
mSessionType = SessionType::LE_AUDIO_SOFTWARE_ENCODING_DATAPATH;
} else if (description.connection == AudioDeviceDescription::CONNECTION_BT_LE &&
description.type == AudioDeviceType::OUT_SPEAKER) {
LOG(VERBOSE) << __func__ << ": device=AUDIO_DEVICE_OUT_BLE_SPEAKER (MEDIA) ("
<< description.toString() << ")";
mSessionType = SessionType::LE_AUDIO_SOFTWARE_ENCODING_DATAPATH;
} else if (description.connection == AudioDeviceDescription::CONNECTION_BT_LE &&
description.type == AudioDeviceType::IN_HEADSET) {
LOG(VERBOSE) << __func__ << ": device=AUDIO_DEVICE_IN_BLE_HEADSET (VOICE) ("
<< description.toString() << ")";
mSessionType = SessionType::LE_AUDIO_SOFTWARE_DECODING_DATAPATH;
} else if (description.connection == AudioDeviceDescription::CONNECTION_BT_LE &&
description.type == AudioDeviceType::OUT_BROADCAST) {
LOG(VERBOSE) << __func__ << ": device=AUDIO_DEVICE_OUT_BLE_BROADCAST (MEDIA) ("
<< description.toString() << ")";
mSessionType = SessionType::LE_AUDIO_BROADCAST_SOFTWARE_ENCODING_DATAPATH;
} else {
LOG(ERROR) << __func__ << ": unknown device=" << description.toString();
return false;
}
if (!BluetoothAudioSessionControl::IsSessionReady(mSessionType)) {
LOG(ERROR) << __func__ << ": device=" << description.toString()
<< ", session_type=" << toString(mSessionType) << " is not ready";
return false;
}
return true;
}
// Removes the session callbacks and invalidates the cookie, returning the
// port to the "not in use" state. Safe to call when not registered (warns).
void BluetoothAudioPortAidl::unregisterPort() {
    if (!inUse()) {
        LOG(WARNING) << __func__ << ": BluetoothAudioPortAidl is not in use";
        return;
    }
    BluetoothAudioSessionControl::UnregisterControlResultCback(mSessionType, mCookie);
    mCookie = ::aidl::android::hardware::bluetooth::audio::kObserversCookieUndefined;
    LOG(VERBOSE) << __func__ << debugMessage() << " port unregistered";
}
void BluetoothAudioPortAidl::controlResultHandler(uint16_t cookie,
const BluetoothAudioStatus& status) {
std::lock_guard guard(mCvMutex);
if (!inUse()) {
LOG(ERROR) << "control_result_cb: BluetoothAudioPortAidl is not in use";
return;
}
if (mCookie != cookie) {
LOG(ERROR) << "control_result_cb: proxy of device port (cookie="
<< StringPrintf("%#hx", cookie) << ") is corrupted";
return;
}
BluetoothStreamState previous_state = mState;
LOG(INFO) << "control_result_cb:" << debugMessage() << ", previous_state=" << previous_state
<< ", status=" << toString(status);
switch (previous_state) {
case BluetoothStreamState::STARTED:
/* Only Suspend signal can be send in STARTED state*/
if (status == BluetoothAudioStatus::RECONFIGURATION ||
status == BluetoothAudioStatus::SUCCESS) {
mState = BluetoothStreamState::STANDBY;
} else {
LOG(WARNING) << StringPrintf(
"control_result_cb: status=%s failure for session_type= %s, cookie=%#hx, "
"previous_state=%#hhx",
toString(status).c_str(), toString(mSessionType).c_str(), mCookie,
previous_state);
}
break;
case BluetoothStreamState::STARTING:
if (status == BluetoothAudioStatus::SUCCESS) {
mState = BluetoothStreamState::STARTED;
} else {
// Set to standby since the stack may be busy switching between outputs
LOG(WARNING) << StringPrintf(
"control_result_cb: status=%s failure for session_type= %s, cookie=%#hx, "
"previous_state=%#hhx",
toString(status).c_str(), toString(mSessionType).c_str(), mCookie,
previous_state);
mState = BluetoothStreamState::STANDBY;
}
break;
case BluetoothStreamState::SUSPENDING:
if (status == BluetoothAudioStatus::SUCCESS) {
mState = BluetoothStreamState::STANDBY;
} else {
// It will be failed if the headset is disconnecting, and set to disable
// to wait for re-init again
LOG(WARNING) << StringPrintf(
"control_result_cb: status=%s failure for session_type= %s, cookie=%#hx, "
"previous_state=%#hhx",
toString(status).c_str(), toString(mSessionType).c_str(), mCookie,
previous_state);
mState = BluetoothStreamState::DISABLED;
}
break;
default:
LOG(ERROR) << "control_result_cb: unexpected previous_state="
<< StringPrintf(
"control_result_cb: status=%s failure for session_type= %s, "
"cookie=%#hx, previous_state=%#hhx",
toString(status).c_str(), toString(mSessionType).c_str(), mCookie,
previous_state);
return;
}
mInternalCv.notify_all();
}
// Session callback fired when the Bluetooth audio session changes underneath
// us. Drops the port to DISABLED (forcing re-initialization) and wakes any
// thread blocked in condWaitState().
void BluetoothAudioPortAidl::sessionChangedHandler(uint16_t cookie) {
    std::lock_guard guard(mCvMutex);
    if (!inUse()) {
        LOG(ERROR) << "session_changed_cb: BluetoothAudioPortAidl is not in use";
        return;
    }
    if (mCookie != cookie) {
        LOG(ERROR) << "session_changed_cb: proxy of device port (cookie="
                   << StringPrintf("%#hx", cookie) << ") is corrupted";
        return;
    }
    BluetoothStreamState previous_state = mState;
    LOG(VERBOSE) << "session_changed_cb:" << debugMessage()
                 << ", previous_state=" << previous_state;
    mState = BluetoothStreamState::DISABLED;
    mInternalCv.notify_all();
}
// A port is "in use" between registerPort() and unregisterPort(), i.e. while
// it holds a valid observer cookie from the Bluetooth audio session.
bool BluetoothAudioPortAidl::inUse() const {
    return (mCookie != ::aidl::android::hardware::bluetooth::audio::kObserversCookieUndefined);
}
// Reads the stack's preferred data interval (microseconds) from the current
// session's PCM configuration.
// Returns false if the port is unused or the session config is not PCM;
// `interval_us` is only written on success.
bool BluetoothAudioPortAidl::getPreferredDataIntervalUs(size_t& interval_us) const {
    if (!inUse()) {
        LOG(ERROR) << __func__ << ": BluetoothAudioPortAidl is not in use";
        return false;
    }
    const AudioConfiguration& hal_audio_cfg =
            BluetoothAudioSessionControl::GetAudioConfig(mSessionType);
    if (hal_audio_cfg.getTag() != AudioConfiguration::pcmConfig) {
        LOG(ERROR) << __func__ << ": unsupported audio cfg tag";
        return false;
    }
    interval_us = hal_audio_cfg.get<AudioConfiguration::pcmConfig>().dataIntervalUs;
    return true;
}
/**
 * Fetches the PCM configuration negotiated by the Bluetooth stack for the
 * current session.
 *
 * @param audio_cfg output parameter; written before validation, but callers
 *                  should only rely on it when true is returned.
 * @return false if the port is unused, the session config is not PCM, or the
 *         reported channel mode is UNKNOWN.
 */
bool BluetoothAudioPortAidl::loadAudioConfig(PcmConfiguration& audio_cfg) {
    if (!inUse()) {
        LOG(ERROR) << __func__ << ": BluetoothAudioPortAidl is not in use";
        return false;
    }
    const AudioConfiguration& hal_audio_cfg =
            BluetoothAudioSessionControl::GetAudioConfig(mSessionType);
    if (hal_audio_cfg.getTag() != AudioConfiguration::pcmConfig) {
        LOG(ERROR) << __func__ << ": unsupported audio cfg tag";
        return false;
    }
    audio_cfg = hal_audio_cfg.get<AudioConfiguration::pcmConfig>();
    // Fix: log previously read "state*=" (typo); every other log in this file
    // uses "state=".
    LOG(VERBOSE) << __func__ << debugMessage() << ", state=" << getState() << ", PcmConfig=["
                 << audio_cfg.toString() << "]";
    if (audio_cfg.channelMode == ChannelMode::UNKNOWN) {
        return false;
    }
    return true;
}
/**
 * Output-specific override: when the stack negotiates mono 16-bit PCM, the
 * configuration reported to the audio framework is rewritten to stereo and
 * mIsStereoToMono is set so writeData() downmixes on the fly (workaround
 * required by the Bluetooth stack).
 */
bool BluetoothAudioPortAidlOut::loadAudioConfig(PcmConfiguration& audio_cfg) {
    if (!BluetoothAudioPortAidl::loadAudioConfig(audio_cfg)) return false;
    // WAR to support Mono / 16 bits per sample as the Bluetooth stack requires
    if (audio_cfg.channelMode == ChannelMode::MONO && audio_cfg.bitsPerSample == 16) {
        mIsStereoToMono = true;
        audio_cfg.channelMode = ChannelMode::STEREO;
        // Fix: message previously read "force channels = to be ..." — residue
        // of a dropped format argument.
        LOG(INFO) << __func__ << ": force channels to be AUDIO_CHANNEL_OUT_STEREO";
    }
    return true;
}
// Moves the port from DISABLED to STANDBY. Any other current state is
// rejected (returns false) — this is the only transition standby() performs.
bool BluetoothAudioPortAidl::standby() {
    if (!inUse()) {
        LOG(ERROR) << __func__ << ": BluetoothAudioPortAidl is not in use";
        return false;
    }
    std::lock_guard guard(mCvMutex);
    LOG(VERBOSE) << __func__ << debugMessage() << ", state=" << getState() << " request";
    if (mState == BluetoothStreamState::DISABLED) {
        mState = BluetoothStreamState::STANDBY;
        LOG(VERBOSE) << __func__ << debugMessage() << ", state=" << getState() << " done";
        return true;
    }
    return false;
}
/**
 * Blocks until the port leaves the given transient state (STARTING or
 * SUSPENDING), or until kMaxWaitingTimeMs elapses. The state change is
 * signalled by controlResultHandler()/sessionChangedHandler().
 *
 * @param state the transient state being waited out.
 * @return true only if the expected terminal state was reached (STARTED for
 *         STARTING, STANDBY for SUSPENDING); false on timeout, an unexpected
 *         terminal state, or a non-transient input state.
 */
bool BluetoothAudioPortAidl::condWaitState(BluetoothStreamState state) {
    const auto waitTime = std::chrono::milliseconds(kMaxWaitingTimeMs);
    std::unique_lock lock(mCvMutex);
    base::ScopedLockAssertion lock_assertion(mCvMutex);
    switch (state) {
        case BluetoothStreamState::STARTING: {
            LOG(VERBOSE) << __func__ << debugMessage() << " waiting for STARTED";
            // The predicate re-checks under the lock; on timeout mState is
            // simply re-examined below and the function returns false.
            mInternalCv.wait_for(lock, waitTime, [this] {
                base::ScopedLockAssertion lock_assertion(mCvMutex);
                return mState != BluetoothStreamState::STARTING;
            });
            return mState == BluetoothStreamState::STARTED;
        }
        case BluetoothStreamState::SUSPENDING: {
            LOG(VERBOSE) << __func__ << debugMessage() << " waiting for SUSPENDED";
            mInternalCv.wait_for(lock, waitTime, [this] {
                base::ScopedLockAssertion lock_assertion(mCvMutex);
                return mState != BluetoothStreamState::SUSPENDING;
            });
            return mState == BluetoothStreamState::STANDBY;
        }
        default:
            LOG(WARNING) << __func__ << debugMessage() << " waiting for KNOWN";
            return false;
    }
    // Fix: every switch branch returns, so the previous trailing
    // `return false;` here was unreachable dead code and has been removed.
}
// Asks the Bluetooth stack to start streaming.
// Phase 1: if the port is mid-transition (STARTING/SUSPENDING), wait for that
// transition to settle. Phase 2: from STANDBY, issue StartStream and wait for
// the STARTING -> STARTED confirmation delivered via controlResultHandler().
// Returns false on timeout, stack failure, or an unexpected state.
bool BluetoothAudioPortAidl::start() {
    if (!inUse()) {
        LOG(ERROR) << __func__ << ": BluetoothAudioPortAidl is not in use";
        return false;
    }
    LOG(VERBOSE) << __func__ << debugMessage() << ", state=" << getState()
                 << ", mono=" << (mIsStereoToMono ? "true" : "false") << " request";
    {
        std::unique_lock lock(mCvMutex);
        base::ScopedLockAssertion lock_assertion(mCvMutex);
        if (mState == BluetoothStreamState::STARTED) {
            return true;  // nop, return
        } else if (mState == BluetoothStreamState::SUSPENDING ||
                   mState == BluetoothStreamState::STARTING) {
            /* If port is in transient state, give some time to respond */
            auto state_ = mState;
            // The lock is released before condWaitState() re-acquires it; the
            // state may change in between, which condWaitState() re-checks.
            lock.unlock();
            if (!condWaitState(state_)) {
                LOG(ERROR) << __func__ << debugMessage() << ", state=" << getState() << " failure";
                return false;
            }
        }
    }
    bool retval = false;
    {
        std::unique_lock lock(mCvMutex);
        base::ScopedLockAssertion lock_assertion(mCvMutex);
        if (mState == BluetoothStreamState::STARTED) {
            retval = true;
        } else if (mState == BluetoothStreamState::STANDBY) {
            mState = BluetoothStreamState::STARTING;
            // Unlock before the (potentially blocking) HAL call; the result
            // callback needs the mutex to advance the state machine.
            lock.unlock();
            if (BluetoothAudioSessionControl::StartStream(mSessionType)) {
                retval = condWaitState(BluetoothStreamState::STARTING);
            } else {
                LOG(ERROR) << __func__ << debugMessage() << ", state=" << getState()
                           << " Hal fails";
            }
        }
    }
    if (retval) {
        LOG(INFO) << __func__ << debugMessage() << ", state=" << getState()
                  << ", mono=" << (mIsStereoToMono ? "true" : "false") << " done";
    } else {
        LOG(ERROR) << __func__ << debugMessage() << ", state=" << getState() << " failure";
    }
    return retval;  // false if any failure like timeout
}
// Asks the Bluetooth stack to suspend streaming. Mirrors start():
// Phase 1: wait out any in-flight STARTING/SUSPENDING transition.
// Phase 2: from STARTED, issue SuspendStream and wait for the
// SUSPENDING -> STANDBY confirmation from controlResultHandler().
// Returns false on timeout, stack failure, or an unexpected state.
bool BluetoothAudioPortAidl::suspend() {
    if (!inUse()) {
        LOG(ERROR) << __func__ << ": BluetoothAudioPortAidl is not in use";
        return false;
    }
    LOG(VERBOSE) << __func__ << debugMessage() << ", state=" << getState() << " request";
    {
        std::unique_lock lock(mCvMutex);
        base::ScopedLockAssertion lock_assertion(mCvMutex);
        if (mState == BluetoothStreamState::STANDBY) {
            return true;  // nop, return
        } else if (mState == BluetoothStreamState::SUSPENDING ||
                   mState == BluetoothStreamState::STARTING) {
            /* If port is in transient state, give some time to respond */
            auto state_ = mState;
            lock.unlock();
            if (!condWaitState(state_)) {
                LOG(ERROR) << __func__ << debugMessage() << ", state=" << getState() << " failure";
                return false;
            }
        }
    }
    bool retval = false;
    {
        std::unique_lock lock(mCvMutex);
        base::ScopedLockAssertion lock_assertion(mCvMutex);
        if (mState == BluetoothStreamState::STANDBY) {
            retval = true;
        } else if (mState == BluetoothStreamState::STARTED) {
            mState = BluetoothStreamState::SUSPENDING;
            // Unlock before the HAL call so the result callback can take the
            // mutex and advance the state machine.
            lock.unlock();
            if (BluetoothAudioSessionControl::SuspendStream(mSessionType)) {
                retval = condWaitState(BluetoothStreamState::SUSPENDING);
            } else {
                LOG(ERROR) << __func__ << debugMessage() << ", state=" << getState()
                           << " failure to suspend stream";
            }
        }
    }
    if (retval) {
        LOG(INFO) << __func__ << debugMessage() << ", state=" << getState() << " done";
    } else {
        LOG(ERROR) << __func__ << debugMessage() << ", state=" << getState() << " failure";
    }
    return retval;  // false if any failure like timeout
}
// Forces the port to DISABLED, asking the stack to stop streaming if it was
// not already disabled. Unlike start()/suspend(), this does not wait for a
// confirmation callback — the state is set directly.
void BluetoothAudioPortAidl::stop() {
    if (!inUse()) {
        LOG(ERROR) << __func__ << ": BluetoothAudioPortAidl is not in use";
        return;
    }
    std::lock_guard guard(mCvMutex);
    LOG(VERBOSE) << __func__ << debugMessage() << ", state=" << getState() << " request";
    if (mState != BluetoothStreamState::DISABLED) {
        BluetoothAudioSessionControl::StopStream(mSessionType);
        mState = BluetoothStreamState::DISABLED;
    }
    LOG(VERBOSE) << __func__ << debugMessage() << ", state=" << getState() << " done";
}
// Writes PCM data to the Bluetooth stack. When mIsStereoToMono is set (see
// BluetoothAudioPortAidlOut::loadAudioConfig), the incoming 16-bit stereo
// buffer is downmixed to mono before writing, and the return value is scaled
// back to the caller's stereo byte count.
size_t BluetoothAudioPortAidlOut::writeData(const void* buffer, size_t bytes) const {
    if (!buffer) {
        LOG(ERROR) << __func__ << ": bad input arg";
        return 0;
    }
    if (!inUse()) {
        LOG(ERROR) << __func__ << ": BluetoothAudioPortAidl is not in use";
        return 0;
    }
    if (!mIsStereoToMono) {
        return BluetoothAudioSessionControl::OutWritePcmData(mSessionType, buffer, bytes);
    }
    // WAR to mix the stereo into Mono (16 bits per sample)
    const size_t write_frames = bytes >> 2;  // 4 bytes per 16-bit stereo frame
    if (write_frames == 0) return 0;
    auto src = static_cast<const int16_t*>(buffer);
    std::unique_ptr<int16_t[]> dst{new int16_t[write_frames]};
    downmix_to_mono_i16_from_stereo_i16(dst.get(), src, write_frames);
    // A mono frame is 2 bytes — half the size of the corresponding stereo
    // frame — so write_frames frames equal write_frames * 2 bytes.
    auto totalWrite = BluetoothAudioSessionControl::OutWritePcmData(mSessionType, dst.get(),
                                                                    write_frames * 2);
    // totalWrite counts mono bytes; report stereo bytes consumed to the caller.
    return totalWrite * 2;
}
// Reads up to `bytes` of PCM capture data from the Bluetooth stack into
// `buffer`. Returns the number of bytes read, or 0 on a bad argument or an
// unused port.
size_t BluetoothAudioPortAidlIn::readData(void* buffer, size_t bytes) const {
    if (!buffer) {
        LOG(ERROR) << __func__ << ": bad input arg";
        return 0;
    }
    if (!inUse()) {
        LOG(ERROR) << __func__ << ": BluetoothAudioPortAidl is not in use";
        return 0;
    }
    return BluetoothAudioSessionControl::InReadPcmData(mSessionType, buffer, bytes);
}
// Queries the stack for the current presentation position (frames rendered
// and timestamp). Returns false if the port is unused or the query fails;
// `presentation_position` is logged regardless of the result.
bool BluetoothAudioPortAidl::getPresentationPosition(
        PresentationPosition& presentation_position) const {
    if (!inUse()) {
        LOG(ERROR) << __func__ << ": BluetoothAudioPortAidl is not in use";
        return false;
    }
    bool retval = BluetoothAudioSessionControl::GetPresentationPosition(mSessionType,
                                                                        presentation_position);
    LOG(VERBOSE) << __func__ << debugMessage() << ", state=" << getState()
                 << presentation_position.toString();
    return retval;
}
// Forwards playback track metadata to the Bluetooth stack. An empty track
// list is treated as a successful no-op; otherwise the stack's result is
// returned as-is.
bool BluetoothAudioPortAidl::updateSourceMetadata(const SourceMetadata& source_metadata) const {
    if (!inUse()) {
        LOG(ERROR) << __func__ << ": BluetoothAudioPortAidl is not in use";
        return false;
    }
    LOG(DEBUG) << __func__ << debugMessage() << ", state=" << getState() << ", "
               << source_metadata.tracks.size() << " track(s)";
    if (source_metadata.tracks.empty()) {
        return true;
    }
    return BluetoothAudioSessionControl::UpdateSourceMetadata(mSessionType, source_metadata);
}
// Forwards capture track metadata to the Bluetooth stack. An empty track
// list is treated as a successful no-op; otherwise the stack's result is
// returned as-is.
bool BluetoothAudioPortAidl::updateSinkMetadata(const SinkMetadata& sink_metadata) const {
    if (!inUse()) {
        LOG(ERROR) << __func__ << ": BluetoothAudioPortAidl is not in use";
        return false;
    }
    LOG(DEBUG) << __func__ << debugMessage() << ", state=" << getState() << ", "
               << sink_metadata.tracks.size() << " track(s)";
    if (sink_metadata.tracks.empty()) {
        return true;
    }
    return BluetoothAudioSessionControl::UpdateSinkMetadata(mSessionType, sink_metadata);
}
// Returns the current stream state.
// NOTE(review): mState is read here without holding mCvMutex while writers
// take the lock — appears to be tolerated for logging/snapshot use; confirm
// whether mState needs to be atomic.
BluetoothStreamState BluetoothAudioPortAidl::getState() const {
    return mState;
}
// Forces the stream state to `state` without consulting the state machine or
// notifying waiters. Returns false only when the port is not in use.
bool BluetoothAudioPortAidl::setState(BluetoothStreamState state) {
    if (!inUse()) {
        LOG(ERROR) << __func__ << ": BluetoothAudioPortAidl is not in use";
        return false;
    }
    std::lock_guard guard(mCvMutex);
    LOG(DEBUG) << __func__ << ": BluetoothAudioPortAidl old state = " << mState
               << " new state = " << state;
    mState = state;
    return true;
}
// True when this port runs on either A2DP data path (software encoding or
// hardware offload).
bool BluetoothAudioPortAidl::isA2dp() const {
    switch (mSessionType) {
        case SessionType::A2DP_SOFTWARE_ENCODING_DATAPATH:
        case SessionType::A2DP_HARDWARE_OFFLOAD_ENCODING_DATAPATH:
            return true;
        default:
            return false;
    }
}
// True when this port runs on any LE Audio data path: unicast encoding or
// decoding (software or hardware offload) or broadcast encoding.
bool BluetoothAudioPortAidl::isLeAudio() const {
    switch (mSessionType) {
        case SessionType::LE_AUDIO_SOFTWARE_ENCODING_DATAPATH:
        case SessionType::LE_AUDIO_SOFTWARE_DECODING_DATAPATH:
        case SessionType::LE_AUDIO_HARDWARE_OFFLOAD_ENCODING_DATAPATH:
        case SessionType::LE_AUDIO_HARDWARE_OFFLOAD_DECODING_DATAPATH:
        case SessionType::LE_AUDIO_BROADCAST_SOFTWARE_ENCODING_DATAPATH:
        case SessionType::LE_AUDIO_BROADCAST_HARDWARE_OFFLOAD_ENCODING_DATAPATH:
            return true;
        default:
            return false;
    }
}
// Common log suffix identifying this port by session type and cookie.
std::string BluetoothAudioPortAidl::debugMessage() const {
    return StringPrintf(": session_type=%s, cookie=%#hx", toString(mSessionType).c_str(), mCookie);
}
} // namespace android::bluetooth::audio::aidl

View File

@@ -0,0 +1,342 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define LOG_TAG "AHAL_ModuleBluetooth"
#include <android-base/logging.h>
#include "BluetoothAudioSession.h"
#include "core-impl/ModuleBluetooth.h"
#include "core-impl/StreamBluetooth.h"
using aidl::android::hardware::audio::common::SinkMetadata;
using aidl::android::hardware::audio::common::SourceMetadata;
using aidl::android::hardware::bluetooth::audio::ChannelMode;
using aidl::android::hardware::bluetooth::audio::PcmConfiguration;
using aidl::android::media::audio::common::AudioChannelLayout;
using aidl::android::media::audio::common::AudioConfigBase;
using aidl::android::media::audio::common::AudioDeviceDescription;
using aidl::android::media::audio::common::AudioDeviceType;
using aidl::android::media::audio::common::AudioFormatDescription;
using aidl::android::media::audio::common::AudioFormatType;
using aidl::android::media::audio::common::AudioIoFlags;
using aidl::android::media::audio::common::AudioOffloadInfo;
using aidl::android::media::audio::common::AudioPort;
using aidl::android::media::audio::common::AudioPortConfig;
using aidl::android::media::audio::common::AudioPortExt;
using aidl::android::media::audio::common::AudioProfile;
using aidl::android::media::audio::common::Int;
using aidl::android::media::audio::common::MicrophoneInfo;
using aidl::android::media::audio::common::PcmType;
using android::bluetooth::audio::aidl::BluetoothAudioPortAidl;
using android::bluetooth::audio::aidl::BluetoothAudioPortAidlIn;
using android::bluetooth::audio::aidl::BluetoothAudioPortAidlOut;
// TODO(b/312265159) bluetooth audio should be in its own process
// Remove this and the shared_libs when that happens
extern "C" binder_status_t createIBluetoothAudioProviderFactory();
namespace aidl::android::hardware::audio::core {
namespace {
// Maps a PCM sample width in bits to the matching PcmType. Unsupported
// widths are logged and reported as PcmType::DEFAULT.
PcmType pcmTypeFromBitsPerSample(int8_t bitsPerSample) {
    switch (bitsPerSample) {
        case 8:
            return PcmType::UINT_8_BIT;
        case 16:
            return PcmType::INT_16_BIT;
        case 24:
            return PcmType::INT_24_BIT;
        case 32:
            return PcmType::INT_32_BIT;
        default:
            ALOGE("Unsupported bitsPerSample: %d", bitsPerSample);
            return PcmType::DEFAULT;
    }
}
// Maps a Bluetooth ChannelMode to the framework channel layout. DUALMONO is
// reported as stereo; unsupported modes are logged and yield a
// default-constructed (empty) layout.
AudioChannelLayout channelLayoutFromChannelMode(ChannelMode mode) {
    switch (mode) {
        case ChannelMode::MONO:
            return AudioChannelLayout::make<AudioChannelLayout::layoutMask>(
                    AudioChannelLayout::LAYOUT_MONO);
        case ChannelMode::STEREO:
        case ChannelMode::DUALMONO:
            return AudioChannelLayout::make<AudioChannelLayout::layoutMask>(
                    AudioChannelLayout::LAYOUT_STEREO);
        default:
            ALOGE("Unsupported channel mode: %s", toString(mode).c_str());
            return AudioChannelLayout{};
    }
}
} // namespace
// Constructs the Bluetooth module and brings up the in-process
// IBluetoothAudioProviderFactory service. Factory creation failure is logged
// but not fatal — the module still constructs.
ModuleBluetooth::ModuleBluetooth(std::unique_ptr<Module::Configuration>&& config)
    : Module(Type::BLUETOOTH, std::move(config)) {
    // TODO(b/312265159) bluetooth audio should be in its own process
    // Remove this and the shared_libs when that happens
    binder_status_t status = createIBluetoothAudioProviderFactory();
    if (status != STATUS_OK) {
        LOG(ERROR) << "Failed to create bluetooth audio provider factory. Status: "
                   << ::android::statusToString(status);
    }
}
// Returns the (lazily created) IBluetoothA2dp child interface.
ndk::ScopedAStatus ModuleBluetooth::getBluetoothA2dp(
        std::shared_ptr<IBluetoothA2dp>* _aidl_return) {
    *_aidl_return = getBtA2dp().getInstance();
    LOG(DEBUG) << __func__ << ": returning instance of IBluetoothA2dp: " << _aidl_return->get();
    return ndk::ScopedAStatus::ok();
}
// Returns the (lazily created) IBluetoothLe child interface.
ndk::ScopedAStatus ModuleBluetooth::getBluetoothLe(std::shared_ptr<IBluetoothLe>* _aidl_return) {
    *_aidl_return = getBtLe().getInstance();
    LOG(DEBUG) << __func__ << ": returning instance of IBluetoothLe: " << _aidl_return->get();
    return ndk::ScopedAStatus::ok();
}
// Lazily creates the BluetoothA2dp child object and hooks its parameter
// updates into bluetoothParametersUpdated().
ChildInterface<BluetoothA2dp>& ModuleBluetooth::getBtA2dp() {
    if (!mBluetoothA2dp) {
        auto handle = ndk::SharedRefBase::make<BluetoothA2dp>();
        handle->registerHandler(std::bind(&ModuleBluetooth::bluetoothParametersUpdated, this));
        mBluetoothA2dp = handle;
    }
    return mBluetoothA2dp;
}
// Lazily creates the BluetoothLe child object and hooks its parameter
// updates into bluetoothParametersUpdated().
ChildInterface<BluetoothLe>& ModuleBluetooth::getBtLe() {
    if (!mBluetoothLe) {
        auto handle = ndk::SharedRefBase::make<BluetoothLe>();
        handle->registerHandler(std::bind(&ModuleBluetooth::bluetoothParametersUpdated, this));
        mBluetoothLe = handle;
    }
    return mBluetoothLe;
}
// Bundles weak handles to the profile managers for stream creation. The SCO
// (IBluetooth) slot is intentionally empty — this module handles A2DP/LE only.
ModuleBluetooth::BtProfileHandles ModuleBluetooth::getBtProfileManagerHandles() {
    return std::make_tuple(std::weak_ptr<IBluetooth>(), getBtA2dp().getPtr(), getBtLe().getPtr());
}
// Mic mute is not supported by the Bluetooth module.
ndk::ScopedAStatus ModuleBluetooth::getMicMute(bool* _aidl_return __unused) {
    LOG(DEBUG) << __func__ << ": is not supported";
    return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
}
// Mic mute is not supported by the Bluetooth module.
ndk::ScopedAStatus ModuleBluetooth::setMicMute(bool in_mute __unused) {
    LOG(DEBUG) << __func__ << ": is not supported";
    return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
}
// Overrides the base implementation so that device-port configs are suggested
// from the PCM configuration negotiated with the Bluetooth stack (via the
// cached/created proxy) instead of generic defaults. Falls back to the base
// default suggestion when the proxy or its config is unusable.
ndk::ScopedAStatus ModuleBluetooth::setAudioPortConfig(const AudioPortConfig& in_requested,
                                                       AudioPortConfig* out_suggested,
                                                       bool* _aidl_return) {
    auto fillConfig = [this](const AudioPort& port, AudioPortConfig* config) {
        if (port.ext.getTag() == AudioPortExt::device) {
            CachedProxy proxy;
            auto status = findOrCreateProxy(port, proxy);
            if (status.isOk()) {
                const auto& pcmConfig = proxy.pcmConfig;
                LOG(DEBUG) << "setAudioPortConfig: suggesting port config from "
                           << pcmConfig.toString();
                const auto pcmType = pcmTypeFromBitsPerSample(pcmConfig.bitsPerSample);
                const auto channelMask = channelLayoutFromChannelMode(pcmConfig.channelMode);
                // Only use the stack's config if both fields mapped cleanly;
                // DEFAULT / empty layout signal an unsupported value.
                if (pcmType != PcmType::DEFAULT && channelMask != AudioChannelLayout{}) {
                    config->format =
                            AudioFormatDescription{.type = AudioFormatType::PCM, .pcm = pcmType};
                    config->channelMask = channelMask;
                    config->sampleRate = Int{.value = pcmConfig.sampleRateHz};
                    config->flags = port.flags;
                    config->ext = port.ext;
                    return true;
                }
            }
        }
        return generateDefaultPortConfig(port, config);
    };
    return Module::setAudioPortConfigImpl(in_requested, fillConfig, out_suggested, _aidl_return);
}
// Validates that a patch's mix-port config is compatible with the Bluetooth
// proxy's negotiated PCM config for the device port involved.
// NOTE(review): despite the "check" name this also has a side effect — on
// success it records the mix-port handle -> device-port id association in
// mConnections, which fetchAndCheckProxy()/getNominalLatencyMs() rely on.
ndk::ScopedAStatus ModuleBluetooth::checkAudioPatchEndpointsMatch(
        const std::vector<AudioPortConfig*>& sources, const std::vector<AudioPortConfig*>& sinks) {
    // Both sources and sinks must be non-empty, this is guaranteed by 'setAudioPatch'.
    const bool isInput = sources[0]->ext.getTag() == AudioPortExt::device;
    const int32_t devicePortId = isInput ? sources[0]->portId : sinks[0]->portId;
    const auto proxyIt = mProxies.find(devicePortId);
    // No proxy cached for this device port: nothing to validate against.
    if (proxyIt == mProxies.end()) return ndk::ScopedAStatus::ok();
    const auto& pcmConfig = proxyIt->second.pcmConfig;
    const AudioPortConfig* mixPortConfig = isInput ? sinks[0] : sources[0];
    if (!StreamBluetooth::checkConfigParams(
                pcmConfig, AudioConfigBase{.sampleRate = mixPortConfig->sampleRate->value,
                                           .channelMask = *(mixPortConfig->channelMask),
                                           .format = *(mixPortConfig->format)})) {
        return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
    }
    if (int32_t handle = mixPortConfig->ext.get<AudioPortExt::mix>().handle; handle > 0) {
        mConnections.insert(std::pair(handle, devicePortId));
    }
    return ndk::ScopedAStatus::ok();
}
// Drops the cached Bluetooth proxy when its device port disconnects.
// Connection events need no action here: proxies are created lazily on first
// use (see findOrCreateProxy / populateConnectedDevicePort).
void ModuleBluetooth::onExternalDeviceConnectionChanged(const AudioPort& audioPort,
                                                        bool connected) {
    if (connected) return;
    mProxies.erase(audioPort.id);
}
// Creates a Bluetooth input stream, transferring ownership of the cached
// device proxy (if any) from the module to the stream via fetchAndCheckProxy.
ndk::ScopedAStatus ModuleBluetooth::createInputStream(
        StreamContext&& context, const SinkMetadata& sinkMetadata,
        const std::vector<MicrophoneInfo>& microphones, std::shared_ptr<StreamIn>* result) {
    CachedProxy proxy;
    RETURN_STATUS_IF_ERROR(fetchAndCheckProxy(context, proxy));
    return createStreamInstance<StreamInBluetooth>(result, std::move(context), sinkMetadata,
                                                   microphones, getBtProfileManagerHandles(),
                                                   proxy.ptr, proxy.pcmConfig);
}
// Creates a Bluetooth output stream, transferring ownership of the cached
// device proxy (if any) from the module to the stream via fetchAndCheckProxy.
ndk::ScopedAStatus ModuleBluetooth::createOutputStream(
        StreamContext&& context, const SourceMetadata& sourceMetadata,
        const std::optional<AudioOffloadInfo>& offloadInfo, std::shared_ptr<StreamOut>* result) {
    CachedProxy proxy;
    RETURN_STATUS_IF_ERROR(fetchAndCheckProxy(context, proxy));
    return createStreamInstance<StreamOutBluetooth>(result, std::move(context), sourceMetadata,
                                                    offloadInfo, getBtProfileManagerHandles(),
                                                    proxy.ptr, proxy.pcmConfig);
}
// Called when a Bluetooth device port is connected. Validates that the device
// is one this module supports, creates (and caches) a proxy to the BT stack,
// and replaces the port's profile list with the single profile matching the
// configuration currently negotiated by the stack.
ndk::ScopedAStatus ModuleBluetooth::populateConnectedDevicePort(AudioPort* audioPort,
                                                                int32_t nextPortId) {
    if (audioPort->ext.getTag() != AudioPortExt::device) {
        LOG(ERROR) << __func__ << ": not a device port: " << audioPort->toString();
        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
    }
    if (!::aidl::android::hardware::bluetooth::audio::BluetoothAudioSession::IsAidlAvailable()) {
        LOG(ERROR) << __func__ << ": IBluetoothAudioProviderFactory AIDL service not available";
        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
    }
    const auto& devicePort = audioPort->ext.get<AudioPortExt::device>();
    const auto& description = devicePort.device.type;
    // This method must return an error when the device can not be connected.
    // Since A2DP/LE status events are sent asynchronously, it is more reliable
    // to attempt connecting to the BT stack rather than judge by the A2DP/LE status.
    if (description.connection != AudioDeviceDescription::CONNECTION_BT_A2DP &&
        description.connection != AudioDeviceDescription::CONNECTION_BT_LE &&
        !(description.connection == AudioDeviceDescription::CONNECTION_WIRELESS &&
          description.type == AudioDeviceType::OUT_HEARING_AID)) {
        LOG(ERROR) << __func__ << ": unsupported device type: " << audioPort->toString();
        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
    }
    CachedProxy proxy;
    RETURN_STATUS_IF_ERROR(createProxy(*audioPort, nextPortId, proxy));
    // If the device is actually connected, it is configured by the BT stack.
    // Provide the current configuration instead of all possible profiles.
    const auto& pcmConfig = proxy.pcmConfig;
    audioPort->profiles.clear();
    audioPort->profiles.push_back(
            AudioProfile{.format = AudioFormatDescription{.type = AudioFormatType::PCM,
                                                          .pcm = pcmTypeFromBitsPerSample(
                                                                  pcmConfig.bitsPerSample)},
                         .channelMasks = std::vector<AudioChannelLayout>(
                                 {channelLayoutFromChannelMode(pcmConfig.channelMode)}),
                         .sampleRates = std::vector<int>({pcmConfig.sampleRateHz})});
    LOG(DEBUG) << __func__ << ": " << audioPort->toString();
    return ndk::ScopedAStatus::ok();
}
// Master mute is not supported by the Bluetooth module.
ndk::ScopedAStatus ModuleBluetooth::onMasterMuteChanged(bool) {
    LOG(DEBUG) << __func__ << ": is not supported";
    return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
}
// Master volume is not supported by the Bluetooth module.
ndk::ScopedAStatus ModuleBluetooth::onMasterVolumeChanged(float) {
    LOG(DEBUG) << __func__ << ": is not supported";
    return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
}
// Computes the nominal latency for a mix-port config from the proxy's
// preferred data interval, looked up via the mix handle -> device port
// mapping recorded in checkAudioPatchEndpointsMatch(). Falls back to the base
// module's estimate (with an error log) when no connection/proxy exists.
int32_t ModuleBluetooth::getNominalLatencyMs(const AudioPortConfig& portConfig) {
    const auto connectionsIt = mConnections.find(portConfig.ext.get<AudioPortExt::mix>().handle);
    if (connectionsIt != mConnections.end()) {
        const auto proxyIt = mProxies.find(connectionsIt->second);
        if (proxyIt != mProxies.end()) {
            auto proxy = proxyIt->second.ptr;
            size_t dataIntervalUs = 0;
            // Non-fatal: a zero interval falls through to the stream's default.
            if (!proxy->getPreferredDataIntervalUs(dataIntervalUs)) {
                LOG(WARNING) << __func__ << ": could not fetch preferred data interval";
            }
            const bool isInput = portConfig.flags->getTag() == AudioIoFlags::input;
            return isInput ? StreamInBluetooth::getNominalLatencyMs(dataIntervalUs)
                           : StreamOutBluetooth::getNominalLatencyMs(dataIntervalUs);
        }
    }
    LOG(ERROR) << __func__ << ": no connection or proxy found for " << portConfig.toString();
    return Module::getNominalLatencyMs(portConfig);
}
/**
 * Creates a BT device-port proxy for `audioPort`, registers it with the
 * Bluetooth stack (retrying up to kCreateProxyRetries times), loads the
 * stack's PCM configuration, and caches the result in mProxies under
 * `instancePortId`.
 *
 * @return EX_ILLEGAL_STATE when registration ultimately fails or the audio
 *         config cannot be loaded.
 */
ndk::ScopedAStatus ModuleBluetooth::createProxy(const AudioPort& audioPort, int32_t instancePortId,
                                                CachedProxy& proxy) {
    const bool isInput = audioPort.flags.getTag() == AudioIoFlags::input;
    proxy.ptr = isInput ? std::shared_ptr<BluetoothAudioPortAidl>(
                                  std::make_shared<BluetoothAudioPortAidlIn>())
                        : std::shared_ptr<BluetoothAudioPortAidl>(
                                  std::make_shared<BluetoothAudioPortAidlOut>());
    const auto& devicePort = audioPort.ext.get<AudioPortExt::device>();
    const auto device = devicePort.device.type;
    bool registrationSuccess = false;
    for (int i = 0; i < kCreateProxyRetries && !registrationSuccess; ++i) {
        registrationSuccess = proxy.ptr->registerPort(device);
        // Fix: only back off after a failed attempt. The previous loop slept
        // unconditionally, adding kCreateProxyRetrySleepMs of latency to
        // every successful device connection.
        if (!registrationSuccess) usleep(kCreateProxyRetrySleepMs * 1000);
    }
    if (!registrationSuccess) {
        LOG(ERROR) << __func__ << ": failed to register BT port for " << device.toString();
        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
    }
    if (!proxy.ptr->loadAudioConfig(proxy.pcmConfig)) {
        LOG(ERROR) << __func__ << ": state=" << proxy.ptr->getState()
                   << ", failed to load audio config";
        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
    }
    mProxies.insert(std::pair(instancePortId, proxy));
    return ndk::ScopedAStatus::ok();
}
// Transfers the cached proxy for the stream's mix port out of the module
// (both mProxies and mConnections entries are erased — the stream becomes the
// proxy's owner) and validates the stream's config against the proxy's
// negotiated PCM config.
ndk::ScopedAStatus ModuleBluetooth::fetchAndCheckProxy(const StreamContext& context,
                                                       CachedProxy& proxy) {
    const auto connectionsIt = mConnections.find(context.getMixPortHandle());
    if (connectionsIt != mConnections.end()) {
        const auto proxyIt = mProxies.find(connectionsIt->second);
        if (proxyIt != mProxies.end()) {
            proxy = proxyIt->second;
            mProxies.erase(proxyIt);
        }
        mConnections.erase(connectionsIt);
    }
    if (proxy.ptr != nullptr) {
        if (!StreamBluetooth::checkConfigParams(
                    proxy.pcmConfig, AudioConfigBase{.sampleRate = context.getSampleRate(),
                                                     .channelMask = context.getChannelLayout(),
                                                     .format = context.getFormat()})) {
            return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
        }
    }
    // Not having a proxy is OK, it may happen in VTS tests when streams are opened on unconnected
    // mix ports.
    return ndk::ScopedAStatus::ok();
}
// Returns the cached proxy for the port if one exists; otherwise creates,
// caches, and returns a new one (see createProxy).
ndk::ScopedAStatus ModuleBluetooth::findOrCreateProxy(const AudioPort& audioPort,
                                                      CachedProxy& proxy) {
    const auto existing = mProxies.find(audioPort.id);
    if (existing == mProxies.end()) {
        return createProxy(audioPort, audioPort.id, proxy);
    }
    proxy = existing->second;
    return ndk::ScopedAStatus::ok();
}
} // namespace aidl::android::hardware::audio::core

View File

@@ -0,0 +1,272 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <algorithm>
#define LOG_TAG "AHAL_StreamBluetooth"
#include <Utils.h>
#include <android-base/logging.h>
#include <audio_utils/clock.h>
#include "core-impl/StreamBluetooth.h"
using aidl::android::hardware::audio::common::SinkMetadata;
using aidl::android::hardware::audio::common::SourceMetadata;
using aidl::android::hardware::audio::core::VendorParameter;
using aidl::android::hardware::bluetooth::audio::ChannelMode;
using aidl::android::hardware::bluetooth::audio::PcmConfiguration;
using aidl::android::hardware::bluetooth::audio::PresentationPosition;
using aidl::android::media::audio::common::AudioChannelLayout;
using aidl::android::media::audio::common::AudioConfigBase;
using aidl::android::media::audio::common::AudioDevice;
using aidl::android::media::audio::common::AudioDeviceAddress;
using aidl::android::media::audio::common::AudioFormatDescription;
using aidl::android::media::audio::common::AudioFormatType;
using aidl::android::media::audio::common::AudioOffloadInfo;
using aidl::android::media::audio::common::MicrophoneDynamicInfo;
using aidl::android::media::audio::common::MicrophoneInfo;
using android::bluetooth::audio::aidl::BluetoothAudioPortAidl;
using android::bluetooth::audio::aidl::BluetoothAudioPortAidlIn;
using android::bluetooth::audio::aidl::BluetoothAudioPortAidlOut;
using android::bluetooth::audio::aidl::BluetoothStreamState;
namespace aidl::android::hardware::audio::core {
// Fallback data interval for input (capture) streams, in milliseconds; used
// when the BT HAL PCM configuration reports dataIntervalUs == 0.
constexpr int kBluetoothDefaultInputBufferMs = 20;
// Fallback data interval for output (playback) streams, in milliseconds.
constexpr int kBluetoothDefaultOutputBufferMs = 10;
// constexpr int kBluetoothSpatializerOutputBufferMs = 10;
// Remote device audio delay assumed when the BT HAL cannot report an actual
// presentation position, in milliseconds (see StreamBluetooth::transfer).
constexpr int kBluetoothDefaultRemoteDelayMs = 200;
// Constructs the common Bluetooth stream state shared by input and output
// streams. The preferred data interval is taken from the BT HAL PCM
// configuration; when the HAL reports 0, it falls back to the per-direction
// default (20 ms input / 10 ms output), converted to microseconds.
// 'btDeviceProxy' may be null (VTS tests) — see init().
StreamBluetooth::StreamBluetooth(StreamContext* context, const Metadata& metadata,
                                 ModuleBluetooth::BtProfileHandles&& btHandles,
                                 const std::shared_ptr<BluetoothAudioPortAidl>& btDeviceProxy,
                                 const PcmConfiguration& pcmConfig)
    : StreamCommonImpl(context, metadata),
      mFrameSizeBytes(getContext().getFrameSize()),
      mIsInput(isInput(metadata)),
      // Profile handles are held weakly; bluetoothParametersUpdated() locks
      // them on use.
      mBluetoothA2dp(std::move(std::get<ModuleBluetooth::BtInterface::BTA2DP>(btHandles))),
      mBluetoothLe(std::move(std::get<ModuleBluetooth::BtInterface::BTLE>(btHandles))),
      mPreferredDataIntervalUs(pcmConfig.dataIntervalUs != 0
                                       ? pcmConfig.dataIntervalUs
                                       : (mIsInput ? kBluetoothDefaultInputBufferMs
                                                   : kBluetoothDefaultOutputBufferMs) *
                                                 1000),
      mBtDeviceProxy(btDeviceProxy) {}
StreamBluetooth::~StreamBluetooth() {
    // Ensure the stream worker is torn down before members are destroyed.
    cleanupWorker();
}
// One-time stream initialization. A missing BT HAL proxy is tolerated (the
// stream simply transfers no data); this happens in VTS tests.
::android::status_t StreamBluetooth::init(DriverCallbackInterface*) {
    std::lock_guard lock(mLock);
    const bool hasProxy = (mBtDeviceProxy != nullptr);
    if (!hasProxy) {
        // Normal in VTS tests: streams get opened on unconnected mix ports.
        LOG(INFO) << __func__ << ": no BT HAL proxy, stream is non-functional";
    }
    LOG(INFO) << __func__ << ": preferred data interval (us): " << mPreferredDataIntervalUs;
    return ::android::OK;
}
// Drain is a no-op apart from a short (1 ms) sleep; the drain mode argument
// is not used.
::android::status_t StreamBluetooth::drain(StreamDescriptor::DrainMode) {
    static constexpr int kDrainSleepUs = 1000;
    usleep(kDrainSleepUs);
    return ::android::OK;
}
// Flush is a no-op apart from a short (1 ms) sleep.
::android::status_t StreamBluetooth::flush() {
    static constexpr int kFlushSleepUs = 1000;
    usleep(kFlushSleepUs);
    return ::android::OK;
}
::android::status_t StreamBluetooth::pause() {
    // Pause is implemented identically to standby (suspends the BT proxy).
    return standby();
}
// Transfers one burst of audio between the client buffer and the BT HAL
// proxy: reads for input streams, writes for output streams.
// On exit, *actualFrameCount holds the number of whole frames moved and
// *latencyMs the remote device audio delay (or LATENCY_UNKNOWN when no
// transfer took place). Always returns OK: an absent or disabled BT session,
// or a proxy that fails to start, is reported as zero frames transferred
// rather than an error, so the caller retries on the next burst.
::android::status_t StreamBluetooth::transfer(void* buffer, size_t frameCount,
                                              size_t* actualFrameCount, int32_t* latencyMs) {
    std::lock_guard guard(mLock);
    *actualFrameCount = 0;
    *latencyMs = StreamDescriptor::LATENCY_UNKNOWN;
    if (mBtDeviceProxy == nullptr || mBtDeviceProxy->getState() == BluetoothStreamState::DISABLED) {
        // The BT session is turned down, silently ignore write.
        return ::android::OK;
    }
    if (!mBtDeviceProxy->start()) {
        // Starting the proxy can fail transiently; report no progress.
        LOG(WARNING) << __func__ << ": state= " << mBtDeviceProxy->getState()
                     << " failed to start, will retry";
        return ::android::OK;
    }
    *latencyMs = 0;
    const size_t bytesToTransfer = frameCount * mFrameSizeBytes;
    const size_t bytesTransferred = mIsInput ? mBtDeviceProxy->readData(buffer, bytesToTransfer)
                                             : mBtDeviceProxy->writeData(buffer, bytesToTransfer);
    // Partial transfers round down to whole frames.
    *actualFrameCount = bytesTransferred / mFrameSizeBytes;
    PresentationPosition presentation_position;
    if (!mBtDeviceProxy->getPresentationPosition(presentation_position)) {
        // Fall back to a fixed default remote delay when the HAL cannot
        // report the actual presentation position.
        presentation_position.remoteDeviceAudioDelayNanos =
                kBluetoothDefaultRemoteDelayMs * NANOS_PER_MILLISECOND;
        LOG(WARNING) << __func__ << ": getPresentationPosition failed, latency info is unavailable";
    }
    // TODO(b/317117580): incorporate logic from
    //   packages/modules/Bluetooth/system/audio_bluetooth_hw/stream_apis.cc
    //   out_calculate_feeding_delay_ms / in_calculate_starving_delay_ms
    *latencyMs = std::max(*latencyMs, (int32_t)(presentation_position.remoteDeviceAudioDelayNanos /
                                                NANOS_PER_MILLISECOND));
    return ::android::OK;
}
// static
// Verifies that the stream's audio configuration matches the PCM
// configuration reported by the BT HAL. Returns true when sample rate,
// channel count (for MONO/STEREO modes), format type (must be PCM) and bits
// per sample all agree; logs and returns false at the first mismatch.
bool StreamBluetooth::checkConfigParams(const PcmConfiguration& pcmConfig,
                                        const AudioConfigBase& config) {
    if (static_cast<int>(config.sampleRate) != pcmConfig.sampleRateHz) {
        LOG(ERROR) << __func__ << ": sample rate mismatch, stream value=" << config.sampleRate
                   << ", BT HAL value=" << pcmConfig.sampleRateHz;
        return false;
    }
    const auto channelCount =
            aidl::android::hardware::audio::common::getChannelCount(config.channelMask);
    if ((pcmConfig.channelMode == ChannelMode::MONO && channelCount != 1) ||
        (pcmConfig.channelMode == ChannelMode::STEREO && channelCount != 2)) {
        LOG(ERROR) << __func__ << ": Channel count mismatch, stream value=" << channelCount
                   << ", BT HAL value=" << toString(pcmConfig.channelMode);
        return false;
    }
    if (config.format.type != AudioFormatType::PCM) {
        LOG(ERROR) << __func__
                   << ": unexpected stream format type: " << toString(config.format.type);
        return false;
    }
    // Use 'int' rather than 'int8_t' here: streaming an int8_t (signed char)
    // into the log inserts it as a *character*, so the mismatch message used
    // to print an unreadable control character instead of the numeric value.
    const int bitsPerSample =
            aidl::android::hardware::audio::common::getPcmSampleSizeInBytes(config.format.pcm) * 8;
    if (bitsPerSample != pcmConfig.bitsPerSample) {
        // Widen the HAL-side value for the same reason as above — presumably
        // it is also a byte-sized field; the cast is harmless either way.
        LOG(ERROR) << __func__ << ": bits per sample mismatch, stream value=" << bitsPerSample
                   << ", BT HAL value=" << static_cast<int>(pcmConfig.bitsPerSample);
        return false;
    }
    return true;
}
// Stops the BT proxy (if present and not already disabled) ahead of stream
// closure. Always succeeds.
ndk::ScopedAStatus StreamBluetooth::prepareToClose() {
    std::lock_guard lock(mLock);
    const bool needsStop =
            mBtDeviceProxy != nullptr && mBtDeviceProxy->getState() != BluetoothStreamState::DISABLED;
    if (needsStop) {
        mBtDeviceProxy->stop();
    }
    return ndk::ScopedAStatus::ok();
}
// Suspends the BT proxy, if any. Always reports success.
::android::status_t StreamBluetooth::standby() {
    std::lock_guard lock(mLock);
    if (mBtDeviceProxy != nullptr) {
        mBtDeviceProxy->suspend();
    }
    return ::android::OK;
}
// Starts the BT proxy, if any. The proxy's return value is intentionally
// ignored; transfer() retries starting on every burst. Always reports success.
::android::status_t StreamBluetooth::start() {
    std::lock_guard lock(mLock);
    if (mBtDeviceProxy != nullptr) {
        mBtDeviceProxy->start();
    }
    return ::android::OK;
}
// Stops and releases the BT proxy; after this the stream transfers no data.
void StreamBluetooth::shutdown() {
    std::lock_guard lock(mLock);
    if (mBtDeviceProxy == nullptr) return;
    mBtDeviceProxy->stop();
    mBtDeviceProxy = nullptr;
}
// Forwards updated stream metadata to the BT proxy: sink metadata for input
// streams, source metadata for output streams. A missing proxy is OK; a
// proxy that rejects the update yields EX_UNSUPPORTED_OPERATION.
ndk::ScopedAStatus StreamBluetooth::updateMetadataCommon(const Metadata& metadata) {
    std::lock_guard lock(mLock);
    if (mBtDeviceProxy == nullptr) {
        return ndk::ScopedAStatus::ok();
    }
    const bool updated =
            isInput(metadata)
                    ? mBtDeviceProxy->updateSinkMetadata(std::get<SinkMetadata>(metadata))
                    : mBtDeviceProxy->updateSourceMetadata(std::get<SourceMetadata>(metadata));
    if (!updated) {
        return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
    }
    return ndk::ScopedAStatus::ok();
}
// Applies an updated A2DP / LE Audio enabled state to the BT proxy of an
// output stream. Input streams ignore the update. Returns
// EX_UNSUPPORTED_OPERATION when the proxy rejects the state change.
ndk::ScopedAStatus StreamBluetooth::bluetoothParametersUpdated() {
    if (mIsInput) {
        return ndk::ScopedAStatus::ok();
    }
    // Disabling a profile suspends the proxy and marks it DISABLED;
    // enabling it moves the proxy back to standby.
    auto applyParam = [](const std::shared_ptr<BluetoothAudioPortAidl>& proxy,
                         bool isEnabled) -> bool {
        if (!isEnabled) {
            if (proxy->suspend()) return proxy->setState(BluetoothStreamState::DISABLED);
            return false;
        }
        return proxy->standby();
    };
    // Query the (weakly held) profile interfaces before taking mLock.
    // A profile's parameter only counts as present when the handle is alive
    // and isEnabled() succeeds.
    bool hasA2dpParam, enableA2dp;
    auto btA2dp = mBluetoothA2dp.lock();
    hasA2dpParam = btA2dp != nullptr && btA2dp->isEnabled(&enableA2dp).isOk();
    bool hasLeParam, enableLe;
    auto btLe = mBluetoothLe.lock();
    hasLeParam = btLe != nullptr && btLe->isEnabled(&enableLe).isOk();
    std::lock_guard guard(mLock);
    if (mBtDeviceProxy != nullptr) {
        // Only the parameter matching the proxy's profile type is applied.
        if ((hasA2dpParam && mBtDeviceProxy->isA2dp() && !applyParam(mBtDeviceProxy, enableA2dp)) ||
            (hasLeParam && mBtDeviceProxy->isLeAudio() && !applyParam(mBtDeviceProxy, enableLe))) {
            LOG(DEBUG) << __func__ << ": applyParam failed";
            return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
        }
    }
    return ndk::ScopedAStatus::ok();
}
// static
// Converts the HAL-provided data interval (us) into a nominal latency in ms,
// falling back to the default input interval when the HAL reported 0.
int32_t StreamInBluetooth::getNominalLatencyMs(size_t dataIntervalUs) {
    const size_t effectiveIntervalUs =
            (dataIntervalUs != 0) ? dataIntervalUs : kBluetoothDefaultInputBufferMs * 1000LL;
    return effectiveIntervalUs / 1000LL;
}
// Input stream: combines the generic StreamIn base with the shared
// StreamBluetooth implementation, wiring the latter to this stream's context.
StreamInBluetooth::StreamInBluetooth(StreamContext&& context, const SinkMetadata& sinkMetadata,
                                     const std::vector<MicrophoneInfo>& microphones,
                                     ModuleBluetooth::BtProfileHandles&& btProfileHandles,
                                     const std::shared_ptr<BluetoothAudioPortAidl>& btDeviceProxy,
                                     const PcmConfiguration& pcmConfig)
    : StreamIn(std::move(context), microphones),
      StreamBluetooth(&mContextInstance, sinkMetadata, std::move(btProfileHandles), btDeviceProxy,
                      pcmConfig) {}
// Active microphone reporting is not implemented for Bluetooth input streams.
ndk::ScopedAStatus StreamInBluetooth::getActiveMicrophones(
        std::vector<MicrophoneDynamicInfo>* _aidl_return __unused) {
    LOG(DEBUG) << __func__ << ": not supported";
    return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
}
// static
// Converts the HAL-provided data interval (us) into a nominal latency in ms,
// falling back to the default output interval when the HAL reported 0.
int32_t StreamOutBluetooth::getNominalLatencyMs(size_t dataIntervalUs) {
    const size_t effectiveIntervalUs =
            (dataIntervalUs != 0) ? dataIntervalUs : kBluetoothDefaultOutputBufferMs * 1000LL;
    return effectiveIntervalUs / 1000LL;
}
// Output stream: combines the generic StreamOut base with the shared
// StreamBluetooth implementation, wiring the latter to this stream's context.
StreamOutBluetooth::StreamOutBluetooth(StreamContext&& context,
                                       const SourceMetadata& sourceMetadata,
                                       const std::optional<AudioOffloadInfo>& offloadInfo,
                                       ModuleBluetooth::BtProfileHandles&& btProfileHandles,
                                       const std::shared_ptr<BluetoothAudioPortAidl>& btDeviceProxy,
                                       const PcmConfiguration& pcmConfig)
    : StreamOut(std::move(context), offloadInfo),
      StreamBluetooth(&mContextInstance, sourceMetadata, std::move(btProfileHandles), btDeviceProxy,
                      pcmConfig) {}
} // namespace aidl::android::hardware::audio::core

View File

@@ -0,0 +1,16 @@
package {
default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "hardware_interfaces_license"
// to get the below license kinds:
// SPDX-license-identifier-Apache-2.0
default_applicable_licenses: ["hardware_interfaces_license"],
}
xsd_config {
name: "audio_policy_configuration_aidl_default",
srcs: ["audio_policy_configuration.xsd"],
package_name: "android.audio.policy.configuration",
nullability: true,
}

View File

@@ -0,0 +1,582 @@
// Signature format: 2.0
package android.audio.policy.configuration {
public class AttachedDevices {
ctor public AttachedDevices();
method @Nullable public java.util.List<java.lang.String> getItem();
}
public enum AudioChannelMask {
method @NonNull public String getRawName();
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_1;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_10;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_11;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_12;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_13;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_14;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_15;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_16;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_17;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_18;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_19;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_2;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_20;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_21;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_22;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_23;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_24;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_3;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_4;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_5;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_6;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_7;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_8;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_INDEX_MASK_9;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_IN_2POINT0POINT2;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_IN_2POINT1;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_IN_2POINT1POINT2;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_IN_3POINT0POINT2;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_IN_3POINT1;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_IN_3POINT1POINT2;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_IN_5POINT1;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_IN_6;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_IN_FRONT_BACK;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_IN_MONO;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_IN_PENTA;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_IN_QUAD;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_IN_STEREO;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_IN_TRI;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_IN_VOICE_CALL_MONO;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_IN_VOICE_DNLINK_MONO;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_IN_VOICE_UPLINK_MONO;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_NONE;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_13POINT0;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_22POINT2;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_2POINT0POINT2;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_2POINT1;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_2POINT1POINT2;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_3POINT0POINT2;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_3POINT1;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_3POINT1POINT2;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_5POINT1;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_5POINT1POINT2;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_5POINT1POINT4;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_5POINT1_BACK;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_5POINT1_SIDE;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_6POINT1;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_7POINT1;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_7POINT1POINT2;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_7POINT1POINT4;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_9POINT1POINT4;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_9POINT1POINT6;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_HAPTIC_AB;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_MONO;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_MONO_HAPTIC_A;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_MONO_HAPTIC_AB;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_PENTA;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_QUAD;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_QUAD_BACK;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_QUAD_SIDE;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_STEREO;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_STEREO_HAPTIC_A;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_STEREO_HAPTIC_AB;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_SURROUND;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_TRI;
enum_constant public static final android.audio.policy.configuration.AudioChannelMask AUDIO_CHANNEL_OUT_TRI_BACK;
}
public enum AudioDevice {
method @NonNull public String getRawName();
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_AMBIENT;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_ANLG_DOCK_HEADSET;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_AUX_DIGITAL;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_BACK_MIC;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_BLE_HEADSET;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_BLUETOOTH_A2DP;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_BLUETOOTH_BLE;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_BUILTIN_MIC;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_BUS;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_COMMUNICATION;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_DEFAULT;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_DGTL_DOCK_HEADSET;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_ECHO_REFERENCE;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_FM_TUNER;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_HDMI;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_HDMI_ARC;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_HDMI_EARC;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_IP;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_LINE;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_LOOPBACK;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_PROXY;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_REMOTE_SUBMIX;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_SPDIF;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_STUB;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_TELEPHONY_RX;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_TV_TUNER;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_USB_ACCESSORY;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_USB_DEVICE;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_USB_HEADSET;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_VOICE_CALL;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_IN_WIRED_HEADSET;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_NONE;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_AUX_DIGITAL;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_AUX_LINE;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_BLE_BROADCAST;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_BLE_HEADSET;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_BLE_SPEAKER;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_BLUETOOTH_A2DP;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_BLUETOOTH_SCO;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_BUS;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_DEFAULT;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_EARPIECE;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_ECHO_CANCELLER;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_FM;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_HDMI;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_HDMI_ARC;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_HDMI_EARC;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_HEARING_AID;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_IP;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_LINE;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_MULTICHANNEL_GROUP;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_PROXY;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_SPDIF;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_SPEAKER;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_SPEAKER_SAFE;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_STUB;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_TELEPHONY_TX;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_USB_ACCESSORY;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_USB_DEVICE;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_USB_HEADSET;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_WIRED_HEADPHONE;
enum_constant public static final android.audio.policy.configuration.AudioDevice AUDIO_DEVICE_OUT_WIRED_HEADSET;
}
public enum AudioFormat {
method @NonNull public String getRawName();
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_ADIF;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_ADTS;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_ADTS_ELD;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_ADTS_ERLC;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_ADTS_HE_V1;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_ADTS_HE_V2;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_ADTS_LC;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_ADTS_LD;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_ADTS_LTP;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_ADTS_MAIN;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_ADTS_SCALABLE;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_ADTS_SSR;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_ADTS_XHE;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_ELD;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_ERLC;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_HE_V1;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_HE_V2;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_LATM;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_LATM_HE_V1;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_LATM_HE_V2;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_LATM_LC;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_LC;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_LD;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_LTP;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_MAIN;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_SCALABLE;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_SSR;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AAC_XHE;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AC3;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AC4;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AC4_L4;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_ALAC;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AMR_NB;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AMR_WB;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_AMR_WB_PLUS;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_APE;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_APTX;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_APTX_ADAPTIVE;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_APTX_ADAPTIVE_QLEA;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_APTX_ADAPTIVE_R4;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_APTX_HD;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_APTX_TWSP;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_CELT;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_DEFAULT;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_DOLBY_TRUEHD;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_DRA;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_DSD;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_DTS;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_DTS_HD;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_DTS_HD_MA;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_DTS_UHD;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_DTS_UHD_P2;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_EVRC;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_EVRCB;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_EVRCNW;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_EVRCWB;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_E_AC3;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_E_AC3_JOC;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_FLAC;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_HE_AAC_V1;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_HE_AAC_V2;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_IAMF_BASE_AAC;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_IAMF_BASE_ENHANCED_AAC;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_IAMF_BASE_ENHANCED_FLAC;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_IAMF_BASE_ENHANCED_OPUS;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_IAMF_BASE_ENHANCED_PCM;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_IAMF_BASE_FLAC;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_IAMF_BASE_OPUS;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_IAMF_BASE_PCM;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_IAMF_SIMPLE_AAC;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_IAMF_SIMPLE_FLAC;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_IAMF_SIMPLE_OPUS;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_IAMF_SIMPLE_PCM;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_IEC60958;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_IEC61937;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_LC3;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_LDAC;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_LHDC;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_LHDC_LL;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_MAT;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_MAT_1_0;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_MAT_2_0;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_MAT_2_1;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_MP2;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_MP3;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_MPEGH_BL_L3;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_MPEGH_BL_L4;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_MPEGH_LC_L3;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_MPEGH_LC_L4;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_OPUS;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_PCM_16_BIT;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_PCM_24_BIT_PACKED;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_PCM_32_BIT;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_PCM_8_24_BIT;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_PCM_8_BIT;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_PCM_FLOAT;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_QCELP;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_SBC;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_VORBIS;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_WMA;
enum_constant public static final android.audio.policy.configuration.AudioFormat AUDIO_FORMAT_WMA_PRO;
}
public enum AudioGainMode {
method @NonNull public String getRawName();
enum_constant public static final android.audio.policy.configuration.AudioGainMode AUDIO_GAIN_MODE_CHANNELS;
enum_constant public static final android.audio.policy.configuration.AudioGainMode AUDIO_GAIN_MODE_JOINT;
enum_constant public static final android.audio.policy.configuration.AudioGainMode AUDIO_GAIN_MODE_RAMP;
}
public enum AudioInOutFlag {
method @NonNull public String getRawName();
enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_INPUT_FLAG_DIRECT;
enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_INPUT_FLAG_FAST;
enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_INPUT_FLAG_HOTWORD_TAP;
enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_INPUT_FLAG_HW_AV_SYNC;
enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_INPUT_FLAG_HW_HOTWORD;
enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_INPUT_FLAG_HW_LOOKBACK;
enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_INPUT_FLAG_MMAP_NOIRQ;
enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_INPUT_FLAG_RAW;
enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_INPUT_FLAG_SYNC;
enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_INPUT_FLAG_ULTRASOUND;
enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_INPUT_FLAG_VOIP_TX;
enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_OUTPUT_FLAG_BIT_PERFECT;
enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_OUTPUT_FLAG_DIRECT;
enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_OUTPUT_FLAG_DIRECT_PCM;
enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_OUTPUT_FLAG_FAST;
enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_OUTPUT_FLAG_GAPLESS_OFFLOAD;
enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_OUTPUT_FLAG_HW_AV_SYNC;
enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO;
enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_OUTPUT_FLAG_INCALL_MUSIC;
enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_OUTPUT_FLAG_MMAP_NOIRQ;
enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_OUTPUT_FLAG_NON_BLOCKING;
enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_OUTPUT_FLAG_PRIMARY;
enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_OUTPUT_FLAG_RAW;
enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_OUTPUT_FLAG_SPATIALIZER;
enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_OUTPUT_FLAG_SYNC;
enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_OUTPUT_FLAG_TTS;
enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_OUTPUT_FLAG_ULTRASOUND;
enum_constant public static final android.audio.policy.configuration.AudioInOutFlag AUDIO_OUTPUT_FLAG_VOIP_RX;
}
public class AudioPolicyConfiguration {
ctor public AudioPolicyConfiguration();
method @Nullable public android.audio.policy.configuration.GlobalConfiguration getGlobalConfiguration();
method @Nullable public java.util.List<android.audio.policy.configuration.Modules> getModules();
method @Nullable public android.audio.policy.configuration.SurroundSound getSurroundSound();
method @Nullable public android.audio.policy.configuration.Version getVersion();
method @Nullable public java.util.List<android.audio.policy.configuration.Volumes> getVolumes();
method public void setGlobalConfiguration(@Nullable android.audio.policy.configuration.GlobalConfiguration);
method public void setSurroundSound(@Nullable android.audio.policy.configuration.SurroundSound);
method public void setVersion(@Nullable android.audio.policy.configuration.Version);
}
public enum AudioSource {
method @NonNull public String getRawName();
enum_constant public static final android.audio.policy.configuration.AudioSource AUDIO_SOURCE_CAMCORDER;
enum_constant public static final android.audio.policy.configuration.AudioSource AUDIO_SOURCE_DEFAULT;
enum_constant public static final android.audio.policy.configuration.AudioSource AUDIO_SOURCE_ECHO_REFERENCE;
enum_constant public static final android.audio.policy.configuration.AudioSource AUDIO_SOURCE_FM_TUNER;
enum_constant public static final android.audio.policy.configuration.AudioSource AUDIO_SOURCE_HOTWORD;
enum_constant public static final android.audio.policy.configuration.AudioSource AUDIO_SOURCE_MIC;
enum_constant public static final android.audio.policy.configuration.AudioSource AUDIO_SOURCE_REMOTE_SUBMIX;
enum_constant public static final android.audio.policy.configuration.AudioSource AUDIO_SOURCE_ULTRASOUND;
enum_constant public static final android.audio.policy.configuration.AudioSource AUDIO_SOURCE_UNPROCESSED;
enum_constant public static final android.audio.policy.configuration.AudioSource AUDIO_SOURCE_VOICE_CALL;
enum_constant public static final android.audio.policy.configuration.AudioSource AUDIO_SOURCE_VOICE_COMMUNICATION;
enum_constant public static final android.audio.policy.configuration.AudioSource AUDIO_SOURCE_VOICE_DOWNLINK;
enum_constant public static final android.audio.policy.configuration.AudioSource AUDIO_SOURCE_VOICE_PERFORMANCE;
enum_constant public static final android.audio.policy.configuration.AudioSource AUDIO_SOURCE_VOICE_RECOGNITION;
enum_constant public static final android.audio.policy.configuration.AudioSource AUDIO_SOURCE_VOICE_UPLINK;
}
public enum AudioStreamType {
method @NonNull public String getRawName();
enum_constant public static final android.audio.policy.configuration.AudioStreamType AUDIO_STREAM_ACCESSIBILITY;
enum_constant public static final android.audio.policy.configuration.AudioStreamType AUDIO_STREAM_ALARM;
enum_constant public static final android.audio.policy.configuration.AudioStreamType AUDIO_STREAM_ASSISTANT;
enum_constant public static final android.audio.policy.configuration.AudioStreamType AUDIO_STREAM_BLUETOOTH_SCO;
enum_constant public static final android.audio.policy.configuration.AudioStreamType AUDIO_STREAM_CALL_ASSISTANT;
enum_constant public static final android.audio.policy.configuration.AudioStreamType AUDIO_STREAM_DTMF;
enum_constant public static final android.audio.policy.configuration.AudioStreamType AUDIO_STREAM_ENFORCED_AUDIBLE;
enum_constant public static final android.audio.policy.configuration.AudioStreamType AUDIO_STREAM_MUSIC;
enum_constant public static final android.audio.policy.configuration.AudioStreamType AUDIO_STREAM_NOTIFICATION;
enum_constant public static final android.audio.policy.configuration.AudioStreamType AUDIO_STREAM_PATCH;
enum_constant public static final android.audio.policy.configuration.AudioStreamType AUDIO_STREAM_REROUTING;
enum_constant public static final android.audio.policy.configuration.AudioStreamType AUDIO_STREAM_RING;
enum_constant public static final android.audio.policy.configuration.AudioStreamType AUDIO_STREAM_SYSTEM;
enum_constant public static final android.audio.policy.configuration.AudioStreamType AUDIO_STREAM_TTS;
enum_constant public static final android.audio.policy.configuration.AudioStreamType AUDIO_STREAM_VOICE_CALL;
}
public enum DeviceCategory {
method @NonNull public String getRawName();
enum_constant public static final android.audio.policy.configuration.DeviceCategory DEVICE_CATEGORY_EARPIECE;
enum_constant public static final android.audio.policy.configuration.DeviceCategory DEVICE_CATEGORY_EXT_MEDIA;
enum_constant public static final android.audio.policy.configuration.DeviceCategory DEVICE_CATEGORY_HEADSET;
enum_constant public static final android.audio.policy.configuration.DeviceCategory DEVICE_CATEGORY_HEARING_AID;
enum_constant public static final android.audio.policy.configuration.DeviceCategory DEVICE_CATEGORY_SPEAKER;
}
public class DevicePorts {
ctor public DevicePorts();
method @Nullable public java.util.List<android.audio.policy.configuration.DevicePorts.DevicePort> getDevicePort();
}
public static class DevicePorts.DevicePort {
ctor public DevicePorts.DevicePort();
method @Nullable public String getAddress();
method @Nullable public java.util.List<java.lang.String> getEncodedFormats();
method @Nullable public android.audio.policy.configuration.Gains getGains();
method @Nullable public java.util.List<android.audio.policy.configuration.Profile> getProfile();
method @Nullable public android.audio.policy.configuration.Role getRole();
method @Nullable public String getTagName();
method @Nullable public String getType();
method @Nullable public boolean get_default();
method public void setAddress(@Nullable String);
method public void setEncodedFormats(@Nullable java.util.List<java.lang.String>);
method public void setGains(@Nullable android.audio.policy.configuration.Gains);
method public void setRole(@Nullable android.audio.policy.configuration.Role);
method public void setTagName(@Nullable String);
method public void setType(@Nullable String);
method public void set_default(@Nullable boolean);
}
public enum EngineSuffix {
method @NonNull public String getRawName();
enum_constant public static final android.audio.policy.configuration.EngineSuffix _default;
enum_constant public static final android.audio.policy.configuration.EngineSuffix configurable;
}
public class Gains {
ctor public Gains();
method @Nullable public java.util.List<android.audio.policy.configuration.Gains.Gain> getGain();
}
public static class Gains.Gain {
ctor public Gains.Gain();
method @Nullable public android.audio.policy.configuration.AudioChannelMask getChannel_mask();
method @Nullable public int getDefaultValueMB();
method @Nullable public int getMaxRampMs();
method @Nullable public int getMaxValueMB();
method @Nullable public int getMinRampMs();
method @Nullable public int getMinValueMB();
method @Nullable public java.util.List<android.audio.policy.configuration.AudioGainMode> getMode();
method @Nullable public int getStepValueMB();
method @Nullable public boolean getUseForVolume();
method public void setChannel_mask(@Nullable android.audio.policy.configuration.AudioChannelMask);
method public void setDefaultValueMB(@Nullable int);
method public void setMaxRampMs(@Nullable int);
method public void setMaxValueMB(@Nullable int);
method public void setMinRampMs(@Nullable int);
method public void setMinValueMB(@Nullable int);
method public void setMode(@Nullable java.util.List<android.audio.policy.configuration.AudioGainMode>);
method public void setStepValueMB(@Nullable int);
method public void setUseForVolume(@Nullable boolean);
}
public class GlobalConfiguration {
ctor public GlobalConfiguration();
method @Nullable public boolean getCall_screen_mode_supported();
method @Nullable public android.audio.policy.configuration.EngineSuffix getEngine_library();
method @Nullable public boolean getSpeaker_drc_enabled();
method public void setCall_screen_mode_supported(@Nullable boolean);
method public void setEngine_library(@Nullable android.audio.policy.configuration.EngineSuffix);
method public void setSpeaker_drc_enabled(@Nullable boolean);
}
public enum HalVersion {
method @NonNull public String getRawName();
enum_constant public static final android.audio.policy.configuration.HalVersion _2_0;
enum_constant public static final android.audio.policy.configuration.HalVersion _3_0;
}
public class MixPorts {
ctor public MixPorts();
method @Nullable public java.util.List<android.audio.policy.configuration.MixPorts.MixPort> getMixPort();
}
public static class MixPorts.MixPort {
ctor public MixPorts.MixPort();
method @Nullable public java.util.List<android.audio.policy.configuration.AudioInOutFlag> getFlags();
method @Nullable public android.audio.policy.configuration.Gains getGains();
method @Nullable public long getMaxActiveCount();
method @Nullable public long getMaxOpenCount();
method @Nullable public String getName();
method @Nullable public java.util.List<android.audio.policy.configuration.Profile> getProfile();
method @Nullable public long getRecommendedMuteDurationMs();
method @Nullable public android.audio.policy.configuration.Role getRole();
method public void setFlags(@Nullable java.util.List<android.audio.policy.configuration.AudioInOutFlag>);
method public void setGains(@Nullable android.audio.policy.configuration.Gains);
method public void setMaxActiveCount(@Nullable long);
method public void setMaxOpenCount(@Nullable long);
method public void setName(@Nullable String);
method public void setRecommendedMuteDurationMs(@Nullable long);
method public void setRole(@Nullable android.audio.policy.configuration.Role);
}
public enum MixType {
method @NonNull public String getRawName();
enum_constant public static final android.audio.policy.configuration.MixType mix;
enum_constant public static final android.audio.policy.configuration.MixType mux;
}
public class Modules {
ctor public Modules();
method @Nullable public java.util.List<android.audio.policy.configuration.Modules.Module> getModule();
}
public static class Modules.Module {
ctor public Modules.Module();
method @Nullable public android.audio.policy.configuration.AttachedDevices getAttachedDevices();
method @Nullable public String getDefaultOutputDevice();
method @Nullable public android.audio.policy.configuration.DevicePorts getDevicePorts();
method @Nullable public android.audio.policy.configuration.HalVersion getHalVersion();
method @Nullable public android.audio.policy.configuration.MixPorts getMixPorts();
method @Nullable public String getName();
method @Nullable public android.audio.policy.configuration.Routes getRoutes();
method public void setAttachedDevices(@Nullable android.audio.policy.configuration.AttachedDevices);
method public void setDefaultOutputDevice(@Nullable String);
method public void setDevicePorts(@Nullable android.audio.policy.configuration.DevicePorts);
method public void setHalVersion(@Nullable android.audio.policy.configuration.HalVersion);
method public void setMixPorts(@Nullable android.audio.policy.configuration.MixPorts);
method public void setName(@Nullable String);
method public void setRoutes(@Nullable android.audio.policy.configuration.Routes);
}
public class Profile {
ctor public Profile();
method @Nullable public java.util.List<android.audio.policy.configuration.AudioChannelMask> getChannelMasks();
method @Nullable public String getFormat();
method @Nullable public java.util.List<java.math.BigInteger> getSamplingRates();
method public void setChannelMasks(@Nullable java.util.List<android.audio.policy.configuration.AudioChannelMask>);
method public void setFormat(@Nullable String);
method public void setSamplingRates(@Nullable java.util.List<java.math.BigInteger>);
}
public class Reference {
ctor public Reference();
method @Nullable public String getName();
method @Nullable public java.util.List<java.lang.String> getPoint();
method public void setName(@Nullable String);
}
public enum Role {
method @NonNull public String getRawName();
enum_constant public static final android.audio.policy.configuration.Role sink;
enum_constant public static final android.audio.policy.configuration.Role source;
}
public class Routes {
ctor public Routes();
method @Nullable public java.util.List<android.audio.policy.configuration.Routes.Route> getRoute();
}
public static class Routes.Route {
ctor public Routes.Route();
method @Nullable public String getSink();
method @Nullable public String getSources();
method @Nullable public android.audio.policy.configuration.MixType getType();
method public void setSink(@Nullable String);
method public void setSources(@Nullable String);
method public void setType(@Nullable android.audio.policy.configuration.MixType);
}
public class SurroundFormats {
ctor public SurroundFormats();
method @Nullable public java.util.List<android.audio.policy.configuration.SurroundFormats.Format> getFormat();
}
public static class SurroundFormats.Format {
ctor public SurroundFormats.Format();
method @Nullable public String getName();
method @Nullable public java.util.List<java.lang.String> getSubformats();
method public void setName(@Nullable String);
method public void setSubformats(@Nullable java.util.List<java.lang.String>);
}
public class SurroundSound {
ctor public SurroundSound();
method @Nullable public android.audio.policy.configuration.SurroundFormats getFormats();
method public void setFormats(@Nullable android.audio.policy.configuration.SurroundFormats);
}
public enum Version {
method @NonNull public String getRawName();
enum_constant public static final android.audio.policy.configuration.Version _7_0;
enum_constant public static final android.audio.policy.configuration.Version _7_1;
}
public class Volume {
ctor public Volume();
method @Nullable public android.audio.policy.configuration.DeviceCategory getDeviceCategory();
method @Nullable public java.util.List<java.lang.String> getPoint();
method @Nullable public String getRef();
method @Nullable public android.audio.policy.configuration.AudioStreamType getStream();
method public void setDeviceCategory(@Nullable android.audio.policy.configuration.DeviceCategory);
method public void setRef(@Nullable String);
method public void setStream(@Nullable android.audio.policy.configuration.AudioStreamType);
}
public class Volumes {
ctor public Volumes();
method @Nullable public java.util.List<android.audio.policy.configuration.Reference> getReference();
method @Nullable public java.util.List<android.audio.policy.configuration.Volume> getVolume();
}
public class XmlParser {
ctor public XmlParser();
method @Nullable public static android.audio.policy.configuration.AudioPolicyConfiguration read(@NonNull java.io.InputStream) throws javax.xml.datatype.DatatypeConfigurationException, java.io.IOException, org.xmlpull.v1.XmlPullParserException;
method @Nullable public static String readText(@NonNull org.xmlpull.v1.XmlPullParser) throws java.io.IOException, org.xmlpull.v1.XmlPullParserException;
method public static void skip(@NonNull org.xmlpull.v1.XmlPullParser) throws java.io.IOException, org.xmlpull.v1.XmlPullParserException;
}
}

View File

@@ -0,0 +1 @@
// Signature format: 2.0

View File

@@ -0,0 +1,778 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Copyright (C) 2020 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<xs:schema version="2.0"
elementFormDefault="qualified"
attributeFormDefault="unqualified"
xmlns:xs="http://www.w3.org/2001/XMLSchema">
<!-- List the config versions supported by audio policy. -->
<xs:simpleType name="version">
<xs:restriction base="xs:decimal">
<xs:enumeration value="7.0"/>
<xs:enumeration value="7.1"/>
</xs:restriction>
</xs:simpleType>
<xs:simpleType name="halVersion">
<xs:annotation>
<xs:documentation xml:lang="en">
Version of the interface the hal implements. Note that this
relates to legacy HAL API versions since HIDL APIs are versioned
using other mechanisms.
</xs:documentation>
</xs:annotation>
<xs:restriction base="xs:decimal">
<!-- List of HAL versions supported by the framework. -->
<xs:enumeration value="2.0"/>
<xs:enumeration value="3.0"/>
</xs:restriction>
</xs:simpleType>
<xs:element name="audioPolicyConfiguration">
<xs:complexType>
<xs:sequence>
<xs:element name="globalConfiguration" type="globalConfiguration"/>
<xs:element name="modules" type="modules" maxOccurs="unbounded"/>
<xs:element name="volumes" type="volumes" maxOccurs="unbounded"/>
<xs:element name="surroundSound" type="surroundSound" minOccurs="0" />
</xs:sequence>
<xs:attribute name="version" type="version"/>
</xs:complexType>
<xs:key name="moduleNameKey">
<xs:selector xpath="modules/module"/>
<xs:field xpath="@name"/>
</xs:key>
<xs:unique name="volumeTargetUniqueness">
<xs:selector xpath="volumes/volume"/>
<xs:field xpath="@stream"/>
<xs:field xpath="@deviceCategory"/>
</xs:unique>
<xs:key name="volumeCurveNameKey">
<xs:selector xpath="volumes/reference"/>
<xs:field xpath="@name"/>
</xs:key>
<xs:keyref name="volumeCurveRef" refer="volumeCurveNameKey">
<xs:selector xpath="volumes/volume"/>
<xs:field xpath="@ref"/>
</xs:keyref>
</xs:element>
<xs:complexType name="globalConfiguration">
<xs:attribute name="speaker_drc_enabled" type="xs:boolean" use="required"/>
<xs:attribute name="call_screen_mode_supported" type="xs:boolean" use="optional"/>
<xs:attribute name="engine_library" type="engineSuffix" use="optional"/>
</xs:complexType>
<xs:complexType name="modules">
<xs:annotation>
<xs:documentation xml:lang="en">
There should be one section per audio HW module present on the platform.
Each <module/> contains two mandatory tags: “halVersion” and “name”.
The module "name" is the same as in previous .conf file.
Each module must contain the following sections:
- <devicePorts/>: a list of device descriptors for all
input and output devices accessible via this module.
This contains both permanently attached devices and removable devices.
- <mixPorts/>: listing all output and input streams exposed by the audio HAL
- <routes/>: list of possible connections between input
and output devices or between stream and devices.
A <route/> is defined by a set of 3 attributes:
-"type": mux|mix means all sources are mutually exclusive (mux) or can be mixed (mix)
-"sink": the sink involved in this route
-"sources": all the sources that can be connected to the sink via this route
- <attachedDevices/>: permanently attached devices.
The attachedDevices section is a list of devices names.
Their names correspond to device names defined in "devicePorts" section.
- <defaultOutputDevice/> is the device to be used when no policy rule applies
</xs:documentation>
</xs:annotation>
<xs:sequence>
<xs:element name="module" maxOccurs="unbounded">
<xs:complexType>
<xs:sequence>
<xs:element name="attachedDevices" type="attachedDevices" minOccurs="0">
<xs:unique name="attachedDevicesUniqueness">
<xs:selector xpath="item"/>
<xs:field xpath="."/>
</xs:unique>
</xs:element>
<xs:element name="defaultOutputDevice" type="xs:token" minOccurs="0"/>
<xs:element name="mixPorts" type="mixPorts" minOccurs="0"/>
<xs:element name="devicePorts" type="devicePorts" minOccurs="0"/>
<xs:element name="routes" type="routes" minOccurs="0"/>
</xs:sequence>
<xs:attribute name="name" type="xs:string" use="required"/>
<xs:attribute name="halVersion" type="halVersion" use="required"/>
</xs:complexType>
<xs:unique name="mixPortNameUniqueness">
<xs:selector xpath="mixPorts/mixPort"/>
<xs:field xpath="@name"/>
</xs:unique>
<xs:key name="devicePortNameKey">
<xs:selector xpath="devicePorts/devicePort"/>
<xs:field xpath="@tagName"/>
</xs:key>
<xs:unique name="devicePortUniqueness">
<xs:selector xpath="devicePorts/devicePort"/>
<xs:field xpath="@type"/>
<xs:field xpath="@address"/>
</xs:unique>
<xs:keyref name="defaultOutputDeviceRef" refer="devicePortNameKey">
<xs:selector xpath="defaultOutputDevice"/>
<xs:field xpath="."/>
</xs:keyref>
<xs:keyref name="attachedDeviceRef" refer="devicePortNameKey">
<xs:selector xpath="attachedDevices/item"/>
<xs:field xpath="."/>
</xs:keyref>
<!-- The following 3 constraints try to make sure each sink port
is referenced in one and only one route. -->
<xs:key name="routeSinkKey">
<!-- predicate [@type='sink'] does not work in xsd 1.0 -->
<xs:selector xpath="devicePorts/devicePort|mixPorts/mixPort"/>
<xs:field xpath="@tagName|@name"/>
</xs:key>
<xs:keyref name="routeSinkRef" refer="routeSinkKey">
<xs:selector xpath="routes/route"/>
<xs:field xpath="@sink"/>
</xs:keyref>
<xs:unique name="routeUniqueness">
<xs:selector xpath="routes/route"/>
<xs:field xpath="@sink"/>
</xs:unique>
</xs:element>
</xs:sequence>
</xs:complexType>
<xs:complexType name="attachedDevices">
<xs:sequence>
<xs:element name="item" type="xs:token" minOccurs="0" maxOccurs="unbounded"/>
</xs:sequence>
</xs:complexType>
<xs:simpleType name="audioInOutFlag">
<xs:annotation>
<xs:documentation xml:lang="en">
The flags indicate suggested stream attributes supported by the profile.
</xs:documentation>
</xs:annotation>
<xs:restriction base="xs:string">
<xs:enumeration value="AUDIO_OUTPUT_FLAG_DIRECT" />
<xs:enumeration value="AUDIO_OUTPUT_FLAG_PRIMARY" />
<xs:enumeration value="AUDIO_OUTPUT_FLAG_FAST" />
<xs:enumeration value="AUDIO_OUTPUT_FLAG_DEEP_BUFFER" />
<xs:enumeration value="AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD" />
<xs:enumeration value="AUDIO_OUTPUT_FLAG_NON_BLOCKING" />
<xs:enumeration value="AUDIO_OUTPUT_FLAG_HW_AV_SYNC" />
<xs:enumeration value="AUDIO_OUTPUT_FLAG_TTS" />
<xs:enumeration value="AUDIO_OUTPUT_FLAG_RAW" />
<xs:enumeration value="AUDIO_OUTPUT_FLAG_SYNC" />
<xs:enumeration value="AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO" />
<xs:enumeration value="AUDIO_OUTPUT_FLAG_DIRECT_PCM" />
<xs:enumeration value="AUDIO_OUTPUT_FLAG_MMAP_NOIRQ" />
<xs:enumeration value="AUDIO_OUTPUT_FLAG_VOIP_RX" />
<xs:enumeration value="AUDIO_OUTPUT_FLAG_INCALL_MUSIC" />
<xs:enumeration value="AUDIO_OUTPUT_FLAG_GAPLESS_OFFLOAD" />
<xs:enumeration value="AUDIO_OUTPUT_FLAG_SPATIALIZER" />
<xs:enumeration value="AUDIO_OUTPUT_FLAG_ULTRASOUND" />
<xs:enumeration value="AUDIO_OUTPUT_FLAG_BIT_PERFECT" />
<xs:enumeration value="AUDIO_INPUT_FLAG_FAST" />
<xs:enumeration value="AUDIO_INPUT_FLAG_HW_HOTWORD" />
<xs:enumeration value="AUDIO_INPUT_FLAG_RAW" />
<xs:enumeration value="AUDIO_INPUT_FLAG_SYNC" />
<xs:enumeration value="AUDIO_INPUT_FLAG_MMAP_NOIRQ" />
<xs:enumeration value="AUDIO_INPUT_FLAG_VOIP_TX" />
<xs:enumeration value="AUDIO_INPUT_FLAG_HW_AV_SYNC" />
<xs:enumeration value="AUDIO_INPUT_FLAG_DIRECT" />
<xs:enumeration value="AUDIO_INPUT_FLAG_ULTRASOUND" />
<xs:enumeration value="AUDIO_INPUT_FLAG_HOTWORD_TAP" />
<xs:enumeration value="AUDIO_INPUT_FLAG_HW_LOOKBACK" />
</xs:restriction>
</xs:simpleType>
<xs:simpleType name="audioInOutFlags">
<xs:list itemType="audioInOutFlag" />
</xs:simpleType>
<xs:simpleType name="role">
<xs:restriction base="xs:string">
<xs:enumeration value="sink"/>
<xs:enumeration value="source"/>
</xs:restriction>
</xs:simpleType>
<xs:complexType name="mixPorts">
<xs:sequence>
<xs:element name="mixPort" minOccurs="0" maxOccurs="unbounded">
<xs:complexType>
<xs:sequence>
<xs:element name="profile" type="profile" minOccurs="0" maxOccurs="unbounded"/>
<xs:element name="gains" type="gains" minOccurs="0"/>
</xs:sequence>
<xs:attribute name="name" type="xs:token" use="required"/>
<xs:attribute name="role" type="role" use="required"/>
<xs:attribute name="flags" type="audioInOutFlags"/>
<xs:attribute name="maxOpenCount" type="xs:unsignedInt"/>
<xs:attribute name="maxActiveCount" type="xs:unsignedInt"/>
<xs:attribute name="recommendedMuteDurationMs" type="xs:unsignedInt"/>
</xs:complexType>
<xs:unique name="mixPortProfileUniqueness">
<xs:selector xpath="profile"/>
<xs:field xpath="format"/>
<xs:field xpath="samplingRate"/>
<xs:field xpath="channelMasks"/>
</xs:unique>
<xs:unique name="mixPortGainUniqueness">
<xs:selector xpath="gains/gain"/>
<xs:field xpath="@name"/>
</xs:unique>
</xs:element>
</xs:sequence>
</xs:complexType>
<xs:simpleType name="audioDevice">
<xs:restriction base="xs:string">
<xs:enumeration value="AUDIO_DEVICE_NONE"/>
<xs:enumeration value="AUDIO_DEVICE_OUT_EARPIECE"/>
<xs:enumeration value="AUDIO_DEVICE_OUT_SPEAKER"/>
<xs:enumeration value="AUDIO_DEVICE_OUT_WIRED_HEADSET"/>
<xs:enumeration value="AUDIO_DEVICE_OUT_WIRED_HEADPHONE"/>
<xs:enumeration value="AUDIO_DEVICE_OUT_BLUETOOTH_SCO"/>
<xs:enumeration value="AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET"/>
<xs:enumeration value="AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT"/>
<xs:enumeration value="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP"/>
<xs:enumeration value="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES"/>
<xs:enumeration value="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER"/>
<xs:enumeration value="AUDIO_DEVICE_OUT_HDMI"/>
<xs:enumeration value="AUDIO_DEVICE_OUT_HDMI_EARC"/>
<xs:enumeration value="AUDIO_DEVICE_OUT_AUX_DIGITAL"/>
<xs:enumeration value="AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET"/>
<xs:enumeration value="AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET"/>
<xs:enumeration value="AUDIO_DEVICE_OUT_USB_ACCESSORY"/>
<xs:enumeration value="AUDIO_DEVICE_OUT_USB_DEVICE"/>
<xs:enumeration value="AUDIO_DEVICE_OUT_REMOTE_SUBMIX"/>
<xs:enumeration value="AUDIO_DEVICE_OUT_TELEPHONY_TX"/>
<xs:enumeration value="AUDIO_DEVICE_OUT_LINE"/>
<xs:enumeration value="AUDIO_DEVICE_OUT_HDMI_ARC"/>
<xs:enumeration value="AUDIO_DEVICE_OUT_SPDIF"/>
<xs:enumeration value="AUDIO_DEVICE_OUT_FM"/>
<xs:enumeration value="AUDIO_DEVICE_OUT_AUX_LINE"/>
<xs:enumeration value="AUDIO_DEVICE_OUT_SPEAKER_SAFE"/>
<xs:enumeration value="AUDIO_DEVICE_OUT_IP"/>
<xs:enumeration value="AUDIO_DEVICE_OUT_MULTICHANNEL_GROUP"/>
<xs:enumeration value="AUDIO_DEVICE_OUT_BUS"/>
<xs:enumeration value="AUDIO_DEVICE_OUT_PROXY"/>
<xs:enumeration value="AUDIO_DEVICE_OUT_USB_HEADSET"/>
<xs:enumeration value="AUDIO_DEVICE_OUT_HEARING_AID"/>
<xs:enumeration value="AUDIO_DEVICE_OUT_ECHO_CANCELLER"/>
<xs:enumeration value="AUDIO_DEVICE_OUT_BLE_HEADSET"/>
<xs:enumeration value="AUDIO_DEVICE_OUT_BLE_SPEAKER"/>
<xs:enumeration value="AUDIO_DEVICE_OUT_BLE_BROADCAST"/>
<xs:enumeration value="AUDIO_DEVICE_OUT_DEFAULT"/>
<xs:enumeration value="AUDIO_DEVICE_OUT_STUB"/>
<xs:enumeration value="AUDIO_DEVICE_IN_COMMUNICATION"/>
<xs:enumeration value="AUDIO_DEVICE_IN_AMBIENT"/>
<xs:enumeration value="AUDIO_DEVICE_IN_BUILTIN_MIC"/>
<xs:enumeration value="AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET"/>
<xs:enumeration value="AUDIO_DEVICE_IN_WIRED_HEADSET"/>
<xs:enumeration value="AUDIO_DEVICE_IN_HDMI"/>
<xs:enumeration value="AUDIO_DEVICE_IN_AUX_DIGITAL"/>
<xs:enumeration value="AUDIO_DEVICE_IN_VOICE_CALL"/>
<xs:enumeration value="AUDIO_DEVICE_IN_TELEPHONY_RX"/>
<xs:enumeration value="AUDIO_DEVICE_IN_BACK_MIC"/>
<xs:enumeration value="AUDIO_DEVICE_IN_REMOTE_SUBMIX"/>
<xs:enumeration value="AUDIO_DEVICE_IN_ANLG_DOCK_HEADSET"/>
<xs:enumeration value="AUDIO_DEVICE_IN_DGTL_DOCK_HEADSET"/>
<xs:enumeration value="AUDIO_DEVICE_IN_USB_ACCESSORY"/>
<xs:enumeration value="AUDIO_DEVICE_IN_USB_DEVICE"/>
<xs:enumeration value="AUDIO_DEVICE_IN_FM_TUNER"/>
<xs:enumeration value="AUDIO_DEVICE_IN_TV_TUNER"/>
<xs:enumeration value="AUDIO_DEVICE_IN_LINE"/>
<xs:enumeration value="AUDIO_DEVICE_IN_SPDIF"/>
<xs:enumeration value="AUDIO_DEVICE_IN_BLUETOOTH_A2DP"/>
<xs:enumeration value="AUDIO_DEVICE_IN_LOOPBACK"/>
<xs:enumeration value="AUDIO_DEVICE_IN_IP"/>
<xs:enumeration value="AUDIO_DEVICE_IN_BUS"/>
<xs:enumeration value="AUDIO_DEVICE_IN_PROXY"/>
<xs:enumeration value="AUDIO_DEVICE_IN_USB_HEADSET"/>
<xs:enumeration value="AUDIO_DEVICE_IN_BLUETOOTH_BLE"/>
<xs:enumeration value="AUDIO_DEVICE_IN_HDMI_ARC"/>
<xs:enumeration value="AUDIO_DEVICE_IN_HDMI_EARC"/>
<xs:enumeration value="AUDIO_DEVICE_IN_ECHO_REFERENCE"/>
<xs:enumeration value="AUDIO_DEVICE_IN_BLE_HEADSET"/>
<xs:enumeration value="AUDIO_DEVICE_IN_DEFAULT"/>
<xs:enumeration value="AUDIO_DEVICE_IN_STUB"/>
</xs:restriction>
</xs:simpleType>
<xs:simpleType name="vendorExtension">
<!-- Vendor extension names must be prefixed by "VX_" to distinguish them from
AOSP values. Vendors must namespace their names to avoid conflicts. The
namespace part must only use capital latin characters and decimal digits and
consist of at least 3 characters. The part of the extension name after the
namespace may in addition include underscores. Example for a hypothetical
Google virtual reality device:
<devicePort tagName="VR" type="VX_GOOGLE_VR" role="sink" />
-->
<xs:restriction base="xs:string">
<xs:pattern value="VX_[A-Z0-9]{3,}_[_A-Z0-9]+"/>
</xs:restriction>
</xs:simpleType>
<xs:simpleType name="extendableAudioDevice">
<xs:union memberTypes="audioDevice vendorExtension"/>
</xs:simpleType>
<xs:simpleType name="audioFormat">
<xs:restriction base="xs:string">
<xs:enumeration value="AUDIO_FORMAT_DEFAULT" />
<xs:enumeration value="AUDIO_FORMAT_PCM_16_BIT" />
<xs:enumeration value="AUDIO_FORMAT_PCM_8_BIT"/>
<xs:enumeration value="AUDIO_FORMAT_PCM_32_BIT"/>
<xs:enumeration value="AUDIO_FORMAT_PCM_8_24_BIT"/>
<xs:enumeration value="AUDIO_FORMAT_PCM_FLOAT"/>
<xs:enumeration value="AUDIO_FORMAT_PCM_24_BIT_PACKED"/>
<xs:enumeration value="AUDIO_FORMAT_MP3"/>
<xs:enumeration value="AUDIO_FORMAT_AMR_NB"/>
<xs:enumeration value="AUDIO_FORMAT_AMR_WB"/>
<xs:enumeration value="AUDIO_FORMAT_AAC"/>
<xs:enumeration value="AUDIO_FORMAT_AAC_MAIN"/>
<xs:enumeration value="AUDIO_FORMAT_AAC_LC"/>
<xs:enumeration value="AUDIO_FORMAT_AAC_SSR"/>
<xs:enumeration value="AUDIO_FORMAT_AAC_LTP"/>
<xs:enumeration value="AUDIO_FORMAT_AAC_HE_V1"/>
<xs:enumeration value="AUDIO_FORMAT_AAC_SCALABLE"/>
<xs:enumeration value="AUDIO_FORMAT_AAC_ERLC"/>
<xs:enumeration value="AUDIO_FORMAT_AAC_LD"/>
<xs:enumeration value="AUDIO_FORMAT_AAC_HE_V2"/>
<xs:enumeration value="AUDIO_FORMAT_AAC_ELD"/>
<xs:enumeration value="AUDIO_FORMAT_AAC_XHE"/>
<xs:enumeration value="AUDIO_FORMAT_HE_AAC_V1"/>
<xs:enumeration value="AUDIO_FORMAT_HE_AAC_V2"/>
<xs:enumeration value="AUDIO_FORMAT_VORBIS"/>
<xs:enumeration value="AUDIO_FORMAT_OPUS"/>
<xs:enumeration value="AUDIO_FORMAT_AC3"/>
<xs:enumeration value="AUDIO_FORMAT_E_AC3"/>
<xs:enumeration value="AUDIO_FORMAT_E_AC3_JOC"/>
<xs:enumeration value="AUDIO_FORMAT_DTS"/>
<xs:enumeration value="AUDIO_FORMAT_DTS_HD"/>
<xs:enumeration value="AUDIO_FORMAT_IEC61937"/>
<xs:enumeration value="AUDIO_FORMAT_DOLBY_TRUEHD"/>
<xs:enumeration value="AUDIO_FORMAT_EVRC"/>
<xs:enumeration value="AUDIO_FORMAT_EVRCB"/>
<xs:enumeration value="AUDIO_FORMAT_EVRCWB"/>
<xs:enumeration value="AUDIO_FORMAT_EVRCNW"/>
<xs:enumeration value="AUDIO_FORMAT_AAC_ADIF"/>
<xs:enumeration value="AUDIO_FORMAT_WMA"/>
<xs:enumeration value="AUDIO_FORMAT_WMA_PRO"/>
<xs:enumeration value="AUDIO_FORMAT_AMR_WB_PLUS"/>
<xs:enumeration value="AUDIO_FORMAT_MP2"/>
<xs:enumeration value="AUDIO_FORMAT_QCELP"/>
<xs:enumeration value="AUDIO_FORMAT_DSD"/>
<xs:enumeration value="AUDIO_FORMAT_FLAC"/>
<xs:enumeration value="AUDIO_FORMAT_ALAC"/>
<xs:enumeration value="AUDIO_FORMAT_APE"/>
<xs:enumeration value="AUDIO_FORMAT_AAC_ADTS"/>
<xs:enumeration value="AUDIO_FORMAT_AAC_ADTS_MAIN"/>
<xs:enumeration value="AUDIO_FORMAT_AAC_ADTS_LC"/>
<xs:enumeration value="AUDIO_FORMAT_AAC_ADTS_SSR"/>
<xs:enumeration value="AUDIO_FORMAT_AAC_ADTS_LTP"/>
<xs:enumeration value="AUDIO_FORMAT_AAC_ADTS_HE_V1"/>
<xs:enumeration value="AUDIO_FORMAT_AAC_ADTS_SCALABLE"/>
<xs:enumeration value="AUDIO_FORMAT_AAC_ADTS_ERLC"/>
<xs:enumeration value="AUDIO_FORMAT_AAC_ADTS_LD"/>
<xs:enumeration value="AUDIO_FORMAT_AAC_ADTS_HE_V2"/>
<xs:enumeration value="AUDIO_FORMAT_AAC_ADTS_ELD"/>
<xs:enumeration value="AUDIO_FORMAT_AAC_ADTS_XHE"/>
<xs:enumeration value="AUDIO_FORMAT_SBC"/>
<xs:enumeration value="AUDIO_FORMAT_APTX"/>
<xs:enumeration value="AUDIO_FORMAT_APTX_HD"/>
<xs:enumeration value="AUDIO_FORMAT_AC4"/>
<xs:enumeration value="AUDIO_FORMAT_AC4_L4"/>
<xs:enumeration value="AUDIO_FORMAT_LDAC"/>
<xs:enumeration value="AUDIO_FORMAT_MAT"/>
<xs:enumeration value="AUDIO_FORMAT_MAT_1_0"/>
<xs:enumeration value="AUDIO_FORMAT_MAT_2_0"/>
<xs:enumeration value="AUDIO_FORMAT_MAT_2_1"/>
<xs:enumeration value="AUDIO_FORMAT_AAC_LATM"/>
<xs:enumeration value="AUDIO_FORMAT_AAC_LATM_LC"/>
<xs:enumeration value="AUDIO_FORMAT_AAC_LATM_HE_V1"/>
<xs:enumeration value="AUDIO_FORMAT_AAC_LATM_HE_V2"/>
<xs:enumeration value="AUDIO_FORMAT_CELT"/>
<xs:enumeration value="AUDIO_FORMAT_APTX_ADAPTIVE"/>
<xs:enumeration value="AUDIO_FORMAT_LHDC"/>
<xs:enumeration value="AUDIO_FORMAT_LHDC_LL"/>
<xs:enumeration value="AUDIO_FORMAT_APTX_TWSP"/>
<xs:enumeration value="AUDIO_FORMAT_LC3"/>
<xs:enumeration value="AUDIO_FORMAT_MPEGH_BL_L3"/>
<xs:enumeration value="AUDIO_FORMAT_MPEGH_BL_L4"/>
<xs:enumeration value="AUDIO_FORMAT_MPEGH_LC_L3"/>
<xs:enumeration value="AUDIO_FORMAT_MPEGH_LC_L4"/>
<xs:enumeration value="AUDIO_FORMAT_IEC60958"/>
<xs:enumeration value="AUDIO_FORMAT_DTS_UHD"/>
<xs:enumeration value="AUDIO_FORMAT_DRA"/>
<xs:enumeration value="AUDIO_FORMAT_APTX_ADAPTIVE_QLEA"/>
<xs:enumeration value="AUDIO_FORMAT_APTX_ADAPTIVE_R4"/>
<xs:enumeration value="AUDIO_FORMAT_DTS_HD_MA"/>
<xs:enumeration value="AUDIO_FORMAT_DTS_UHD_P2"/>
<xs:enumeration value="AUDIO_FORMAT_IAMF_SIMPLE_OPUS"/>
<xs:enumeration value="AUDIO_FORMAT_IAMF_SIMPLE_AAC"/>
<xs:enumeration value="AUDIO_FORMAT_IAMF_SIMPLE_PCM"/>
<xs:enumeration value="AUDIO_FORMAT_IAMF_SIMPLE_FLAC"/>
<xs:enumeration value="AUDIO_FORMAT_IAMF_BASE_OPUS"/>
<xs:enumeration value="AUDIO_FORMAT_IAMF_BASE_AAC"/>
<xs:enumeration value="AUDIO_FORMAT_IAMF_BASE_PCM"/>
<xs:enumeration value="AUDIO_FORMAT_IAMF_BASE_FLAC"/>
<xs:enumeration value="AUDIO_FORMAT_IAMF_BASE_ENHANCED_OPUS"/>
<xs:enumeration value="AUDIO_FORMAT_IAMF_BASE_ENHANCED_AAC"/>
<xs:enumeration value="AUDIO_FORMAT_IAMF_BASE_ENHANCED_PCM"/>
<xs:enumeration value="AUDIO_FORMAT_IAMF_BASE_ENHANCED_FLAC"/>
</xs:restriction>
</xs:simpleType>
<xs:simpleType name="extendableAudioFormat">
<xs:union memberTypes="audioFormat vendorExtension"/>
</xs:simpleType>
<xs:simpleType name="samplingRates">
<xs:list itemType="xs:nonNegativeInteger" />
</xs:simpleType>
<xs:simpleType name="audioChannelMask">
<xs:annotation>
<xs:documentation xml:lang="en">
Audio channel mask specifies presence of particular channels.
There are two representations:
- positional (the traditional discrete channel specification,
e.g. "left", "right");
- indexed (similar to "tracks" in audio mixing; channels
are represented using numbers).
</xs:documentation>
</xs:annotation>
<xs:restriction base="xs:string">
<xs:enumeration value="AUDIO_CHANNEL_NONE"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_MONO"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_STEREO"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_2POINT1"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_TRI"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_TRI_BACK"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_3POINT1"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_2POINT0POINT2"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_2POINT1POINT2"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_3POINT0POINT2"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_3POINT1POINT2"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_QUAD"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_QUAD_BACK"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_QUAD_SIDE"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_SURROUND"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_PENTA"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_5POINT1"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_5POINT1_BACK"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_5POINT1_SIDE"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_5POINT1POINT2"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_5POINT1POINT4"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_6POINT1"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_7POINT1"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_7POINT1POINT2"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_7POINT1POINT4"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_9POINT1POINT4"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_9POINT1POINT6"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_13POINT0"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_22POINT2"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_MONO_HAPTIC_A"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_STEREO_HAPTIC_A"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_HAPTIC_AB"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_MONO_HAPTIC_AB"/>
<xs:enumeration value="AUDIO_CHANNEL_OUT_STEREO_HAPTIC_AB"/>
<xs:enumeration value="AUDIO_CHANNEL_IN_MONO"/>
<xs:enumeration value="AUDIO_CHANNEL_IN_STEREO"/>
<xs:enumeration value="AUDIO_CHANNEL_IN_2POINT1"/>
<xs:enumeration value="AUDIO_CHANNEL_IN_FRONT_BACK"/>
<xs:enumeration value="AUDIO_CHANNEL_IN_TRI"/>
<xs:enumeration value="AUDIO_CHANNEL_IN_3POINT1"/>
<xs:enumeration value="AUDIO_CHANNEL_IN_6"/>
<xs:enumeration value="AUDIO_CHANNEL_IN_2POINT0POINT2"/>
<xs:enumeration value="AUDIO_CHANNEL_IN_2POINT1POINT2"/>
<xs:enumeration value="AUDIO_CHANNEL_IN_3POINT0POINT2"/>
<xs:enumeration value="AUDIO_CHANNEL_IN_3POINT1POINT2"/>
<xs:enumeration value="AUDIO_CHANNEL_IN_QUAD"/>
<xs:enumeration value="AUDIO_CHANNEL_IN_PENTA"/>
<xs:enumeration value="AUDIO_CHANNEL_IN_5POINT1"/>
<xs:enumeration value="AUDIO_CHANNEL_IN_VOICE_UPLINK_MONO"/>
<xs:enumeration value="AUDIO_CHANNEL_IN_VOICE_DNLINK_MONO"/>
<xs:enumeration value="AUDIO_CHANNEL_IN_VOICE_CALL_MONO"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_1"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_2"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_3"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_4"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_5"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_6"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_7"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_8"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_9"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_10"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_11"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_12"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_13"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_14"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_15"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_16"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_17"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_18"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_19"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_20"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_21"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_22"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_23"/>
<xs:enumeration value="AUDIO_CHANNEL_INDEX_MASK_24"/>
</xs:restriction>
</xs:simpleType>
<xs:simpleType name="channelMasks">
<xs:list itemType="audioChannelMask" />
</xs:simpleType>
<xs:complexType name="profile">
<xs:attribute name="format" type="extendableAudioFormat" use="optional"/>
<xs:attribute name="samplingRates" type="samplingRates" use="optional"/>
<xs:attribute name="channelMasks" type="channelMasks" use="optional"/>
</xs:complexType>
<xs:simpleType name="audioGainMode">
<xs:restriction base="xs:string">
<xs:enumeration value="AUDIO_GAIN_MODE_JOINT"/>
<xs:enumeration value="AUDIO_GAIN_MODE_CHANNELS"/>
<xs:enumeration value="AUDIO_GAIN_MODE_RAMP"/>
</xs:restriction>
</xs:simpleType>
<xs:simpleType name="audioGainModeMaskUnrestricted">
<xs:list itemType="audioGainMode" />
</xs:simpleType>
<xs:simpleType name='audioGainModeMask'>
<xs:restriction base='audioGainModeMaskUnrestricted'>
<xs:minLength value='1' />
</xs:restriction>
</xs:simpleType>
<xs:complexType name="gains">
<xs:sequence>
<xs:element name="gain" minOccurs="0" maxOccurs="unbounded">
<xs:complexType>
<xs:attribute name="mode" type="audioGainModeMask" use="required"/>
<xs:attribute name="channel_mask" type="audioChannelMask" use="optional"/>
<xs:attribute name="minValueMB" type="xs:int" use="optional"/>
<xs:attribute name="maxValueMB" type="xs:int" use="optional"/>
<xs:attribute name="defaultValueMB" type="xs:int" use="optional"/>
<xs:attribute name="stepValueMB" type="xs:int" use="optional"/>
<xs:attribute name="minRampMs" type="xs:int" use="optional"/>
<xs:attribute name="maxRampMs" type="xs:int" use="optional"/>
<xs:attribute name="useForVolume" type="xs:boolean" use="optional"/>
</xs:complexType>
</xs:element>
</xs:sequence>
</xs:complexType>
<xs:complexType name="devicePorts">
<xs:sequence>
<xs:element name="devicePort" minOccurs="0" maxOccurs="unbounded">
<xs:complexType>
<xs:sequence>
<xs:element name="profile" type="profile" minOccurs="0" maxOccurs="unbounded"/>
<xs:element name="gains" type="gains" minOccurs="0"/>
</xs:sequence>
<xs:attribute name="tagName" type="xs:token" use="required"/>
<xs:attribute name="type" type="extendableAudioDevice" use="required"/>
<xs:attribute name="role" type="role" use="required"/>
<xs:attribute name="address" type="xs:string" use="optional" default=""/>
<!-- Note that XSD 1.0 can not check that a type only has one default. -->
<xs:attribute name="default" type="xs:boolean" use="optional">
<xs:annotation>
<xs:documentation xml:lang="en">
The default device will be used if multiple have the same type
and no explicit route request exists for a specific device of
that type.
</xs:documentation>
</xs:annotation>
</xs:attribute>
<xs:attribute name="encodedFormats" type="audioFormatsList" use="optional"
default="" />
</xs:complexType>
<xs:unique name="devicePortProfileUniqueness">
<xs:selector xpath="profile"/>
<xs:field xpath="format"/>
<xs:field xpath="samplingRate"/>
<xs:field xpath="channelMasks"/>
</xs:unique>
<xs:unique name="devicePortGainUniqueness">
<xs:selector xpath="gains/gain"/>
<xs:field xpath="@name"/>
</xs:unique>
</xs:element>
</xs:sequence>
</xs:complexType>
<xs:simpleType name="mixType">
<xs:restriction base="xs:string">
<xs:enumeration value="mix"/>
<xs:enumeration value="mux"/>
</xs:restriction>
</xs:simpleType>
<xs:complexType name="routes">
<xs:sequence>
<xs:element name="route" minOccurs="0" maxOccurs="unbounded">
<xs:annotation>
<xs:documentation xml:lang="en">
List all available sources for a given sink.
</xs:documentation>
</xs:annotation>
<xs:complexType>
<xs:attribute name="type" type="mixType" use="required"/>
<xs:attribute name="sink" type="xs:string" use="required"/>
<xs:attribute name="sources" type="xs:string" use="required"/>
</xs:complexType>
</xs:element>
</xs:sequence>
</xs:complexType>
<xs:complexType name="volumes">
<xs:sequence>
<xs:element name="volume" type="volume" minOccurs="0" maxOccurs="unbounded"/>
<xs:element name="reference" type="reference" minOccurs="0" maxOccurs="unbounded">
</xs:element>
</xs:sequence>
</xs:complexType>
<!-- TODO: Always require a ref for better xsd validations.
Currently a volume could have no points nor ref
as it can not be forbidden by xsd 1.0.-->
<xs:simpleType name="volumePoint">
<xs:annotation>
<xs:documentation xml:lang="en">
Comma separated pair of numbers.
The first one is the framework level (between 0 and 100).
The second one is the volume to send to the HAL.
The framework will interpolate volumes not specified.
There MUST be at least 2 points specified.
</xs:documentation>
</xs:annotation>
<xs:restriction base="xs:string">
<xs:pattern value="([0-9]{1,2}|100),-?[0-9]+"/>
</xs:restriction>
</xs:simpleType>
<xs:simpleType name="audioStreamType">
<xs:annotation>
<xs:documentation xml:lang="en">
Audio stream type describing the intended use case of a stream.
Please consult frameworks/base/media/java/android/media/AudioSystem.java
for the description of each value.
</xs:documentation>
</xs:annotation>
<xs:restriction base="xs:string">
<xs:enumeration value="AUDIO_STREAM_VOICE_CALL"/>
<xs:enumeration value="AUDIO_STREAM_SYSTEM"/>
<xs:enumeration value="AUDIO_STREAM_RING"/>
<xs:enumeration value="AUDIO_STREAM_MUSIC"/>
<xs:enumeration value="AUDIO_STREAM_ALARM"/>
<xs:enumeration value="AUDIO_STREAM_NOTIFICATION"/>
<xs:enumeration value="AUDIO_STREAM_BLUETOOTH_SCO"/>
<xs:enumeration value="AUDIO_STREAM_ENFORCED_AUDIBLE"/>
<xs:enumeration value="AUDIO_STREAM_DTMF"/>
<xs:enumeration value="AUDIO_STREAM_TTS"/>
<xs:enumeration value="AUDIO_STREAM_ACCESSIBILITY"/>
<xs:enumeration value="AUDIO_STREAM_ASSISTANT"/>
<xs:enumeration value="AUDIO_STREAM_REROUTING"/>
<xs:enumeration value="AUDIO_STREAM_PATCH"/>
<xs:enumeration value="AUDIO_STREAM_CALL_ASSISTANT"/>
</xs:restriction>
</xs:simpleType>
<xs:simpleType name="audioSource">
<xs:annotation>
<xs:documentation xml:lang="en">
An audio source defines the intended use case for the sound being recorded.
Please consult frameworks/base/media/java/android/media/MediaRecorder.java
for the description of each value.
</xs:documentation>
</xs:annotation>
<xs:restriction base="xs:string">
<xs:enumeration value="AUDIO_SOURCE_DEFAULT"/>
<xs:enumeration value="AUDIO_SOURCE_MIC"/>
<xs:enumeration value="AUDIO_SOURCE_VOICE_UPLINK"/>
<xs:enumeration value="AUDIO_SOURCE_VOICE_DOWNLINK"/>
<xs:enumeration value="AUDIO_SOURCE_VOICE_CALL"/>
<xs:enumeration value="AUDIO_SOURCE_CAMCORDER"/>
<xs:enumeration value="AUDIO_SOURCE_VOICE_RECOGNITION"/>
<xs:enumeration value="AUDIO_SOURCE_VOICE_COMMUNICATION"/>
<xs:enumeration value="AUDIO_SOURCE_REMOTE_SUBMIX"/>
<xs:enumeration value="AUDIO_SOURCE_UNPROCESSED"/>
<xs:enumeration value="AUDIO_SOURCE_VOICE_PERFORMANCE"/>
<xs:enumeration value="AUDIO_SOURCE_ECHO_REFERENCE"/>
<xs:enumeration value="AUDIO_SOURCE_FM_TUNER"/>
<xs:enumeration value="AUDIO_SOURCE_HOTWORD"/>
<xs:enumeration value="AUDIO_SOURCE_ULTRASOUND"/>
</xs:restriction>
</xs:simpleType>
<!-- Enum values of device_category from Volume.h. -->
<xs:simpleType name="deviceCategory">
<xs:restriction base="xs:string">
<xs:enumeration value="DEVICE_CATEGORY_HEADSET"/>
<xs:enumeration value="DEVICE_CATEGORY_SPEAKER"/>
<xs:enumeration value="DEVICE_CATEGORY_EARPIECE"/>
<xs:enumeration value="DEVICE_CATEGORY_EXT_MEDIA"/>
<xs:enumeration value="DEVICE_CATEGORY_HEARING_AID"/>
</xs:restriction>
</xs:simpleType>
<xs:complexType name="volume">
<xs:annotation>
<xs:documentation xml:lang="en">
Volume section defines a volume curve for a given use case and device category.
It contains a list of points of this curve expressing the attenuation in Millibels
for a given volume index from 0 to 100.
<volume stream="AUDIO_STREAM_MUSIC" deviceCategory="DEVICE_CATEGORY_SPEAKER">
<point>0,-9600</point>
<point>100,0</point>
</volume>
It may also reference a reference/@name to avoid duplicating curves.
<volume stream="AUDIO_STREAM_MUSIC" deviceCategory="DEVICE_CATEGORY_SPEAKER"
ref="DEFAULT_MEDIA_VOLUME_CURVE"/>
<reference name="DEFAULT_MEDIA_VOLUME_CURVE">
<point>0,-9600</point>
<point>100,0</point>
</reference>
</xs:documentation>
</xs:annotation>
<xs:sequence>
<xs:element name="point" type="volumePoint" minOccurs="0" maxOccurs="unbounded"/>
</xs:sequence>
<xs:attribute name="stream" type="audioStreamType"/>
<xs:attribute name="deviceCategory" type="deviceCategory"/>
<xs:attribute name="ref" type="xs:token" use="optional"/>
</xs:complexType>
<xs:complexType name="reference">
<xs:sequence>
<xs:element name="point" type="volumePoint" minOccurs="2" maxOccurs="unbounded"/>
</xs:sequence>
<xs:attribute name="name" type="xs:token" use="required"/>
</xs:complexType>
<xs:complexType name="surroundSound">
<xs:annotation>
<xs:documentation xml:lang="en">
Surround Sound section provides configuration related to handling of
multi-channel formats.
</xs:documentation>
</xs:annotation>
<xs:sequence>
<xs:element name="formats" type="surroundFormats"/>
</xs:sequence>
</xs:complexType>
<xs:simpleType name="audioFormatsList">
<xs:list itemType="extendableAudioFormat" />
</xs:simpleType>
<xs:complexType name="surroundFormats">
<xs:sequence>
<xs:element name="format" minOccurs="0" maxOccurs="unbounded">
<xs:complexType>
<xs:attribute name="name" type="extendableAudioFormat" use="required"/>
<xs:attribute name="subformats" type="audioFormatsList" />
</xs:complexType>
</xs:element>
</xs:sequence>
</xs:complexType>
<xs:simpleType name="engineSuffix">
<xs:restriction base="xs:string">
<xs:enumeration value="default"/>
<xs:enumeration value="configurable"/>
</xs:restriction>
</xs:simpleType>
</xs:schema>

View File

@@ -0,0 +1,17 @@
package {
default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "hardware_interfaces_license"
// to get the below license kinds:
// SPDX-license-identifier-Apache-2.0
default_applicable_licenses: ["hardware_interfaces_license"],
}
// Generates XML parser code from the Configurable Audio Policy (CAP) engine
// domains schema via the xsdc tool. The generated classes live in the
// "android.audio.policy.capengine.configuration" package, with
// <ConfigurableDomains> as the document root element.
// NOTE(review): nullability:true presumably makes generated accessors
// nullable-annotated — confirm against the xsd_config module docs.
xsd_config {
name: "audio_policy_capengine_configuration_aidl_default",
srcs: ["PolicyConfigurableDomains.xsd"],
package_name: "android.audio.policy.capengine.configuration",
nullability: true,
root_elements: ["ConfigurableDomains"],
}

View File

@@ -0,0 +1,467 @@
<?xml version="1.0" encoding="UTF-8"?>
<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema">
<!-- BEGIN W3cXmlAttributes.xsd -->
<xs:annotation>
<xs:documentation>
See http://www.w3.org/XML/1998/namespace.html and
http://www.w3.org/TR/REC-xml for information about this namespace.
This schema document describes the XML namespace, in a form
suitable for import by other schema documents.
Note that local names in this namespace are intended to be defined
only by the World Wide Web Consortium or its subgroups. The
following names are currently defined in this namespace and should
not be used with conflicting semantics by any Working Group,
specification, or document instance:
base (as an attribute name): denotes an attribute whose value
provides a URI to be used as the base for interpreting any
relative URIs in the scope of the element on which it
appears; its value is inherited. This name is reserved
by virtue of its definition in the XML Base specification.
id (as an attribute name): denotes an attribute whose value
should be interpreted as if declared to be of type ID.
The xml:id specification is not yet a W3C Recommendation,
but this attribute is included here to facilitate experimentation
with the mechanisms it proposes. Note that it is _not_ included
in the specialAttrs attribute group.
lang (as an attribute name): denotes an attribute whose value
is a language code for the natural language of the content of
any element; its value is inherited. This name is reserved
by virtue of its definition in the XML specification.
space (as an attribute name): denotes an attribute whose
value is a keyword indicating what whitespace processing
discipline is intended for the content of the element; its
value is inherited. This name is reserved by virtue of its
definition in the XML specification.
Father (in any context at all): denotes Jon Bosak, the chair of
the original XML Working Group. This name is reserved by
the following decision of the W3C XML Plenary and
XML Coordination groups:
In appreciation for his vision, leadership and dedication
the W3C XML Plenary on this 10th day of February, 2000
reserves for Jon Bosak in perpetuity the XML name
xml:Father
</xs:documentation>
</xs:annotation>
<xs:annotation>
<xs:documentation>This schema defines attributes and an attribute group
suitable for use by
schemas wishing to allow xml:base, xml:lang, xml:space or xml:id
attributes on elements they define.
To enable this, such a schema must import this schema
for the XML namespace, e.g. as follows:
&lt;schema . . .>
. . .
&lt;import namespace="http://www.w3.org/XML/1998/namespace"
schemaLocation="http://www.w3.org/2005/08/xml.xsd"/>
Subsequently, qualified reference to any of the attributes
or the group defined below will have the desired effect, e.g.
&lt;type . . .>
. . .
&lt;attributeGroup ref="xml:specialAttrs"/>
will define a type which will schema-validate an instance
element with any of those attributes</xs:documentation>
</xs:annotation>
<xs:annotation>
<xs:documentation>In keeping with the XML Schema WG's standard versioning
policy, this schema document will persist at
http://www.w3.org/2005/08/xml.xsd.
At the date of issue it can also be found at
http://www.w3.org/2001/xml.xsd.
The schema document at that URI may however change in the future,
in order to remain compatible with the latest version of XML Schema
itself, or with the XML namespace itself. In other words, if the XML
Schema or XML namespaces change, the version of this document at
http://www.w3.org/2001/xml.xsd will change
accordingly; the version at
http://www.w3.org/2005/08/xml.xsd will not change.
</xs:documentation>
</xs:annotation>
<xs:attribute name="lang">
<xs:annotation>
<xs:documentation>Attempting to install the relevant ISO 2- and 3-letter
codes as the enumerated possible values is probably never
going to be a realistic possibility. See
RFC 3066 at http://www.ietf.org/rfc/rfc3066.txt and the IANA registry
at http://www.iana.org/assignments/lang-tag-apps.htm for
further information.
The union allows for the 'un-declaration' of xml:lang with
the empty string.</xs:documentation>
</xs:annotation>
<xs:simpleType>
<xs:union memberTypes="xs:language">
<xs:simpleType name="langEnum">
<xs:restriction base="xs:string">
<xs:enumeration value=""/>
</xs:restriction>
</xs:simpleType>
</xs:union>
</xs:simpleType>
</xs:attribute>
<xs:attribute name="space">
<xs:simpleType name="spaceEnum">
<xs:restriction base="xs:NCName">
<xs:enumeration value="default"/>
<xs:enumeration value="preserve"/>
</xs:restriction>
</xs:simpleType>
</xs:attribute>
<xs:attribute name="base" type="xs:anyURI">
<xs:annotation>
<xs:documentation>See http://www.w3.org/TR/xmlbase/ for
information about this attribute.</xs:documentation>
</xs:annotation>
</xs:attribute>
<xs:attribute name="id" type="xs:ID">
<xs:annotation>
<xs:documentation>See http://www.w3.org/TR/xml-id/ for
information about this attribute.</xs:documentation>
</xs:annotation>
</xs:attribute>
<xs:attributeGroup name="specialAttrs">
<xs:attribute ref="xml:base"/>
<xs:attribute ref="xml:lang"/>
<xs:attribute ref="xml:space"/>
</xs:attributeGroup>
<!-- END W3cXmlAttributes.xsd -->
<!-- BEGIN ParameterSettings.xsd -->
<!-- BUG b/147297854 - removed "abstract" from type definition -->
<!-- Boolean parameter value; intended content is a whitespace-separated list of
     0/1 or 0x0/0x1 per the stricter, commented-out restriction below. -->
<xs:complexType name="BooleanParameterType">
<xs:simpleContent>
<xs:extension base="xs:string">
<xs:attribute name="Name" type="ParameterNameEnumType" use="required"/>
</xs:extension>
<!--xs:restriction base="xs:string">
<xs:pattern value="([01][\s]*)+"/>
<xs:pattern value="((0x0|0x1)[\s]*)+"/>
<xs:attribute name="Name" type="xs:string" use="required"/>
</xs:restriction-->
</xs:simpleContent>
</xs:complexType>
<!-- Integer parameter value; intended content is a whitespace-separated list of
     decimal or hex integers per the commented-out restriction below. -->
<xs:complexType name="IntegerParameterType">
<xs:simpleContent>
<xs:extension base="xs:string">
<xs:attribute name="Name" type="ParameterNameEnumType" use="required"/>
</xs:extension>
<!--xs:restriction base="xs:string">
<xs:pattern value="(0|([+-]?[1-9][0-9]*))(\s+(0|([+-]?[1-9][0-9]*)))*"/>
<xs:pattern value="(0x[0-9a-fA-F]+)(\s+(0x[0-9a-fA-F]+))*"/>
<xs:attribute name="Name" type="xs:string" use="required"/>
</xs:restriction-->
</xs:simpleContent>
</xs:complexType>
<!-- Enum parameter value carried as a free-form string literal. -->
<xs:complexType name="EnumParameterType">
<xs:simpleContent>
<xs:extension base="xs:string">
<xs:attribute name="Name" type="ParameterNameEnumType" use="required"/>
</xs:extension>
<!--xs:extension base="xs:string">
<xs:attribute name="Name" type="xs:string" use="required"/>
</xs:extension-->
</xs:simpleContent>
</xs:complexType>
<!-- Fixed- or floating-point parameter value (one type shared by both element
     kinds); intended content per the commented-out numeric patterns below. -->
<xs:complexType name="PointParameterType">
<xs:simpleContent>
<xs:extension base="xs:string">
<xs:attribute name="Name" type="ParameterNameEnumType" use="required"/>
</xs:extension>
<!--xs:restriction base="xs:string">
<xs:pattern value="((0|[+-]?0\.[0-9]+|(([+-]?[1-9][0-9]*)(\.[0-9]+)?))([Ee][+-]?[0-9]+)?)(\s+(0|[+-]?0\.[0-9]+|(([+-]?[1-9][0-9]*)(\.[0-9]+)?))([Ee][+-]?[0-9]+)?)*"/>
<xs:pattern value="(0x[0-9a-fA-F]+)(\s+(0x[0-9a-fA-F]+))*"/>
<xs:attribute name="Name" type="xs:NMTOKEN" use="required"/>
</xs:restriction-->
</xs:simpleContent>
</xs:complexType>
<!-- Named block of one or more BitParameter entries (IntegerParameterType each). -->
<xs:complexType name="BitParameterBlockType">
<xs:sequence>
<xs:element name="BitParameter" maxOccurs="unbounded" type="IntegerParameterType"/>
</xs:sequence>
<xs:attribute name="Name" type="ParameterNameEnumType" use="required"/>
</xs:complexType>
<!-- Free-form string parameter value. -->
<xs:complexType name="StringParameterType">
<xs:simpleContent>
<xs:extension base="xs:string">
<xs:attribute name="Name" type="ParameterNameEnumType" use="required"/>
</xs:extension>
</xs:simpleContent>
</xs:complexType>
<!-- The choice of parameter elements a parameter block may contain. -->
<xs:group name="ParameterBlockGroup">
<xs:choice>
<xs:element name="BooleanParameter" type="BooleanParameterType"/>
<xs:element name="IntegerParameter" type="IntegerParameterType"/>
<xs:element name="EnumParameter" type="EnumParameterType"/>
<xs:element name="FixedPointParameter" type="PointParameterType"/>
<xs:element name="FloatingPointParameter" type="PointParameterType"/>
<xs:element name="BitParameterBlock" type="BitParameterBlockType">
<!-- Sub-element Names must be unique within one BitParameterBlock. -->
<xs:unique name="BitParameterBlockSubElementsUniqueness">
<xs:selector xpath="*"/>
<xs:field xpath="@Name"/>
</xs:unique>
</xs:element>
<xs:element name="StringParameter" type="StringParameterType"/>
<!--xs:element name="Component" type="ParameterBlockType"/-->
<xs:element name="ParameterBlock" type="ParameterBlockType">
<!-- Sub-element Names must be unique within one ParameterBlock. -->
<xs:unique name="ParameterBlockSubElementsUniqueness">
<xs:selector xpath="*"/>
<xs:field xpath="@Name"/>
</xs:unique>
</xs:element>
</xs:choice>
</xs:group>
<!-- A named, recursively nestable block of parameters. -->
<xs:complexType name="ParameterBlockType">
<xs:sequence>
<xs:group ref="ParameterBlockGroup" maxOccurs="unbounded"/>
</xs:sequence>
<xs:attribute name="Name" type="xs:NMTOKEN" use="required"/>
</xs:complexType>
<!-- END ParameterSettings.xsd -->
<!-- BEGIN ConfigurableDomain.xsd -->
<!-- Leaf rule: tests one selection criterion against a value with the given
     match operator (Is/IsNot for exclusive criteria, Includes/Excludes for
     inclusive ones - presumably, per the parameter framework; confirm). -->
<xs:complexType name="SelectionCriterionRuleType">
<xs:attribute name="SelectionCriterion" type="xs:NMTOKEN" use="required"/>
<xs:attribute name="MatchesWhen" use="required">
<!-- NOTE(review): a named simpleType nested in an attribute is non-standard in
     strict XSD 1.0; "MatchesWhenEnum" is presumably kept for xsdc enum naming. -->
<xs:simpleType name="MatchesWhenEnum">
<xs:restriction base="xs:NMTOKEN">
<xs:enumeration value="Is"/>
<xs:enumeration value="IsNot"/>
<xs:enumeration value="Includes"/>
<xs:enumeration value="Excludes"/>
</xs:restriction>
</xs:simpleType>
</xs:attribute>
<xs:attribute name="Value" use="required" type="xs:NMTOKEN"/>
</xs:complexType>
<!-- A rule is either a nested CompoundRule or a leaf SelectionCriterionRule. -->
<xs:group name="RuleGroup">
<xs:choice>
<xs:element name="CompoundRule" type="CompoundRuleType"/>
<xs:element name="SelectionCriterionRule" type="SelectionCriterionRuleType"/>
</xs:choice>
</xs:group>
<!-- Combination of zero or more sub-rules; Type is "Any" or "All" (presumably
     OR/AND semantics in the parameter framework - confirm upstream). -->
<xs:complexType name="CompoundRuleType">
<xs:sequence>
<xs:group ref="RuleGroup" minOccurs="0" maxOccurs="unbounded"/>
</xs:sequence>
<xs:attribute name="Type">
<xs:simpleType name="TypeEnum">
<xs:restriction base="xs:NMTOKEN">
<xs:enumeration value="Any"/>
<xs:enumeration value="All"/>
</xs:restriction>
</xs:simpleType>
</xs:attribute>
</xs:complexType>
<!-- List of named configurations, each optionally guarded by one CompoundRule. -->
<xs:complexType name="ConfigurationsType">
<xs:sequence>
<xs:element maxOccurs="unbounded" name="Configuration">
<xs:complexType>
<xs:sequence>
<xs:element name="CompoundRule" type="CompoundRuleType" minOccurs="0" maxOccurs="1"/>
</xs:sequence>
<xs:attribute name="Name" use="required" type="xs:NCName"/>
</xs:complexType>
</xs:element>
</xs:sequence>
</xs:complexType>
<!-- A component's content: one ParameterBlockGroup choice. -->
<xs:group name="ComponentGroup">
<xs:sequence>
<xs:group ref="ParameterBlockGroup"/>
</xs:sequence>
</xs:group>
<!-- Recursive component tree: parameter content or nested Subsystem components. -->
<xs:complexType name="ComponentType">
<xs:sequence>
<xs:choice>
<xs:group ref="ComponentGroup" maxOccurs="unbounded"/>
<xs:element name="Subsystem" type="ComponentType" maxOccurs="unbounded"/>
</xs:choice>
</xs:sequence>
<xs:attribute name="Name" use="required" type="xs:NCName"/>
</xs:complexType>
<!-- Element paths owned by a domain. The pattern "/.*[^/]" requires Path to
     start with '/' and not end with one. -->
<xs:complexType name="ConfigurableElementsType">
<xs:sequence>
<xs:element maxOccurs="unbounded" minOccurs="0" name="ConfigurableElement">
<xs:complexType>
<xs:attribute name="Path" use="required">
<xs:simpleType>
<xs:restriction base="xs:anyURI">
<xs:pattern value="/.*[^/]"/>
</xs:restriction>
</xs:simpleType>
</xs:attribute>
</xs:complexType>
</xs:element>
</xs:sequence>
</xs:complexType>
<!-- Value assignment for one ConfigurableElement inside a Settings/Configuration:
     exactly one parameter element, plus the Path (same "/.*[^/]" shape as in
     ConfigurableElementsType) of the element it applies to. -->
<xs:complexType name="ConfigurableElementSettingsType">
<xs:choice>
<xs:element name="BooleanParameter" type="BooleanParameterType"/>
<xs:element name="IntegerParameter" type="IntegerParameterType"/>
<xs:element name="EnumParameter" type="EnumParameterType"/>
<xs:element name="FixedPointParameter" type="PointParameterType"/>
<xs:element name="FloatingPointParameter" type="PointParameterType"/>
<xs:element name="BitParameter" type="IntegerParameterType"/>
<xs:element name="BitParameterBlock" type="BitParameterBlockType">
<!-- Renamed from "BitParameterBlockSubElementsUniqueness": identity-constraint
     names must be unique across the whole schema (XSD 1.0 Schema Component
     Constraint "Identity-constraint Definition Identities"), and that name is
     already declared inside ParameterBlockGroup. -->
<xs:unique name="SettingsBitParameterBlockSubElementsUniqueness">
<xs:selector xpath="*"/>
<xs:field xpath="@Name"/>
</xs:unique>
</xs:element>
<xs:element name="StringParameter" type="StringParameterType"/>
<!--xs:element name="Component" type="ParameterBlockType"/-->
<xs:element name="ParameterBlock" type="ParameterBlockType">
<!-- Renamed from "ParameterBlockSubElementsUniqueness" for the same reason:
     it collided with the declaration inside ParameterBlockGroup. -->
<xs:unique name="SettingsParameterBlockSubElementsUniqueness">
<xs:selector xpath="*"/>
<xs:field xpath="@Name"/>
</xs:unique>
</xs:element>
</xs:choice>
<!--xs:choice>
<xs:element name="BitParameter" type="IntegerParameterType"/>
<xs:group ref="ComponentGroup"/>
</xs:choice-->
<xs:attribute name="Path" use="required">
<xs:simpleType>
<xs:restriction base="xs:anyURI">
<xs:pattern value="/.*[^/]"/>
</xs:restriction>
</xs:simpleType>
</xs:attribute>
</xs:complexType>
<!-- Per-configuration settings: for each named Configuration, the values applied
     to configurable elements. Element Paths are unique within one Configuration. -->
<xs:complexType name="SettingsType">
<xs:sequence>
<xs:element maxOccurs="unbounded" minOccurs="0" name="Configuration">
<xs:complexType>
<xs:sequence>
<xs:element name="ConfigurableElement" minOccurs="0" maxOccurs="unbounded" type="ConfigurableElementSettingsType"/>
</xs:sequence>
<xs:attribute name="Name" use="required" type="xs:NCName"/>
</xs:complexType>
<xs:unique name="ConfigurableElementUniqueness">
<xs:selector xpath="ConfigurableElement"/>
<xs:field xpath="@Path"/>
</xs:unique>
</xs:element>
</xs:sequence>
</xs:complexType>
<!-- A configurable domain: its Configurations (with selection rules), the
     elements it controls, and optional per-configuration Settings.
     SequenceAware defaults to false. -->
<xs:complexType name="ConfigurableDomainType">
<xs:sequence>
<xs:element name="Configurations" type="ConfigurationsType"/>
<xs:element name="ConfigurableElements" type="ConfigurableElementsType"/>
<xs:element name="Settings" type="SettingsType" minOccurs="0"/>
</xs:sequence>
<xs:attribute name="Name" use="required" type="xs:NCName"/>
<xs:attribute name="SequenceAware" use="optional" type="xs:boolean" default="false"/>
</xs:complexType>
<!-- Standalone single-domain document root. -->
<xs:element name="ConfigurableDomain" type="ConfigurableDomainType"/>
<!-- END ConfigurableDomain.xsd -->
<!-- BEGIN ConfigurableDomains.xsd -->
<!-- Root element: the collection of all configurable domains for one system class. -->
<xs:element name="ConfigurableDomains">
<xs:complexType>
<xs:sequence>
<xs:element maxOccurs="unbounded" name="ConfigurableDomain" type="ConfigurableDomainType">
<!-- Paths declared under ConfigurableElements form the domain's key set. -->
<xs:key name="ConfigurableElementKey">
<xs:selector xpath="ConfigurableElements/ConfigurableElement"/>
<xs:field xpath="@Path"/>
</xs:key>
<!-- Settings may only reference elements declared in this domain. -->
<xs:keyref refer="ConfigurableElementKey" name="ConfigurableDomainReference">
<xs:selector xpath="Settings/Configuration/ConfigurableElement"/>
<xs:field xpath="@Path"/>
</xs:keyref>
<!-- Configuration names declared under Configurations form a key set. -->
<xs:key name="ConfigurationKey">
<xs:selector xpath="Configurations/Configuration"/>
<xs:field xpath="@Name"/>
</xs:key>
<!-- NOTE(review): this selector matches nothing in the present schema -
     ConfigurableElement (under ConfigurableElements) has no Configuration
     child - so the constraint appears vacuous; confirm against the upstream
     parameter-framework ConfigurableDomains.xsd. -->
<xs:keyref refer="ConfigurationKey" name="ConfigurationReference2">
<xs:selector xpath="ConfigurableElements/ConfigurableElement/Configuration"/>
<xs:field xpath="@Name"/>
</xs:keyref>
<!-- Every Settings/Configuration must name a declared Configuration. -->
<xs:keyref refer="ConfigurationKey" name="ConfigurationReference">
<xs:selector xpath="Settings/Configuration"/>
<xs:field xpath="@Name"/>
</xs:keyref>
</xs:element>
</xs:sequence>
<xs:attribute name="SystemClassName" use="required" type="xs:NCName"/>
</xs:complexType>
<!-- Domain names must be unique within the document. -->
<xs:unique name="ConfigurableDomainUniqueness">
<xs:selector xpath="ConfigurableDomain"/>
<xs:field xpath="@Name"/>
</xs:unique>
</xs:element>
<!-- END ConfigurableDomains.xsd -->
<!-- Allowed values for the "Name" attribute of the parameter elements above.
     The literals mirror audio device / volume-profile identifiers; presumably
     they must match the criterion and parameter names the CAP engine expects -
     confirm against the engine configuration before adding entries. -->
<xs:simpleType name="ParameterNameEnumType">
<xs:restriction base="xs:string">
<xs:enumeration value="volume_profile"/>
<xs:enumeration value="communication"/>
<xs:enumeration value="ambient"/>
<xs:enumeration value="builtin_mic"/>
<xs:enumeration value="bluetooth_sco_headset"/>
<xs:enumeration value="wired_headset"/>
<xs:enumeration value="hdmi"/>
<xs:enumeration value="telephony_rx"/>
<xs:enumeration value="back_mic"/>
<xs:enumeration value="remote_submix"/>
<xs:enumeration value="anlg_dock_headset"/>
<xs:enumeration value="dgtl_dock_headset"/>
<xs:enumeration value="usb_accessory"/>
<xs:enumeration value="usb_device"/>
<xs:enumeration value="fm_tuner"/>
<xs:enumeration value="tv_tuner"/>
<xs:enumeration value="line"/>
<xs:enumeration value="spdif"/>
<xs:enumeration value="bluetooth_a2dp" />
<xs:enumeration value="loopback" />
<xs:enumeration value="ip" />
<xs:enumeration value="bus" />
<xs:enumeration value="proxy"/>
<xs:enumeration value="usb_headset"/>
<xs:enumeration value="bluetooth_ble"/>
<xs:enumeration value="hdmi_arc"/>
<xs:enumeration value="echo_reference"/>
<xs:enumeration value="ble_headset"/>
<xs:enumeration value="stub"/>
<xs:enumeration value="hdmi_earc"/>
<xs:enumeration value="device_address"/>
<xs:enumeration value="earpiece" />
<xs:enumeration value="speaker" />
<xs:enumeration value="wired_headphone" />
<xs:enumeration value="bluetooth_sco" />
<xs:enumeration value="bluetooth_sco_carkit"/>
<xs:enumeration value="bluetooth_a2dp_headphones"/>
<xs:enumeration value="bluetooth_a2dp_speaker"/>
<xs:enumeration value="telephony_tx"/>
<xs:enumeration value="fm"/>
<xs:enumeration value="aux_line"/>
<xs:enumeration value="speaker_safe"/>
<xs:enumeration value="hearing_aid" />
<xs:enumeration value="echo_canceller" />
<xs:enumeration value="ble_speaker" />
<xs:enumeration value="ble_broadcast" />
</xs:restriction>
</xs:simpleType>
</xs:schema>

View File

@@ -0,0 +1,264 @@
// Signature format: 2.0
package android.audio.policy.capengine.configuration {
public class BitParameterBlockType {
ctor public BitParameterBlockType();
method @Nullable public java.util.List<android.audio.policy.capengine.configuration.IntegerParameterType> getBitParameter();
method @Nullable public android.audio.policy.capengine.configuration.ParameterNameEnumType getName();
method public void setName(@Nullable android.audio.policy.capengine.configuration.ParameterNameEnumType);
}
public class BooleanParameterType {
ctor public BooleanParameterType();
method @Nullable public android.audio.policy.capengine.configuration.ParameterNameEnumType getName();
method @Nullable public String getValue();
method public void setName(@Nullable android.audio.policy.capengine.configuration.ParameterNameEnumType);
method public void setValue(@Nullable String);
}
public class ComponentType {
ctor public ComponentType();
method @Nullable public String getName();
method @Nullable public java.util.List<android.audio.policy.capengine.configuration.ComponentType> getSubsystem_optional();
method public void setName(@Nullable String);
}
public class CompoundRuleType {
ctor public CompoundRuleType();
method @Nullable public android.audio.policy.capengine.configuration.CompoundRuleType getCompoundRule_optional();
method @Nullable public android.audio.policy.capengine.configuration.SelectionCriterionRuleType getSelectionCriterionRule_optional();
method @Nullable public android.audio.policy.capengine.configuration.TypeEnum getType();
method public void setCompoundRule_optional(@Nullable android.audio.policy.capengine.configuration.CompoundRuleType);
method public void setSelectionCriterionRule_optional(@Nullable android.audio.policy.capengine.configuration.SelectionCriterionRuleType);
method public void setType(@Nullable android.audio.policy.capengine.configuration.TypeEnum);
}
public class ConfigurableDomainType {
ctor public ConfigurableDomainType();
method @Nullable public android.audio.policy.capengine.configuration.ConfigurableElementsType getConfigurableElements();
method @Nullable public android.audio.policy.capengine.configuration.ConfigurationsType getConfigurations();
method @Nullable public String getName();
method @Nullable public boolean getSequenceAware();
method @Nullable public android.audio.policy.capengine.configuration.SettingsType getSettings();
method public void setConfigurableElements(@Nullable android.audio.policy.capengine.configuration.ConfigurableElementsType);
method public void setConfigurations(@Nullable android.audio.policy.capengine.configuration.ConfigurationsType);
method public void setName(@Nullable String);
method public void setSequenceAware(@Nullable boolean);
method public void setSettings(@Nullable android.audio.policy.capengine.configuration.SettingsType);
}
public class ConfigurableDomains {
ctor public ConfigurableDomains();
method @Nullable public java.util.List<android.audio.policy.capengine.configuration.ConfigurableDomainType> getConfigurableDomain();
method @Nullable public String getSystemClassName();
method public void setSystemClassName(@Nullable String);
}
public class ConfigurableElementSettingsType {
ctor public ConfigurableElementSettingsType();
method @Nullable public android.audio.policy.capengine.configuration.BitParameterBlockType getBitParameterBlock_optional();
method @Nullable public android.audio.policy.capengine.configuration.IntegerParameterType getBitParameter_optional();
method @Nullable public android.audio.policy.capengine.configuration.BooleanParameterType getBooleanParameter_optional();
method @Nullable public android.audio.policy.capengine.configuration.EnumParameterType getEnumParameter_optional();
method @Nullable public android.audio.policy.capengine.configuration.PointParameterType getFixedPointParameter_optional();
method @Nullable public android.audio.policy.capengine.configuration.PointParameterType getFloatingPointParameter_optional();
method @Nullable public android.audio.policy.capengine.configuration.IntegerParameterType getIntegerParameter_optional();
method @Nullable public android.audio.policy.capengine.configuration.ParameterBlockType getParameterBlock_optional();
method @Nullable public String getPath();
method @Nullable public android.audio.policy.capengine.configuration.StringParameterType getStringParameter_optional();
method public void setBitParameterBlock_optional(@Nullable android.audio.policy.capengine.configuration.BitParameterBlockType);
method public void setBitParameter_optional(@Nullable android.audio.policy.capengine.configuration.IntegerParameterType);
method public void setBooleanParameter_optional(@Nullable android.audio.policy.capengine.configuration.BooleanParameterType);
method public void setEnumParameter_optional(@Nullable android.audio.policy.capengine.configuration.EnumParameterType);
method public void setFixedPointParameter_optional(@Nullable android.audio.policy.capengine.configuration.PointParameterType);
method public void setFloatingPointParameter_optional(@Nullable android.audio.policy.capengine.configuration.PointParameterType);
method public void setIntegerParameter_optional(@Nullable android.audio.policy.capengine.configuration.IntegerParameterType);
method public void setParameterBlock_optional(@Nullable android.audio.policy.capengine.configuration.ParameterBlockType);
method public void setPath(@Nullable String);
method public void setStringParameter_optional(@Nullable android.audio.policy.capengine.configuration.StringParameterType);
}
public class ConfigurableElementsType {
ctor public ConfigurableElementsType();
method @Nullable public java.util.List<android.audio.policy.capengine.configuration.ConfigurableElementsType.ConfigurableElement> getConfigurableElement();
}
public static class ConfigurableElementsType.ConfigurableElement {
ctor public ConfigurableElementsType.ConfigurableElement();
method @Nullable public String getPath();
method public void setPath(@Nullable String);
}
public class ConfigurationsType {
ctor public ConfigurationsType();
method @Nullable public java.util.List<android.audio.policy.capengine.configuration.ConfigurationsType.Configuration> getConfiguration();
}
public static class ConfigurationsType.Configuration {
ctor public ConfigurationsType.Configuration();
method @Nullable public android.audio.policy.capengine.configuration.CompoundRuleType getCompoundRule();
method @Nullable public String getName();
method public void setCompoundRule(@Nullable android.audio.policy.capengine.configuration.CompoundRuleType);
method public void setName(@Nullable String);
}
public class EnumParameterType {
ctor public EnumParameterType();
method @Nullable public android.audio.policy.capengine.configuration.ParameterNameEnumType getName();
method @Nullable public String getValue();
method public void setName(@Nullable android.audio.policy.capengine.configuration.ParameterNameEnumType);
method public void setValue(@Nullable String);
}
public class IntegerParameterType {
ctor public IntegerParameterType();
method @Nullable public android.audio.policy.capengine.configuration.ParameterNameEnumType getName();
method @Nullable public String getValue();
method public void setName(@Nullable android.audio.policy.capengine.configuration.ParameterNameEnumType);
method public void setValue(@Nullable String);
}
public enum LangEnum {
method @NonNull public String getRawName();
enum_constant public static final android.audio.policy.capengine.configuration.LangEnum EMPTY;
}
public enum MatchesWhenEnum {
method @NonNull public String getRawName();
enum_constant public static final android.audio.policy.capengine.configuration.MatchesWhenEnum Excludes;
enum_constant public static final android.audio.policy.capengine.configuration.MatchesWhenEnum Includes;
enum_constant public static final android.audio.policy.capengine.configuration.MatchesWhenEnum Is;
enum_constant public static final android.audio.policy.capengine.configuration.MatchesWhenEnum IsNot;
}
public class ParameterBlockType {
ctor public ParameterBlockType();
method @Nullable public android.audio.policy.capengine.configuration.BitParameterBlockType getBitParameterBlock_optional();
method @Nullable public android.audio.policy.capengine.configuration.BooleanParameterType getBooleanParameter_optional();
method @Nullable public android.audio.policy.capengine.configuration.EnumParameterType getEnumParameter_optional();
method @Nullable public android.audio.policy.capengine.configuration.PointParameterType getFixedPointParameter_optional();
method @Nullable public android.audio.policy.capengine.configuration.PointParameterType getFloatingPointParameter_optional();
method @Nullable public android.audio.policy.capengine.configuration.IntegerParameterType getIntegerParameter_optional();
method @Nullable public String getName();
method @Nullable public android.audio.policy.capengine.configuration.ParameterBlockType getParameterBlock_optional();
method @Nullable public android.audio.policy.capengine.configuration.StringParameterType getStringParameter_optional();
method public void setBitParameterBlock_optional(@Nullable android.audio.policy.capengine.configuration.BitParameterBlockType);
method public void setBooleanParameter_optional(@Nullable android.audio.policy.capengine.configuration.BooleanParameterType);
method public void setEnumParameter_optional(@Nullable android.audio.policy.capengine.configuration.EnumParameterType);
method public void setFixedPointParameter_optional(@Nullable android.audio.policy.capengine.configuration.PointParameterType);
method public void setFloatingPointParameter_optional(@Nullable android.audio.policy.capengine.configuration.PointParameterType);
method public void setIntegerParameter_optional(@Nullable android.audio.policy.capengine.configuration.IntegerParameterType);
method public void setName(@Nullable String);
method public void setParameterBlock_optional(@Nullable android.audio.policy.capengine.configuration.ParameterBlockType);
method public void setStringParameter_optional(@Nullable android.audio.policy.capengine.configuration.StringParameterType);
}
public enum ParameterNameEnumType {
method @NonNull public String getRawName();
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType ambient;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType anlg_dock_headset;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType aux_line;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType back_mic;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType ble_broadcast;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType ble_headset;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType ble_speaker;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType bluetooth_a2dp;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType bluetooth_a2dp_headphones;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType bluetooth_a2dp_speaker;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType bluetooth_ble;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType bluetooth_sco;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType bluetooth_sco_carkit;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType bluetooth_sco_headset;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType builtin_mic;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType bus;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType communication;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType device_address;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType dgtl_dock_headset;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType earpiece;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType echo_canceller;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType echo_reference;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType fm;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType fm_tuner;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType hdmi;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType hdmi_arc;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType hdmi_earc;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType hearing_aid;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType ip;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType line;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType loopback;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType proxy;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType remote_submix;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType spdif;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType speaker;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType speaker_safe;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType stub;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType telephony_rx;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType telephony_tx;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType tv_tuner;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType usb_accessory;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType usb_device;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType usb_headset;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType volume_profile;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType wired_headphone;
enum_constant public static final android.audio.policy.capengine.configuration.ParameterNameEnumType wired_headset;
}
public class PointParameterType {
ctor public PointParameterType();
method @Nullable public android.audio.policy.capengine.configuration.ParameterNameEnumType getName();
method @Nullable public String getValue();
method public void setName(@Nullable android.audio.policy.capengine.configuration.ParameterNameEnumType);
method public void setValue(@Nullable String);
}
public class SelectionCriterionRuleType {
ctor public SelectionCriterionRuleType();
method @Nullable public android.audio.policy.capengine.configuration.MatchesWhenEnum getMatchesWhen();
method @Nullable public String getSelectionCriterion();
method @Nullable public String getValue();
method public void setMatchesWhen(@Nullable android.audio.policy.capengine.configuration.MatchesWhenEnum);
method public void setSelectionCriterion(@Nullable String);
method public void setValue(@Nullable String);
}
public class SettingsType {
ctor public SettingsType();
method @Nullable public java.util.List<android.audio.policy.capengine.configuration.SettingsType.Configuration> getConfiguration();
}
public static class SettingsType.Configuration {
ctor public SettingsType.Configuration();
method @Nullable public java.util.List<android.audio.policy.capengine.configuration.ConfigurableElementSettingsType> getConfigurableElement();
method @Nullable public String getName();
method public void setName(@Nullable String);
}
public enum SpaceEnum {
method @NonNull public String getRawName();
enum_constant public static final android.audio.policy.capengine.configuration.SpaceEnum _default;
enum_constant public static final android.audio.policy.capengine.configuration.SpaceEnum preserve;
}
public class StringParameterType {
ctor public StringParameterType();
method @Nullable public android.audio.policy.capengine.configuration.ParameterNameEnumType getName();
method @Nullable public String getValue();
method public void setName(@Nullable android.audio.policy.capengine.configuration.ParameterNameEnumType);
method public void setValue(@Nullable String);
}
public enum TypeEnum {
method @NonNull public String getRawName();
enum_constant public static final android.audio.policy.capengine.configuration.TypeEnum All;
enum_constant public static final android.audio.policy.capengine.configuration.TypeEnum Any;
}
public class XmlParser {
ctor public XmlParser();
method @Nullable public static android.audio.policy.capengine.configuration.ConfigurableDomains readConfigurableDomains(@NonNull java.io.InputStream) throws javax.xml.datatype.DatatypeConfigurationException, java.io.IOException, org.xmlpull.v1.XmlPullParserException;
method @Nullable public static String readText(@NonNull org.xmlpull.v1.XmlPullParser) throws java.io.IOException, org.xmlpull.v1.XmlPullParserException;
method public static void skip(@NonNull org.xmlpull.v1.XmlPullParser) throws java.io.IOException, org.xmlpull.v1.XmlPullParserException;
}
}

View File

@@ -0,0 +1 @@
// Signature format: 2.0

View File

@@ -0,0 +1,16 @@
package {
default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "hardware_interfaces_license"
// to get the below license kinds:
// SPDX-license-identifier-Apache-2.0
default_applicable_licenses: ["hardware_interfaces_license"],
}
xsd_config {
name: "audio_policy_engine_configuration_aidl_default",
srcs: ["audio_policy_engine_configuration.xsd"],
package_name: "android.audio.policy.engine.configuration",
nullability: true,
}

View File

@@ -0,0 +1,346 @@
// Signature format: 2.0
package android.audio.policy.engine.configuration {
public class AttributesGroup {
ctor public AttributesGroup();
method @Nullable public java.util.List<android.audio.policy.engine.configuration.AttributesType> getAttributes_optional();
method @Nullable public android.audio.policy.engine.configuration.BundleType getBundle_optional();
method @Nullable public android.audio.policy.engine.configuration.ContentTypeType getContentType_optional();
method @Nullable public android.audio.policy.engine.configuration.FlagsType getFlags_optional();
method @Nullable public android.audio.policy.engine.configuration.SourceType getSource_optional();
method @Nullable public android.audio.policy.engine.configuration.Stream getStreamType();
method @Nullable public android.audio.policy.engine.configuration.UsageType getUsage_optional();
method @Nullable public String getVolumeGroup();
method public void setBundle_optional(@Nullable android.audio.policy.engine.configuration.BundleType);
method public void setContentType_optional(@Nullable android.audio.policy.engine.configuration.ContentTypeType);
method public void setFlags_optional(@Nullable android.audio.policy.engine.configuration.FlagsType);
method public void setSource_optional(@Nullable android.audio.policy.engine.configuration.SourceType);
method public void setStreamType(@Nullable android.audio.policy.engine.configuration.Stream);
method public void setUsage_optional(@Nullable android.audio.policy.engine.configuration.UsageType);
method public void setVolumeGroup(@Nullable String);
}
public class AttributesRef {
ctor public AttributesRef();
method @Nullable public java.util.List<android.audio.policy.engine.configuration.AttributesRefType> getReference();
}
public class AttributesRefType {
ctor public AttributesRefType();
method @Nullable public android.audio.policy.engine.configuration.AttributesType getAttributes();
method @Nullable public String getName();
method public void setAttributes(@Nullable android.audio.policy.engine.configuration.AttributesType);
method public void setName(@Nullable String);
}
public class AttributesType {
ctor public AttributesType();
method @Nullable public String getAttributesRef();
method @Nullable public android.audio.policy.engine.configuration.BundleType getBundle();
method @Nullable public android.audio.policy.engine.configuration.ContentTypeType getContentType();
method @Nullable public android.audio.policy.engine.configuration.FlagsType getFlags();
method @Nullable public android.audio.policy.engine.configuration.SourceType getSource();
method @Nullable public android.audio.policy.engine.configuration.UsageType getUsage();
method public void setAttributesRef(@Nullable String);
method public void setBundle(@Nullable android.audio.policy.engine.configuration.BundleType);
method public void setContentType(@Nullable android.audio.policy.engine.configuration.ContentTypeType);
method public void setFlags(@Nullable android.audio.policy.engine.configuration.FlagsType);
method public void setSource(@Nullable android.audio.policy.engine.configuration.SourceType);
method public void setUsage(@Nullable android.audio.policy.engine.configuration.UsageType);
}
public class BundleType {
ctor public BundleType();
method @Nullable public String getKey();
method @Nullable public String getValue();
method public void setKey(@Nullable String);
method public void setValue(@Nullable String);
}
public class Configuration {
ctor public Configuration();
method @Nullable public java.util.List<android.audio.policy.engine.configuration.AttributesRef> getAttributesRef();
method @Nullable public java.util.List<android.audio.policy.engine.configuration.CriteriaType> getCriteria();
method @Nullable public java.util.List<android.audio.policy.engine.configuration.CriterionTypesType> getCriterion_types();
method @Nullable public java.util.List<android.audio.policy.engine.configuration.ProductStrategies> getProductStrategies();
method @Nullable public android.audio.policy.engine.configuration.Version getVersion();
method @Nullable public java.util.List<android.audio.policy.engine.configuration.VolumeGroupsType> getVolumeGroups();
method @Nullable public java.util.List<android.audio.policy.engine.configuration.VolumesType> getVolumes();
method public void setVersion(@Nullable android.audio.policy.engine.configuration.Version);
}
public enum ContentType {
method @NonNull public String getRawName();
enum_constant public static final android.audio.policy.engine.configuration.ContentType AUDIO_CONTENT_TYPE_MOVIE;
enum_constant public static final android.audio.policy.engine.configuration.ContentType AUDIO_CONTENT_TYPE_MUSIC;
enum_constant public static final android.audio.policy.engine.configuration.ContentType AUDIO_CONTENT_TYPE_SONIFICATION;
enum_constant public static final android.audio.policy.engine.configuration.ContentType AUDIO_CONTENT_TYPE_SPEECH;
enum_constant public static final android.audio.policy.engine.configuration.ContentType AUDIO_CONTENT_TYPE_UNKNOWN;
}
public class ContentTypeType {
ctor public ContentTypeType();
method @Nullable public android.audio.policy.engine.configuration.ContentType getValue();
method public void setValue(@Nullable android.audio.policy.engine.configuration.ContentType);
}
public class CriteriaType {
ctor public CriteriaType();
method @Nullable public java.util.List<android.audio.policy.engine.configuration.CriterionType> getCriterion();
}
public class CriterionType {
ctor public CriterionType();
method @Nullable public String getName();
method @Nullable public String getType();
method @Nullable public String get_default();
method public void setName(@Nullable String);
method public void setType(@Nullable String);
method public void set_default(@Nullable String);
}
public class CriterionTypeType {
ctor public CriterionTypeType();
method @Nullable public String getName();
method @Nullable public android.audio.policy.engine.configuration.PfwCriterionTypeEnum getType();
method @Nullable public android.audio.policy.engine.configuration.ValuesType getValues();
method public void setName(@Nullable String);
method public void setType(@Nullable android.audio.policy.engine.configuration.PfwCriterionTypeEnum);
method public void setValues(@Nullable android.audio.policy.engine.configuration.ValuesType);
}
public class CriterionTypesType {
ctor public CriterionTypesType();
method @Nullable public java.util.List<android.audio.policy.engine.configuration.CriterionTypeType> getCriterion_type();
}
public enum DeviceCategory {
method @NonNull public String getRawName();
enum_constant public static final android.audio.policy.engine.configuration.DeviceCategory DEVICE_CATEGORY_EARPIECE;
enum_constant public static final android.audio.policy.engine.configuration.DeviceCategory DEVICE_CATEGORY_EXT_MEDIA;
enum_constant public static final android.audio.policy.engine.configuration.DeviceCategory DEVICE_CATEGORY_HEADSET;
enum_constant public static final android.audio.policy.engine.configuration.DeviceCategory DEVICE_CATEGORY_HEARING_AID;
enum_constant public static final android.audio.policy.engine.configuration.DeviceCategory DEVICE_CATEGORY_SPEAKER;
}
public enum FlagType {
method @NonNull public String getRawName();
enum_constant public static final android.audio.policy.engine.configuration.FlagType AUDIO_FLAG_AUDIBILITY_ENFORCED;
enum_constant public static final android.audio.policy.engine.configuration.FlagType AUDIO_FLAG_BEACON;
enum_constant public static final android.audio.policy.engine.configuration.FlagType AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY;
enum_constant public static final android.audio.policy.engine.configuration.FlagType AUDIO_FLAG_BYPASS_MUTE;
enum_constant public static final android.audio.policy.engine.configuration.FlagType AUDIO_FLAG_CAPTURE_PRIVATE;
enum_constant public static final android.audio.policy.engine.configuration.FlagType AUDIO_FLAG_DEEP_BUFFER;
enum_constant public static final android.audio.policy.engine.configuration.FlagType AUDIO_FLAG_HW_AV_SYNC;
enum_constant public static final android.audio.policy.engine.configuration.FlagType AUDIO_FLAG_HW_HOTWORD;
enum_constant public static final android.audio.policy.engine.configuration.FlagType AUDIO_FLAG_LOW_LATENCY;
enum_constant public static final android.audio.policy.engine.configuration.FlagType AUDIO_FLAG_MUTE_HAPTIC;
enum_constant public static final android.audio.policy.engine.configuration.FlagType AUDIO_FLAG_NONE;
enum_constant public static final android.audio.policy.engine.configuration.FlagType AUDIO_FLAG_NO_MEDIA_PROJECTION;
enum_constant public static final android.audio.policy.engine.configuration.FlagType AUDIO_FLAG_NO_SYSTEM_CAPTURE;
enum_constant public static final android.audio.policy.engine.configuration.FlagType AUDIO_FLAG_SCO;
enum_constant public static final android.audio.policy.engine.configuration.FlagType AUDIO_FLAG_SECURE;
}
public class FlagsType {
ctor public FlagsType();
method @Nullable public java.util.List<android.audio.policy.engine.configuration.FlagType> getValue();
method public void setValue(@Nullable java.util.List<android.audio.policy.engine.configuration.FlagType>);
}
public enum ForcedConfigCommunicationDeviceType {
method @NonNull public String getRawName();
enum_constant public static final android.audio.policy.engine.configuration.ForcedConfigCommunicationDeviceType BT_BLE;
enum_constant public static final android.audio.policy.engine.configuration.ForcedConfigCommunicationDeviceType BT_SCO;
enum_constant public static final android.audio.policy.engine.configuration.ForcedConfigCommunicationDeviceType NONE;
enum_constant public static final android.audio.policy.engine.configuration.ForcedConfigCommunicationDeviceType SPEAKER;
enum_constant public static final android.audio.policy.engine.configuration.ForcedConfigCommunicationDeviceType WIRED_ACCESSORY;
}
public enum ForcedConfigDockType {
method @NonNull public String getRawName();
enum_constant public static final android.audio.policy.engine.configuration.ForcedConfigDockType ANALOG_DOCK;
enum_constant public static final android.audio.policy.engine.configuration.ForcedConfigDockType BT_CAR_DOCK;
enum_constant public static final android.audio.policy.engine.configuration.ForcedConfigDockType BT_DESK_DOCK;
enum_constant public static final android.audio.policy.engine.configuration.ForcedConfigDockType DIGITAL_DOCK;
enum_constant public static final android.audio.policy.engine.configuration.ForcedConfigDockType NONE;
enum_constant public static final android.audio.policy.engine.configuration.ForcedConfigDockType WIRED_ACCESSORY;
}
public enum ForcedConfigMediaDeviceType {
method @NonNull public String getRawName();
enum_constant public static final android.audio.policy.engine.configuration.ForcedConfigMediaDeviceType ANALOG_DOCK;
enum_constant public static final android.audio.policy.engine.configuration.ForcedConfigMediaDeviceType BT_A2DP;
enum_constant public static final android.audio.policy.engine.configuration.ForcedConfigMediaDeviceType DIGITAL_DOCK;
enum_constant public static final android.audio.policy.engine.configuration.ForcedConfigMediaDeviceType HEADPHONES;
enum_constant public static final android.audio.policy.engine.configuration.ForcedConfigMediaDeviceType NONE;
enum_constant public static final android.audio.policy.engine.configuration.ForcedConfigMediaDeviceType NO_BT_A2DP;
enum_constant public static final android.audio.policy.engine.configuration.ForcedConfigMediaDeviceType SPEAKER;
enum_constant public static final android.audio.policy.engine.configuration.ForcedConfigMediaDeviceType WIRED_ACCESSORY;
}
public enum ForcedEncodingSourroundConfigType {
method @NonNull public String getRawName();
enum_constant public static final android.audio.policy.engine.configuration.ForcedEncodingSourroundConfigType ALWAYS;
enum_constant public static final android.audio.policy.engine.configuration.ForcedEncodingSourroundConfigType MANUAL;
enum_constant public static final android.audio.policy.engine.configuration.ForcedEncodingSourroundConfigType NEVER;
enum_constant public static final android.audio.policy.engine.configuration.ForcedEncodingSourroundConfigType UNSPECIFIED;
}
public enum PfwCriterionTypeEnum {
method @NonNull public String getRawName();
enum_constant public static final android.audio.policy.engine.configuration.PfwCriterionTypeEnum exclusive;
enum_constant public static final android.audio.policy.engine.configuration.PfwCriterionTypeEnum inclusive;
}
public class ProductStrategies {
ctor public ProductStrategies();
method @Nullable public java.util.List<android.audio.policy.engine.configuration.ProductStrategies.ProductStrategy> getProductStrategy();
}
public static class ProductStrategies.ProductStrategy {
ctor public ProductStrategies.ProductStrategy();
method @Nullable public java.util.List<android.audio.policy.engine.configuration.AttributesGroup> getAttributesGroup();
method @Nullable public int getId();
method @Nullable public String getName();
method public void setId(@Nullable int);
method public void setName(@Nullable String);
}
public enum SourceEnumType {
method @NonNull public String getRawName();
enum_constant public static final android.audio.policy.engine.configuration.SourceEnumType AUDIO_SOURCE_CAMCORDER;
enum_constant public static final android.audio.policy.engine.configuration.SourceEnumType AUDIO_SOURCE_DEFAULT;
enum_constant public static final android.audio.policy.engine.configuration.SourceEnumType AUDIO_SOURCE_ECHO_REFERENCE;
enum_constant public static final android.audio.policy.engine.configuration.SourceEnumType AUDIO_SOURCE_FM_TUNER;
enum_constant public static final android.audio.policy.engine.configuration.SourceEnumType AUDIO_SOURCE_MIC;
enum_constant public static final android.audio.policy.engine.configuration.SourceEnumType AUDIO_SOURCE_REMOTE_SUBMIX;
enum_constant public static final android.audio.policy.engine.configuration.SourceEnumType AUDIO_SOURCE_UNPROCESSED;
enum_constant public static final android.audio.policy.engine.configuration.SourceEnumType AUDIO_SOURCE_VOICE_CALL;
enum_constant public static final android.audio.policy.engine.configuration.SourceEnumType AUDIO_SOURCE_VOICE_COMMUNICATION;
enum_constant public static final android.audio.policy.engine.configuration.SourceEnumType AUDIO_SOURCE_VOICE_DOWNLINK;
enum_constant public static final android.audio.policy.engine.configuration.SourceEnumType AUDIO_SOURCE_VOICE_PERFORMANCE;
enum_constant public static final android.audio.policy.engine.configuration.SourceEnumType AUDIO_SOURCE_VOICE_RECOGNITION;
enum_constant public static final android.audio.policy.engine.configuration.SourceEnumType AUDIO_SOURCE_VOICE_UPLINK;
}
public class SourceType {
ctor public SourceType();
method @Nullable public android.audio.policy.engine.configuration.SourceEnumType getValue();
method public void setValue(@Nullable android.audio.policy.engine.configuration.SourceEnumType);
}
public enum Stream {
method @NonNull public String getRawName();
enum_constant public static final android.audio.policy.engine.configuration.Stream AUDIO_STREAM_ACCESSIBILITY;
enum_constant public static final android.audio.policy.engine.configuration.Stream AUDIO_STREAM_ALARM;
enum_constant public static final android.audio.policy.engine.configuration.Stream AUDIO_STREAM_ASSISTANT;
enum_constant public static final android.audio.policy.engine.configuration.Stream AUDIO_STREAM_BLUETOOTH_SCO;
enum_constant public static final android.audio.policy.engine.configuration.Stream AUDIO_STREAM_DEFAULT;
enum_constant public static final android.audio.policy.engine.configuration.Stream AUDIO_STREAM_DTMF;
enum_constant public static final android.audio.policy.engine.configuration.Stream AUDIO_STREAM_ENFORCED_AUDIBLE;
enum_constant public static final android.audio.policy.engine.configuration.Stream AUDIO_STREAM_MUSIC;
enum_constant public static final android.audio.policy.engine.configuration.Stream AUDIO_STREAM_NOTIFICATION;
enum_constant public static final android.audio.policy.engine.configuration.Stream AUDIO_STREAM_RING;
enum_constant public static final android.audio.policy.engine.configuration.Stream AUDIO_STREAM_SYSTEM;
enum_constant public static final android.audio.policy.engine.configuration.Stream AUDIO_STREAM_TTS;
enum_constant public static final android.audio.policy.engine.configuration.Stream AUDIO_STREAM_VOICE_CALL;
}
public enum UsageEnumType {
method @NonNull public String getRawName();
enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_ALARM;
enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_ANNOUNCEMENT;
enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY;
enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE;
enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_ASSISTANCE_SONIFICATION;
enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_ASSISTANT;
enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_CALL_ASSISTANT;
enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_EMERGENCY;
enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_GAME;
enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_MEDIA;
enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_NOTIFICATION;
enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED;
enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT;
enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST;
enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_NOTIFICATION_EVENT;
enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE;
enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_SAFETY;
enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_SPEAKER_CLEANUP;
enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_UNKNOWN;
enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_VEHICLE_STATUS;
enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_VIRTUAL_SOURCE;
enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_VOICE_COMMUNICATION;
enum_constant public static final android.audio.policy.engine.configuration.UsageEnumType AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING;
}
public class UsageType {
ctor public UsageType();
method @Nullable public android.audio.policy.engine.configuration.UsageEnumType getValue();
method public void setValue(@Nullable android.audio.policy.engine.configuration.UsageEnumType);
}
public class ValueType {
ctor public ValueType();
method @Nullable public String getAndroid_type();
method @Nullable public String getLiteral();
method public void setAndroid_type(@Nullable String);
method public void setLiteral(@Nullable String);
}
public class ValuesType {
ctor public ValuesType();
method @Nullable public java.util.List<android.audio.policy.engine.configuration.ValueType> getValue();
}
public enum Version {
method @NonNull public String getRawName();
enum_constant public static final android.audio.policy.engine.configuration.Version _1_0;
}
public class Volume {
ctor public Volume();
method @Nullable public android.audio.policy.engine.configuration.DeviceCategory getDeviceCategory();
method @Nullable public java.util.List<java.lang.String> getPoint();
method @Nullable public String getRef();
method public void setDeviceCategory(@Nullable android.audio.policy.engine.configuration.DeviceCategory);
method public void setRef(@Nullable String);
}
public class VolumeGroupsType {
ctor public VolumeGroupsType();
method @Nullable public java.util.List<android.audio.policy.engine.configuration.VolumeGroupsType.VolumeGroup> getVolumeGroup();
}
public static class VolumeGroupsType.VolumeGroup {
ctor public VolumeGroupsType.VolumeGroup();
method @Nullable public int getIndexMax();
method @Nullable public int getIndexMin();
method @Nullable public String getName();
method @Nullable public java.util.List<android.audio.policy.engine.configuration.Volume> getVolume();
method public void setIndexMax(@Nullable int);
method public void setIndexMin(@Nullable int);
method public void setName(@Nullable String);
}
public class VolumeRef {
ctor public VolumeRef();
method @Nullable public String getName();
method @Nullable public java.util.List<java.lang.String> getPoint();
method public void setName(@Nullable String);
}
public class VolumesType {
ctor public VolumesType();
method @Nullable public java.util.List<android.audio.policy.engine.configuration.VolumeRef> getReference();
}
public class XmlParser {
ctor public XmlParser();
method @Nullable public static android.audio.policy.engine.configuration.Configuration read(@NonNull java.io.InputStream) throws javax.xml.datatype.DatatypeConfigurationException, java.io.IOException, org.xmlpull.v1.XmlPullParserException;
method @Nullable public static String readText(@NonNull org.xmlpull.v1.XmlPullParser) throws java.io.IOException, org.xmlpull.v1.XmlPullParserException;
method public static void skip(@NonNull org.xmlpull.v1.XmlPullParser) throws java.io.IOException, org.xmlpull.v1.XmlPullParserException;
}
}

View File

@@ -0,0 +1 @@
// Signature format: 2.0

View File

@@ -0,0 +1,479 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<!-- Copyright (C) 2019 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<xs:schema version="2.0"
elementFormDefault="qualified"
attributeFormDefault="unqualified"
xmlns:xs="http://www.w3.org/2001/XMLSchema">
<!-- List the config versions supported by audio policy engine. -->
<xs:simpleType name="version">
<xs:restriction base="xs:decimal">
<xs:enumeration value="1.0"/>
</xs:restriction>
</xs:simpleType>
<xs:element name="configuration">
<xs:complexType>
<xs:sequence>
<xs:element name="ProductStrategies" type="ProductStrategies" minOccurs="0" maxOccurs="unbounded"/>
<xs:element name="criterion_types" type="criterionTypesType" minOccurs="0" maxOccurs="unbounded"/>
<xs:element name="criteria" type="criteriaType" minOccurs="0" maxOccurs="unbounded"/>
<xs:element name="volumeGroups" type="volumeGroupsType" minOccurs="0" maxOccurs="unbounded"/>
<xs:element name="volumes" type="volumesType" minOccurs="0" maxOccurs="unbounded"/>
<xs:element name="attributesRef" type="attributesRef" minOccurs="0" maxOccurs="unbounded"/>
</xs:sequence>
<xs:attribute name="version" type="version" use="required"/>
</xs:complexType>
<xs:key name="volumeCurveNameKey">
<xs:selector xpath="volumes/reference"/>
<xs:field xpath="@name"/>
</xs:key>
<xs:keyref name="volumeCurveRef" refer="volumeCurveNameKey">
<xs:selector xpath="volumeGroups/volumeGroup"/>
<xs:field xpath="@ref"/>
</xs:keyref>
<xs:key name="attributesRefNameKey">
<xs:selector xpath="attributesRef/reference"/>
<xs:field xpath="@name"/>
</xs:key>
<xs:keyref name="volumeGroupAttributesRef" refer="attributesRefNameKey">
<xs:selector xpath="volumeGroups/volumeGroup/volume"/>
<xs:field xpath="@attributesRef"/>
</xs:keyref>
<xs:keyref name="ProductStrategyAttributesRef" refer="attributesRefNameKey">
<xs:selector xpath="ProductStrategies/ProductStrategy/Attributes"/>
<xs:field xpath="@attributesRef"/>
</xs:keyref>
<xs:unique name="productStrategyNameUniqueness">
<xs:selector xpath="ProductStrategies/ProductStrategy"/>
<xs:field xpath="@name"/>
</xs:unique>
<!-- Ensure validity of the volume group referred to in a product strategy. -->
<xs:key name="volumeGroupKey">
<xs:selector xpath="volumeGroups/volumeGroup/name"/>
<xs:field xpath="."/>
</xs:key>
<xs:keyref name="volumeGroupRef" refer="volumeGroupKey">
<xs:selector xpath="ProductStrategies/ProductStrategy/AttributesGroup"/>
<xs:field xpath="@volumeGroup"/>
</xs:keyref>
<xs:unique name="volumeTargetUniqueness">
<xs:selector xpath="volumeGroups/volumeGroup"/>
<xs:field xpath="@name"/>
<xs:field xpath="@deviceCategory"/>
</xs:unique>
<!-- Ensure validity of the criterion type referred to in a criterion. -->
<xs:key name="criterionTypeKey">
<xs:selector xpath="criterion_types/criterion_type"/>
<xs:field xpath="@name"/>
</xs:key>
<xs:keyref name="criterionTypeKeyRef" refer="criterionTypeKey">
<xs:selector xpath="criteria/criterion"/>
<xs:field xpath="@type"/>
</xs:keyref>
</xs:element>
<xs:complexType name="ProductStrategies">
<xs:annotation>
<xs:documentation xml:lang="en">
</xs:documentation>
</xs:annotation>
<xs:sequence>
<xs:element name="ProductStrategy" maxOccurs="unbounded">
<xs:complexType>
<xs:sequence>
<xs:element name="AttributesGroup" type="AttributesGroup" minOccurs="1" maxOccurs="unbounded"/>
</xs:sequence>
<xs:attribute name="name" type="xs:string" use="required"/>
<!-- Only needs to be specified for vendor strategies. -->
<xs:attribute name="id" type="xs:int" use="optional"/>
</xs:complexType>
</xs:element>
</xs:sequence>
</xs:complexType>
<xs:complexType name="AttributesGroup">
<xs:sequence>
<xs:choice minOccurs="0">
<xs:element name="Attributes" type="AttributesType" minOccurs="1" maxOccurs="unbounded"/>
<xs:sequence>
<xs:element name="ContentType" type="ContentTypeType" minOccurs="0" maxOccurs="1"/>
<xs:element name="Usage" type="UsageType" minOccurs="1" maxOccurs="1"/>
<xs:element name="Source" type="SourceType" minOccurs="0" maxOccurs="1"/>
<xs:element name="Flags" type="FlagsType" minOccurs="0" maxOccurs="1"/>
<xs:element name="Bundle" type="BundleType" minOccurs="0" maxOccurs="1"/>
</xs:sequence>
</xs:choice>
</xs:sequence>
<xs:attribute name="streamType" type="stream" use="optional"/>
<xs:attribute name="volumeGroup" type="xs:string" use="optional"/>
</xs:complexType>
<xs:complexType name="volumeGroupsType">
<xs:sequence>
<xs:element name="volumeGroup" minOccurs="0" maxOccurs="unbounded">
<xs:complexType>
<xs:sequence>
<xs:element name="name" type="xs:token"/>
<xs:element name="indexMin" type="xs:int" minOccurs="0" maxOccurs="1"/>
<xs:element name="indexMax" type="xs:int" minOccurs="0" maxOccurs="1"/>
<xs:element name="volume" type="volume" minOccurs="1" maxOccurs="unbounded"/>
</xs:sequence>
</xs:complexType>
<xs:unique name="volumeAttributesUniqueness">
<xs:selector xpath="volume"/>
<xs:field xpath="deviceCategory"/>
</xs:unique>
</xs:element>
</xs:sequence>
</xs:complexType>
<xs:complexType name="volumesType">
<xs:sequence>
<xs:element name="reference" type="volumeRef" minOccurs="0" maxOccurs="unbounded"/>
</xs:sequence>
</xs:complexType>
<xs:complexType name="attributesRef">
<xs:sequence>
<xs:element name="reference" type="attributesRefType" minOccurs="0" maxOccurs="unbounded"/>
</xs:sequence>
</xs:complexType>
<xs:complexType name="criteriaType">
<xs:sequence>
<xs:element name="criterion" type="criterionType" maxOccurs="unbounded"/>
</xs:sequence>
</xs:complexType>
<xs:complexType name="criterionType">
<xs:attribute name="name" type="xs:string" use="required"/>
<xs:attribute name="type" type="xs:string" use="required"/>
<xs:attribute name="default" type="xs:string" use="optional"/>
</xs:complexType>
<xs:complexType name="criterionTypesType">
<xs:sequence>
<xs:element name="criterion_type" type="criterionTypeType" maxOccurs="unbounded"/>
</xs:sequence>
</xs:complexType>
<xs:complexType name="criterionTypeType">
<xs:sequence>
<xs:element name="values" type="valuesType" minOccurs="0" maxOccurs="1"/>
</xs:sequence>
<xs:attribute name="name" type="xs:token" use="required"/>
<xs:attribute name="type" type="pfwCriterionTypeEnum" use="required"/>
</xs:complexType>
<xs:complexType name="valuesType">
<xs:sequence>
<xs:element name="value" type="valueType" maxOccurs="unbounded"/>
</xs:sequence>
</xs:complexType>
<xs:complexType name="valueType">
<xs:annotation>
<xs:documentation xml:lang="en">
A criterion type value is provided as a pair: a 'human readable' string (referred
to as the literal part, which allows expressing 'human readable' rules) and an
optional android type.
The android type is reserved for mapping device types between the parameter
framework representation, which uses a bitfield (only one bit is expected to
represent a device), and the android representation of a type, which may use
several bits.
The lookup table allows wrapping an android device type into the parameter
framework device types data model.
</xs:documentation>
</xs:annotation>
<xs:attribute name="literal" type="xs:string" use="required"/>
<xs:attribute name="android_type" type="longDecimalOrHexType" use="optional"/>
</xs:complexType>
<xs:simpleType name="longDecimalOrHexType">
<xs:union memberTypes="xs:long longHexType" />
</xs:simpleType>
<xs:simpleType name="longHexType">
<xs:restriction base="xs:string">
<xs:pattern value="0x[0-9A-Fa-f]{1,16}"/>
</xs:restriction>
</xs:simpleType>
<xs:complexType name="attributesRefType">
<xs:sequence>
<xs:element name="Attributes" type="AttributesType" minOccurs="1" maxOccurs="1"/>
</xs:sequence>
<xs:attribute name="name" type="xs:token" use="required"/>
</xs:complexType>
<xs:complexType name="AttributesType">
<xs:sequence>
<xs:element name="ContentType" type="ContentTypeType" minOccurs="0" maxOccurs="1"/>
<xs:element name="Usage" type="UsageType" minOccurs="0" maxOccurs="1"/>
<xs:element name="Source" type="SourceType" minOccurs="0" maxOccurs="1"/>
<xs:element name="Flags" type="FlagsType" minOccurs="0" maxOccurs="1"/>
<xs:element name="Bundle" type="BundleType" minOccurs="0" maxOccurs="1"/>
</xs:sequence>
<xs:attribute name="attributesRef" type="xs:token" use="optional"/>
<!-- With xsd 1.1, it is impossible to make a choice between either attributes or elements... -->
</xs:complexType>
<xs:complexType name="ContentTypeType">
<xs:attribute name="value" type="contentType" use="required"/>
</xs:complexType>
<xs:complexType name="UsageType">
<xs:attribute name="value" type="usageEnumType" use="required"/>
</xs:complexType>
<xs:complexType name="SourceType">
<xs:attribute name="value" type="sourceEnumType" use="required"/>
</xs:complexType>
<xs:complexType name="FlagsType">
<xs:attribute name="value" type="flagsEnumType" use="required"/>
</xs:complexType>
<xs:complexType name="BundleType">
<xs:attribute name="key" type="xs:string" use="required"/>
<xs:attribute name="value" type="xs:string" use="required"/>
</xs:complexType>
<xs:complexType name="volume">
<xs:annotation>
<xs:documentation xml:lang="en">
Volume section defines a volume curve for a given use case and device category.
It contains a list of points of this curve expressing the attenuation in Millibels
for a given volume index from 0 to 100.
<volume deviceCategory="DEVICE_CATEGORY_SPEAKER">
<point>0,-9600</point>
<point>100,0</point>
</volume>
It may also reference a reference/@name to avoid duplicating curves.
<volume deviceCategory="DEVICE_CATEGORY_SPEAKER" ref="DEFAULT_MEDIA_VOLUME_CURVE"/>
<reference name="DEFAULT_MEDIA_VOLUME_CURVE">
<point>0,-9600</point>
<point>100,0</point>
</reference>
</xs:documentation>
</xs:annotation>
<xs:sequence>
<xs:element name="point" type="volumePoint" minOccurs="0" maxOccurs="unbounded"/>
</xs:sequence>
<xs:attribute name="deviceCategory" type="deviceCategory"/>
<xs:attribute name="ref" type="xs:token" use="optional"/>
</xs:complexType>
<xs:complexType name="volumeRef">
<xs:sequence>
<xs:element name="point" type="volumePoint" minOccurs="2" maxOccurs="unbounded"/>
</xs:sequence>
<xs:attribute name="name" type="xs:token" use="required"/>
</xs:complexType>
<xs:simpleType name="volumePoint">
<xs:annotation>
<xs:documentation xml:lang="en">
Comma-separated pair of numbers.
The first one is the framework level (between 0 and 100).
The second one is the volume to send to the HAL.
The framework will interpolate volumes not specified.
There MUST be at least 2 points specified.
</xs:documentation>
</xs:annotation>
<xs:restriction base="xs:string">
<xs:pattern value="([0-9]{1,2}|100),-?[0-9]+"/>
</xs:restriction>
</xs:simpleType>
<xs:simpleType name="streamsCsv">
<xs:list>
<xs:simpleType>
<xs:restriction base="stream">
</xs:restriction>
</xs:simpleType>
</xs:list>
</xs:simpleType>
<!-- Enum values of audio_stream_type_t in audio-base.h
TODO: avoid manual sync. -->
<xs:simpleType name="stream">
<xs:restriction base="xs:NMTOKEN">
<!--xs:pattern value="\c+(,\c+)*"/-->
<xs:enumeration value="AUDIO_STREAM_DEFAULT"/>
<xs:enumeration value="AUDIO_STREAM_VOICE_CALL"/>
<xs:enumeration value="AUDIO_STREAM_SYSTEM"/>
<xs:enumeration value="AUDIO_STREAM_RING"/>
<xs:enumeration value="AUDIO_STREAM_MUSIC"/>
<xs:enumeration value="AUDIO_STREAM_ALARM"/>
<xs:enumeration value="AUDIO_STREAM_NOTIFICATION"/>
<xs:enumeration value="AUDIO_STREAM_BLUETOOTH_SCO"/>
<xs:enumeration value="AUDIO_STREAM_ENFORCED_AUDIBLE"/>
<xs:enumeration value="AUDIO_STREAM_DTMF"/>
<xs:enumeration value="AUDIO_STREAM_TTS"/>
<xs:enumeration value="AUDIO_STREAM_ACCESSIBILITY"/>
<xs:enumeration value="AUDIO_STREAM_ASSISTANT"/>
</xs:restriction>
</xs:simpleType>
<xs:simpleType name="deviceCategory">
<xs:restriction base="xs:string">
<xs:enumeration value="DEVICE_CATEGORY_HEADSET"/>
<xs:enumeration value="DEVICE_CATEGORY_SPEAKER"/>
<xs:enumeration value="DEVICE_CATEGORY_EARPIECE"/>
<xs:enumeration value="DEVICE_CATEGORY_EXT_MEDIA"/>
<xs:enumeration value="DEVICE_CATEGORY_HEARING_AID"/>
</xs:restriction>
</xs:simpleType>
<xs:simpleType name="contentType">
<xs:restriction base="xs:string">
<xs:enumeration value="AUDIO_CONTENT_TYPE_UNKNOWN"/>
<xs:enumeration value="AUDIO_CONTENT_TYPE_SPEECH"/>
<xs:enumeration value="AUDIO_CONTENT_TYPE_MUSIC"/>
<xs:enumeration value="AUDIO_CONTENT_TYPE_MOVIE"/>
<xs:enumeration value="AUDIO_CONTENT_TYPE_SONIFICATION"/>
</xs:restriction>
</xs:simpleType>
<!-- Audio usages (AUDIO_USAGE_* values).
     NOTE(review): presumably mirrors audio_usage_t; keep in sync manually. -->
<xs:simpleType name="usageEnumType">
    <xs:restriction base="xs:string">
        <xs:enumeration value="AUDIO_USAGE_UNKNOWN"/>
        <xs:enumeration value="AUDIO_USAGE_MEDIA"/>
        <xs:enumeration value="AUDIO_USAGE_VOICE_COMMUNICATION"/>
        <xs:enumeration value="AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING"/>
        <xs:enumeration value="AUDIO_USAGE_ALARM"/>
        <xs:enumeration value="AUDIO_USAGE_NOTIFICATION"/>
        <xs:enumeration value="AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE"/>
        <!-- Note: the following 3 values were deprecated in Android T (13) SDK -->
        <xs:enumeration value="AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST"/>
        <xs:enumeration value="AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT"/>
        <xs:enumeration value="AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED"/>
        <xs:enumeration value="AUDIO_USAGE_NOTIFICATION_EVENT"/>
        <xs:enumeration value="AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY"/>
        <xs:enumeration value="AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE"/>
        <xs:enumeration value="AUDIO_USAGE_ASSISTANCE_SONIFICATION"/>
        <xs:enumeration value="AUDIO_USAGE_GAME"/>
        <xs:enumeration value="AUDIO_USAGE_VIRTUAL_SOURCE"/>
        <xs:enumeration value="AUDIO_USAGE_ASSISTANT"/>
        <xs:enumeration value="AUDIO_USAGE_CALL_ASSISTANT"/>
        <xs:enumeration value="AUDIO_USAGE_EMERGENCY" />
        <xs:enumeration value="AUDIO_USAGE_SAFETY" />
        <xs:enumeration value="AUDIO_USAGE_VEHICLE_STATUS" />
        <xs:enumeration value="AUDIO_USAGE_ANNOUNCEMENT" />
        <xs:enumeration value="AUDIO_USAGE_SPEAKER_CLEANUP" />
    </xs:restriction>
</xs:simpleType>
<!-- A whitespace-separated list of flagType tokens (xs:list), e.g.
     "AUDIO_FLAG_LOW_LATENCY AUDIO_FLAG_HW_AV_SYNC". -->
<xs:simpleType name="flagsEnumType">
    <xs:list>
        <xs:simpleType>
            <xs:restriction base="flagType">
            </xs:restriction>
        </xs:simpleType>
    </xs:list>
</xs:simpleType>
<!-- Audio flags (AUDIO_FLAG_* values).
     NOTE(review): presumably mirrors audio_flags_mask_t; keep in sync
     manually. -->
<xs:simpleType name="flagType">
    <xs:restriction base="xs:NMTOKEN">
        <xs:enumeration value="AUDIO_FLAG_NONE"/>
        <xs:enumeration value="AUDIO_FLAG_AUDIBILITY_ENFORCED"/>
        <xs:enumeration value="AUDIO_FLAG_SECURE"/>
        <xs:enumeration value="AUDIO_FLAG_SCO"/>
        <xs:enumeration value="AUDIO_FLAG_BEACON"/>
        <xs:enumeration value="AUDIO_FLAG_HW_AV_SYNC"/>
        <xs:enumeration value="AUDIO_FLAG_HW_HOTWORD"/>
        <xs:enumeration value="AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY"/>
        <xs:enumeration value="AUDIO_FLAG_BYPASS_MUTE"/>
        <xs:enumeration value="AUDIO_FLAG_LOW_LATENCY"/>
        <xs:enumeration value="AUDIO_FLAG_DEEP_BUFFER"/>
        <xs:enumeration value="AUDIO_FLAG_NO_MEDIA_PROJECTION"/>
        <xs:enumeration value="AUDIO_FLAG_MUTE_HAPTIC"/>
        <xs:enumeration value="AUDIO_FLAG_NO_SYSTEM_CAPTURE"/>
        <xs:enumeration value="AUDIO_FLAG_CAPTURE_PRIVATE"/>
    </xs:restriction>
</xs:simpleType>
<!-- Forced-routing device choices for the "communication" force-use category. -->
<xs:simpleType name="forcedConfigCommunicationDeviceType">
    <xs:restriction base="xs:string">
        <xs:enumeration value="NONE"/>
        <xs:enumeration value="SPEAKER"/>
        <xs:enumeration value="BT_SCO"/>
        <xs:enumeration value="BT_BLE"/>
        <xs:enumeration value="WIRED_ACCESSORY"/>
    </xs:restriction>
</xs:simpleType>
<!-- Forced-routing device choices for the "media" force-use category. -->
<xs:simpleType name="forcedConfigMediaDeviceType">
    <xs:restriction base="xs:string">
        <xs:enumeration value="NONE"/>
        <xs:enumeration value="SPEAKER"/>
        <xs:enumeration value="HEADPHONES"/>
        <xs:enumeration value="BT_A2DP"/>
        <xs:enumeration value="ANALOG_DOCK"/>
        <xs:enumeration value="DIGITAL_DOCK"/>
        <xs:enumeration value="WIRED_ACCESSORY"/>
        <xs:enumeration value="NO_BT_A2DP"/>
    </xs:restriction>
</xs:simpleType>
<!-- Forced-routing device choices for the "dock" force-use category. -->
<xs:simpleType name="forcedConfigDockType">
    <xs:restriction base="xs:string">
        <xs:enumeration value="NONE"/>
        <xs:enumeration value="BT_CAR_DOCK"/>
        <xs:enumeration value="BT_DESK_DOCK"/>
        <xs:enumeration value="ANALOG_DOCK"/>
        <xs:enumeration value="DIGITAL_DOCK"/>
        <xs:enumeration value="WIRED_ACCESSORY"/>
    </xs:restriction>
</xs:simpleType>
<!-- Forced surround-encoding policy.
     NOTE(review): "Sourround" is misspelled, but the name is kept as-is since
     existing configuration files and generated code likely reference it. -->
<xs:simpleType name="forcedEncodingSourroundConfigType">
    <xs:restriction base="xs:string">
        <xs:enumeration value="UNSPECIFIED"/>
        <xs:enumeration value="NEVER"/>
        <xs:enumeration value="ALWAYS"/>
        <xs:enumeration value="MANUAL"/>
    </xs:restriction>
</xs:simpleType>
<!-- Audio capture sources (AUDIO_SOURCE_* values).
     NOTE(review): presumably mirrors audio_source_t; keep in sync manually. -->
<xs:simpleType name="sourceEnumType">
    <xs:restriction base="xs:string">
        <xs:enumeration value="AUDIO_SOURCE_DEFAULT"/>
        <xs:enumeration value="AUDIO_SOURCE_MIC"/>
        <xs:enumeration value="AUDIO_SOURCE_VOICE_UPLINK"/>
        <xs:enumeration value="AUDIO_SOURCE_VOICE_DOWNLINK"/>
        <xs:enumeration value="AUDIO_SOURCE_VOICE_CALL"/>
        <xs:enumeration value="AUDIO_SOURCE_CAMCORDER"/>
        <xs:enumeration value="AUDIO_SOURCE_VOICE_RECOGNITION"/>
        <xs:enumeration value="AUDIO_SOURCE_VOICE_COMMUNICATION"/>
        <xs:enumeration value="AUDIO_SOURCE_REMOTE_SUBMIX"/>
        <xs:enumeration value="AUDIO_SOURCE_UNPROCESSED"/>
        <xs:enumeration value="AUDIO_SOURCE_VOICE_PERFORMANCE"/>
        <xs:enumeration value="AUDIO_SOURCE_ECHO_REFERENCE"/>
        <xs:enumeration value="AUDIO_SOURCE_FM_TUNER"/>
    </xs:restriction>
</xs:simpleType>
<!-- Parameter-framework criterion kind: "inclusive" or "exclusive".
     NOTE(review): semantics (bit-field vs single-value) are defined by the
     parameter framework; confirm against its documentation. -->
<xs:simpleType name="pfwCriterionTypeEnum">
    <xs:restriction base="xs:string">
        <xs:enumeration value="inclusive"/>
        <xs:enumeration value="exclusive"/>
    </xs:restriction>
</xs:simpleType>
</xs:schema>

View File

@@ -0,0 +1,271 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <limits>
#define LOG_TAG "AHAL_StreamSwitcher"
#include <Utils.h>
#include <android-base/logging.h>
#include <error/expected_utils.h>
#include "core-impl/StreamStub.h"
#include "deprecated/StreamSwitcher.h"
using aidl::android::hardware::audio::effect::IEffect;
using aidl::android::media::audio::common::AudioDevice;
namespace aidl::android::hardware::audio::core::deprecated {
// Starts with a stub stream implementation so that 'StreamDescriptor' commands
// can be served before the extending class supplies a "real" implementation
// (see 'setConnectedDevices'). The context is not owned by this class.
StreamSwitcher::StreamSwitcher(StreamContext* context, const Metadata& metadata)
    : mContext(context),
      mMetadata(metadata),
      mStream(new InnerStreamWrapper<StreamStub>(context, mMetadata)) {}
// Closes the inner stream and resets 'mStream'. When 'validateStreamState' is
// set, returns 'EX_ILLEGAL_STATE' if the inner stream was not in a state that
// permits switching (see 'isValidClosingStreamState'). Note that in that error
// case the stream has already been closed, but 'mStream' is intentionally not
// reset, so the switcher keeps a (closed) stream object.
ndk::ScopedAStatus StreamSwitcher::closeCurrentStream(bool validateStreamState) {
    if (!mStream) return ndk::ScopedAStatus::ok();
    // 'prepareToClose' must precede 'close', mirroring the IStreamCommon contract.
    RETURN_STATUS_IF_ERROR(mStream->prepareToClose());
    RETURN_STATUS_IF_ERROR(mStream->close());
    if (validateStreamState && !isValidClosingStreamState(mStream->getStatePriorToClosing())) {
        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
    }
    mStream.reset();
    return ndk::ScopedAStatus::ok();
}
// Closes the currently owned inner stream. Closing an already closed stream is
// an error ('EX_ILLEGAL_STATE').
ndk::ScopedAStatus StreamSwitcher::close() {
    if (mStream == nullptr) {
        LOG(ERROR) << __func__ << ": stream was already closed";
        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
    }
    auto status = closeCurrentStream(false /*validateStreamState*/);
    // The actual state is irrelevant since only StreamSwitcher cares about it.
    onClose(StreamDescriptor::State::STANDBY);
    return status;
}
// Forwards 'prepareToClose' to the inner stream; fails if already closed.
ndk::ScopedAStatus StreamSwitcher::prepareToClose() {
    if (mStream == nullptr) {
        LOG(ERROR) << __func__ << ": stream was closed";
        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
    }
    return mStream->prepareToClose();
}
// Forwards the HW AV sync id to the inner stream and remembers it so it can be
// replayed onto a new stream implementation after a switch.
ndk::ScopedAStatus StreamSwitcher::updateHwAvSyncId(int32_t in_hwAvSyncId) {
    if (!mStream) {
        LOG(ERROR) << __func__ << ": stream was closed";
        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
    }
    // Forward first; only remember the id if the inner stream accepted it.
    RETURN_STATUS_IF_ERROR(mStream->updateHwAvSyncId(in_hwAvSyncId));
    mHwAvSyncId = in_hwAvSyncId;
    return ndk::ScopedAStatus::ok();
}
// Reads vendor parameters from the inner stream. Only a "real" (non-stub,
// non-closed) stream can report meaningful values.
ndk::ScopedAStatus StreamSwitcher::getVendorParameters(const std::vector<std::string>& in_ids,
                                                       std::vector<VendorParameter>* _aidl_return) {
    const char* error = mStream == nullptr ? ": stream was closed"
                        : mIsStubStream    ? ": the stream is not connected"
                                           : nullptr;
    if (error != nullptr) {
        LOG(ERROR) << __func__ << error;
        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
    }
    return mStream->getVendorParameters(in_ids, _aidl_return);
}
// Applies vendor parameters to the inner stream. While the stub stream is
// engaged, the parameters are stashed and replayed after a switch to a "real"
// implementation (see 'setConnectedDevices').
ndk::ScopedAStatus StreamSwitcher::setVendorParameters(
        const std::vector<VendorParameter>& in_parameters, bool in_async) {
    if (!mStream) {
        LOG(ERROR) << __func__ << ": stream was closed";
        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
    }
    if (!mIsStubStream) {
        return mStream->setVendorParameters(in_parameters, in_async);
    }
    mMissedParameters.emplace_back(in_parameters, in_async);
    return ndk::ScopedAStatus::ok();
}
// Attaches an effect to the inner stream and records it so it can be re-added
// onto a future stream implementation after a switch.
ndk::ScopedAStatus StreamSwitcher::addEffect(const std::shared_ptr<IEffect>& in_effect) {
    if (!in_effect) {
        LOG(DEBUG) << __func__ << ": null effect";
        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
    }
    if (!mStream) {
        LOG(ERROR) << __func__ << ": stream was closed";
        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
    }
    // Forward to the real stream first so a failure there is not recorded.
    if (!mIsStubStream) {
        RETURN_STATUS_IF_ERROR(mStream->addEffect(in_effect));
    }
    mEffects.push_back(in_effect);
    return ndk::ScopedAStatus::ok();
}
// Detaches an effect: drops the first matching entry (compared by binder) from
// the replay list, then forwards the removal to a non-stub inner stream.
ndk::ScopedAStatus StreamSwitcher::removeEffect(const std::shared_ptr<IEffect>& in_effect) {
    if (!in_effect) {
        LOG(DEBUG) << __func__ << ": null effect";
        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
    }
    if (!mStream) {
        LOG(ERROR) << __func__ << ": stream was closed";
        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
    }
    auto match = mEffects.begin();
    while (match != mEffects.end() && (*match)->asBinder() != in_effect->asBinder()) {
        ++match;
    }
    if (match != mEffects.end()) {
        mEffects.erase(match);
    }
    // Forwarded regardless of whether the effect was found in the replay list,
    // matching the inner stream's own bookkeeping.
    if (mIsStubStream) {
        return ndk::ScopedAStatus::ok();
    }
    return mStream->removeEffect(in_effect);
}
// Returns the 'IStreamCommon' delegator created in 'initInstance'. A missing
// delegator indicates an initialization ordering bug, hence LOG(FATAL).
ndk::ScopedAStatus StreamSwitcher::getStreamCommonCommon(
        std::shared_ptr<IStreamCommon>* _aidl_return) {
    if (!mCommon) {
        LOG(FATAL) << __func__ << ": the common interface was not created";
    }
    *_aidl_return = mCommon.getInstance();
    LOG(DEBUG) << __func__ << ": returning " << (*_aidl_return)->asBinder().get();
    return ndk::ScopedAStatus::ok();
}
// Updates stream metadata. The local copy is always refreshed so that a newly
// created stream implementation starts with the latest metadata; the update is
// only forwarded when a "real" (non-stub) stream is engaged.
ndk::ScopedAStatus StreamSwitcher::updateMetadataCommon(const Metadata& metadata) {
    if (!mStream) {
        LOG(ERROR) << __func__ << ": stream was closed";
        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
    }
    mMetadata = metadata;
    if (mIsStubStream) {
        return ndk::ScopedAStatus::ok();
    }
    return mStream->updateMetadataCommon(metadata);
}
// Creates the 'IStreamCommon' delegator for this switcher and initializes the
// inner stream. 'delegate' is the object serving 'IStreamCommon' on behalf of
// the enclosing stream (this switcher).
ndk::ScopedAStatus StreamSwitcher::initInstance(
        const std::shared_ptr<StreamCommonInterface>& delegate) {
    mCommon = ndk::SharedRefBase::make<StreamCommonDelegator>(delegate);
    // The delegate is null because StreamSwitcher handles IStreamCommon methods by itself.
    return mStream->initInstance(nullptr);
}
// Returns the (externally owned) stream context.
const StreamContext& StreamSwitcher::getContext() const {
    return *mContext;
}

// The switcher is closed when it has no inner stream or the inner stream
// itself reports closed.
bool StreamSwitcher::isClosed() const {
    return mStream == nullptr || mStream->isClosed();
}

// Delegates to the inner stream, which tracks the connected devices.
const StreamCommonInterface::ConnectedDevices& StreamSwitcher::getConnectedDevices() const {
    return mStream->getConnectedDevices();
}
// Implements the stream-switching state machine. Depending on the verdict of
// the extending class ('switchCurrentStream'), the current inner stream is
// kept, replaced by a new "real" implementation, or reverted to a stub.
// After a successful switch to a new "real" stream, commands postponed while
// the stub was engaged (HW AV sync id, vendor parameters, effects, BT
// parameter updates) are replayed onto the new stream.
ndk::ScopedAStatus StreamSwitcher::setConnectedDevices(const std::vector<AudioDevice>& devices) {
    LOG(DEBUG) << __func__ << ": " << ::android::internal::ToString(devices);
    // No-op when the device set is unchanged.
    if (mStream->getConnectedDevices() == devices) return ndk::ScopedAStatus::ok();
    const DeviceSwitchBehavior behavior = switchCurrentStream(devices);
    if (behavior == DeviceSwitchBehavior::UNSUPPORTED_DEVICES) {
        return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
    } else if (behavior == DeviceSwitchBehavior::SWITCH_TO_STUB_STREAM && !devices.empty()) {
        // This is an error in the extending class: the stub may only be used
        // while no devices are connected (class invariant).
        LOG(FATAL) << __func__
                   << ": switching to stub stream with connected devices is not allowed";
    }
    if (behavior == USE_CURRENT_STREAM) {
        // If the stub was engaged, it is "promoted" to a real implementation
        // without being recreated.
        mIsStubStream = false;
    } else {
        LOG(DEBUG) << __func__ << ": connected devices changed, switching stream";
        // Two streams can't be opened for the same context, thus we always need to close
        // the current one before creating a new one.
        RETURN_STATUS_IF_ERROR(closeCurrentStream(true /*validateStreamState*/));
        if (behavior == CREATE_NEW_STREAM) {
            mStream = createNewStream(devices, mContext, mMetadata);
            mIsStubStream = false;
        } else {  // SWITCH_TO_STUB_STREAM
            mStream.reset(new InnerStreamWrapper<StreamStub>(mContext, mMetadata));
            mIsStubStream = true;
        }
        // The delegate is null because StreamSwitcher handles IStreamCommon methods by itself.
        if (ndk::ScopedAStatus status = mStream->initInstance(nullptr); !status.isOk()) {
            if (mIsStubStream) {
                // A stub stream must always initialize; failure here is a bug.
                LOG(FATAL) << __func__
                           << ": failed to initialize stub stream: " << status.getDescription();
            }
            // Need to close the current failed stream, and report an error.
            // Since we can't operate without a stream implementation, put a stub in.
            RETURN_STATUS_IF_ERROR(closeCurrentStream(false /*validateStreamState*/));
            mStream.reset(new InnerStreamWrapper<StreamStub>(mContext, mMetadata));
            (void)mStream->initInstance(nullptr);
            (void)mStream->setConnectedDevices(devices);
            return status;
        }
    }
    RETURN_STATUS_IF_ERROR(mStream->setConnectedDevices(devices));
    if (behavior == CREATE_NEW_STREAM) {
        // Replay postponed state onto the new stream.
        // These updates are less critical, only log warning on failure.
        if (mHwAvSyncId.has_value()) {
            if (auto status = mStream->updateHwAvSyncId(*mHwAvSyncId); !status.isOk()) {
                LOG(WARNING) << __func__ << ": could not update HW AV Sync for a new stream: "
                             << status.getDescription();
            }
        }
        for (const auto& vndParam : mMissedParameters) {
            if (auto status = mStream->setVendorParameters(vndParam.first, vndParam.second);
                !status.isOk()) {
                LOG(WARNING) << __func__ << ": error while setting parameters for a new stream: "
                             << status.getDescription();
            }
        }
        mMissedParameters.clear();
        for (const auto& effect : mEffects) {
            if (auto status = mStream->addEffect(effect); !status.isOk()) {
                LOG(WARNING) << __func__ << ": error while adding effect for a new stream: "
                             << status.getDescription();
            }
        }
        if (mBluetoothParametersUpdated) {
            if (auto status = mStream->bluetoothParametersUpdated(); !status.isOk()) {
                LOG(WARNING) << __func__
                             << ": error while updating BT parameters for a new stream: "
                             << status.getDescription();
            }
        }
        mBluetoothParametersUpdated = false;
    }
    return ndk::ScopedAStatus::ok();
}
// Notifies the inner stream of BT parameter changes. While the stub stream is
// engaged, only a flag is set so the notification can be replayed after a
// switch to a "real" implementation.
ndk::ScopedAStatus StreamSwitcher::bluetoothParametersUpdated() {
    if (!mStream) {
        LOG(ERROR) << __func__ << ": stream was closed";
        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
    }
    if (!mIsStubStream) {
        return mStream->bluetoothParametersUpdated();
    }
    mBluetoothParametersUpdated = true;
    return ndk::ScopedAStatus::ok();
}
// Forwards the gain value to the inner stream; fails when already closed.
ndk::ScopedAStatus StreamSwitcher::setGain(float gain) {
    if (mStream != nullptr) {
        return mStream->setGain(gain);
    }
    LOG(ERROR) << __func__ << ": stream was closed";
    return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
}
} // namespace aidl::android::hardware::audio::core::deprecated

View File

@@ -0,0 +1,202 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
** This class is deprecated because its use causes threading issues
** with the FMQ due to change of threads reading and writing into FMQ.
**
** DO NOT USE. These files will be removed.
**/
#pragma once
#include "core-impl/Stream.h"
namespace aidl::android::hardware::audio::core::deprecated {
// 'StreamSwitcher' is an implementation of 'StreamCommonInterface' which allows
// dynamically switching the underlying stream implementation based on currently
// connected devices. This is achieved by replacing inheritance from
// 'StreamCommonImpl' with owning an instance of it. StreamSwitcher must be
// extended in order to supply the logic for choosing the stream
// implementation. When there are no connected devices, for instance, upon the
// creation, the StreamSwitcher engages an instance of a stub stream in order to
// keep serving requests coming via 'StreamDescriptor'.
//
// StreamSwitcher implements the 'IStreamCommon' interface directly, with
// necessary delegation to the current stream implementation. While the stub
// stream is engaged, any requests made via 'IStreamCommon' (parameters, effects
// setting, etc) are postponed and only delivered on device connection change
// to the "real" stream implementation provided by the extending class. This is why
// the behavior of StreamSwitcher in the "stub" state is not identical to behavior
// of 'StreamStub'. It can become a full substitute for 'StreamStub' once
// device connection change event occurs and the extending class returns
// 'LEAVE_CURRENT_STREAM' from 'switchCurrentStream' method.
//
// There is a natural limitation that the current stream implementation may only
// be switched when the stream is in the 'STANDBY' state. Thus, when the event
// to switch the stream occurs, the current stream is stopped and joined, and
// its last state is validated. Since the change of the set of connected devices
// normally occurs on patch updates, if the stream was not in standby, this is
// reported to the caller of 'IModule.setAudioPatch' as the 'EX_ILLEGAL_STATE'
// error.
//
// The simplest use case, when the implementor just needs to emulate the legacy HAL API
// behavior of receiving the connected devices upon stream creation, the implementation
// of the extending class can look as follows. We assume that 'StreamLegacy' implementation
// is the one requiring to know connected devices on creation:
//
// class StreamLegacy : public StreamCommonImpl {
// public:
// StreamLegacy(StreamContext* context, const Metadata& metadata,
// const std::vector<AudioDevice>& devices);
// };
//
// class StreamOutLegacy final : public StreamOut, public StreamSwitcher {
// public:
// StreamOutLegacy(StreamContext&& context, metadata etc.)
// private:
// DeviceSwitchBehavior switchCurrentStream(const std::vector<AudioDevice>&) override {
// // This implementation effectively postpones stream creation until
// // receiving the first call to 'setConnectedDevices' with a non-empty list.
// return isStubStream() ? DeviceSwitchBehavior::CREATE_NEW_STREAM :
// DeviceSwitchBehavior::USE_CURRENT_STREAM;
// }
// std::unique_ptr<StreamCommonInterfaceEx> createNewStream(
// const std::vector<AudioDevice>& devices,
// StreamContext* context, const Metadata& metadata) override {
// return std::unique_ptr<StreamCommonInterfaceEx>(new InnerStreamWrapper<StreamLegacy>(
// context, metadata, devices));
// }
// void onClose(StreamDescriptor::State) override { defaultOnClose(); }
// }
//
// Extends 'StreamCommonInterface' with a way to query the state the stream was
// in just before it was closed. StreamSwitcher uses this to verify that the
// inner stream was in a state that permits switching (see
// 'StreamSwitcher::closeCurrentStream').
class StreamCommonInterfaceEx : virtual public StreamCommonInterface {
  public:
    virtual StreamDescriptor::State getStatePriorToClosing() const = 0;
};
// Wraps a concrete stream implementation 'T' so it can be owned by
// StreamSwitcher: records the state at close time and suppresses the inner
// stream's own close notification.
template <typename T>
class InnerStreamWrapper : public T, public StreamCommonInterfaceEx {
  public:
    // Perfect-forwards all arguments to the wrapped stream's constructor.
    template <typename... Args>
    InnerStreamWrapper(Args&&... args) : T(std::forward<Args>(args)...) {}
    StreamDescriptor::State getStatePriorToClosing() const override { return mStatePriorToClosing; }

  private:
    // Do not need to do anything on close notification from the inner stream
    // because StreamSwitcher handles IStreamCommon::close by itself.
    void onClose(StreamDescriptor::State statePriorToClosing) override {
        mStatePriorToClosing = statePriorToClosing;
    }
    StreamDescriptor::State mStatePriorToClosing = StreamDescriptor::State::STANDBY;
};
// Owns the current stream implementation and forwards 'StreamCommonInterface'
// calls to it; see the file-level comment for the overall design and an
// example of an extending class.
class StreamSwitcher : virtual public StreamCommonInterface {
  public:
    StreamSwitcher(StreamContext* context, const Metadata& metadata);

    // 'StreamCommonInterface' methods, delegated to the current inner stream
    // (with postponing while the stub stream is engaged, see the .cpp).
    ndk::ScopedAStatus close() override;
    ndk::ScopedAStatus prepareToClose() override;
    ndk::ScopedAStatus updateHwAvSyncId(int32_t in_hwAvSyncId) override;
    ndk::ScopedAStatus getVendorParameters(const std::vector<std::string>& in_ids,
                                           std::vector<VendorParameter>* _aidl_return) override;
    ndk::ScopedAStatus setVendorParameters(const std::vector<VendorParameter>& in_parameters,
                                           bool in_async) override;
    ndk::ScopedAStatus addEffect(
            const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>& in_effect)
            override;
    ndk::ScopedAStatus removeEffect(
            const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>& in_effect)
            override;
    ndk::ScopedAStatus getStreamCommonCommon(std::shared_ptr<IStreamCommon>* _aidl_return) override;
    ndk::ScopedAStatus updateMetadataCommon(const Metadata& metadata) override;
    ndk::ScopedAStatus initInstance(
            const std::shared_ptr<StreamCommonInterface>& delegate) override;
    const StreamContext& getContext() const override;
    bool isClosed() const override;
    const ConnectedDevices& getConnectedDevices() const override;
    // Drives the stream switching logic; see the .cpp for details.
    ndk::ScopedAStatus setConnectedDevices(
            const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices)
            override;
    ndk::ScopedAStatus bluetoothParametersUpdated() override;
    ndk::ScopedAStatus setGain(float gain) override;

  protected:
    // Since switching a stream requires closing down the current stream, StreamSwitcher
    // asks the extending class its intent on the connected devices change.
    enum DeviceSwitchBehavior {
        // Continue using the current stream implementation. If it's the stub implementation,
        // StreamSwitcher starts treating the stub stream as a "real" implementation,
        // without effectively closing it and starting again.
        USE_CURRENT_STREAM,
        // This is the normal case when the extending class provides a "real" implementation
        // which is not a stub implementation.
        CREATE_NEW_STREAM,
        // This is the case when the extending class wants to revert back to the initial
        // condition of using a stub stream provided by the StreamSwitcher. This behavior
        // is only allowed when the list of connected devices is empty.
        SWITCH_TO_STUB_STREAM,
        // Use when the set of devices is not supported by the extending class. This returns
        // 'EX_UNSUPPORTED_OPERATION' from 'setConnectedDevices'.
        UNSUPPORTED_DEVICES,
    };
    // StreamSwitcher will call these methods from 'setConnectedDevices'. If the switch behavior
    // is 'CREATE_NEW_STREAM', the 'createNewStream' function will be called (with the same
    // device vector) for obtaining a new stream implementation, assuming that closing
    // the current stream was a success.
    virtual DeviceSwitchBehavior switchCurrentStream(
            const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices) = 0;
    virtual std::unique_ptr<StreamCommonInterfaceEx> createNewStream(
            const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices,
            StreamContext* context, const Metadata& metadata) = 0;
    // Called from 'close' with the state the stream was in prior to closing.
    virtual void onClose(StreamDescriptor::State streamPriorToClosing) = 0;
    bool isStubStream() const { return mIsStubStream; }
    // Non-owning access to the current inner stream.
    StreamCommonInterfaceEx* getCurrentStream() const { return mStream.get(); }

  private:
    using VndParam = std::pair<std::vector<VendorParameter>, bool /*isAsync*/>;
    // Only STANDBY and ERROR states permit replacing the inner stream.
    static constexpr bool isValidClosingStreamState(StreamDescriptor::State state) {
        return state == StreamDescriptor::State::STANDBY || state == StreamDescriptor::State::ERROR;
    }
    ndk::ScopedAStatus closeCurrentStream(bool validateStreamState);
    // StreamSwitcher does not own the context.
    StreamContext* mContext;
    Metadata mMetadata;
    ChildInterface<StreamCommonDelegator> mCommon;
    // The current stream.
    std::unique_ptr<StreamCommonInterfaceEx> mStream;
    // Indicates whether 'mCurrentStream' is a stub stream implementation
    // maintained by StreamSwitcher until the extending class provides a "real"
    // implementation. The invariant of this state is that there are no connected
    // devices.
    bool mIsStubStream = true;
    // Storage for the data from commands received via 'IStreamCommon'.
    std::optional<int32_t> mHwAvSyncId;
    std::vector<VndParam> mMissedParameters;
    std::vector<std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>> mEffects;
    bool mBluetoothParametersUpdated = false;
};
} // namespace aidl::android::hardware::audio::core::deprecated

40
audio/downmix/Android.bp Normal file
View File

@@ -0,0 +1,40 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package {
    default_team: "trendy_team_android_media_audio_framework",
    // See: http://go/android-license-faq
    // A large-scale-change added 'default_applicable_licenses' to import
    // all of the 'license_kinds' from "hardware_interfaces_license"
    // to get the below license kinds:
    //   SPDX-license-identifier-Apache-2.0
    default_applicable_licenses: ["hardware_interfaces_license"],
}

// Software downmix effect implementation. Installed under the "soundfx"
// subdirectory, where effect libraries are conventionally placed
// (NOTE(review): presumably loaded from there by the effect factory — confirm).
cc_library_shared {
    name: "libdownmixsw",
    defaults: [
        "aidlaudioeffectservice_defaults",
    ],
    srcs: [
        "DownmixSw.cpp",
        ":effectCommonFile",
    ],
    relative_install_path: "soundfx",
    // Only the default audio AIDL service may link against this library.
    visibility: [
        "//hardware/interfaces/audio/aidl/default",
    ],
}

164
audio/downmix/DownmixSw.cpp Normal file
View File

@@ -0,0 +1,164 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <algorithm>
#include <cstddef>
#define LOG_TAG "AHAL_DownmixSw"
#include <android-base/logging.h>
#include <fmq/AidlMessageQueue.h>
#include <system/audio_effects/effect_uuid.h>
#include "DownmixSw.h"
using aidl::android::hardware::audio::effect::Descriptor;
using aidl::android::hardware::audio::effect::DownmixSw;
using aidl::android::hardware::audio::effect::getEffectImplUuidDownmixSw;
using aidl::android::hardware::audio::effect::getEffectTypeUuidDownmix;
using aidl::android::hardware::audio::effect::IEffect;
using aidl::android::hardware::audio::effect::State;
using aidl::android::media::audio::common::AudioUuid;
// Factory entry point: creates a DownmixSw instance for the matching
// implementation UUID.
// Returns EX_ILLEGAL_ARGUMENT when the UUID does not match this implementation
// or when 'instanceSpp' is null; EX_NONE on success.
extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid,
                                           std::shared_ptr<IEffect>* instanceSpp) {
    if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidDownmixSw()) {
        // Fixed log message: previously "uuid not supported" was concatenated
        // directly onto __func__ without a separator.
        LOG(ERROR) << __func__ << ": uuid not supported";
        return EX_ILLEGAL_ARGUMENT;
    }
    if (!instanceSpp) {
        LOG(ERROR) << __func__ << " invalid input parameter!";
        return EX_ILLEGAL_ARGUMENT;
    }
    *instanceSpp = ndk::SharedRefBase::make<DownmixSw>();
    LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
    return EX_NONE;
}
// Factory entry point: returns the descriptor of the DownmixSw implementation
// identified by 'in_impl_uuid'. EX_ILLEGAL_ARGUMENT for a non-matching UUID.
extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) {
    if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidDownmixSw()) {
        // Fixed log message: previously "uuid not supported" was concatenated
        // directly onto __func__ without a separator.
        LOG(ERROR) << __func__ << ": uuid not supported";
        return EX_ILLEGAL_ARGUMENT;
    }
    *_aidl_return = DownmixSw::kDescriptor;
    return EX_NONE;
}
namespace aidl::android::hardware::audio::effect {
// Static identity of this effect implementation, reported via 'queryEffect'
// and 'getDescriptor'.
const std::string DownmixSw::kEffectName = "DownmixSw";
// Insert-type effect placed first in the chain, with volume control capability.
const Descriptor DownmixSw::kDescriptor = {
        .common = {.id = {.type = getEffectTypeUuidDownmix(),
                          .uuid = getEffectImplUuidDownmixSw(),
                          .proxy = std::nullopt},
                   .flags = {.type = Flags::Type::INSERT,
                             .insert = Flags::Insert::FIRST,
                             .volume = Flags::Volume::CTRL},
                   .name = kEffectName,
                   .implementor = "The Android Open Source Project"}};
// Returns the static descriptor of this effect implementation.
ndk::ScopedAStatus DownmixSw::getDescriptor(Descriptor* _aidl_return) {
    // Fixed log message: previously the descriptor dump was concatenated
    // directly onto __func__ without a separator.
    LOG(DEBUG) << __func__ << " " << kDescriptor.toString();
    *_aidl_return = kDescriptor;
    return ndk::ScopedAStatus::ok();
}
// Applies a Downmix-specific parameter. Only the 'type' parameter is
// supported; any other tag yields EX_ILLEGAL_ARGUMENT.
ndk::ScopedAStatus DownmixSw::setParameterSpecific(const Parameter::Specific& specific) {
    RETURN_IF(Parameter::Specific::downmix != specific.getTag(), EX_ILLEGAL_ARGUMENT,
              "EffectNotSupported");
    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");

    auto& dmParam = specific.get<Parameter::Specific::downmix>();
    auto tag = dmParam.getTag();
    switch (tag) {
        case Downmix::type: {
            RETURN_IF(mContext->setDmType(dmParam.get<Downmix::type>()) != RetCode::SUCCESS,
                      EX_ILLEGAL_ARGUMENT, "setTypeFailed");
            return ndk::ScopedAStatus::ok();
        }
        default: {
            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
                                                                    "DownmixTagNotSupported");
        }
    }
}
// Reads a Downmix-specific parameter identified by 'id' into 'specific'.
// Only the common tag is supported; any other inner tag yields
// EX_ILLEGAL_ARGUMENT.
ndk::ScopedAStatus DownmixSw::getParameterSpecific(const Parameter::Id& id,
                                                   Parameter::Specific* specific) {
    auto tag = id.getTag();
    RETURN_IF(Parameter::Id::downmixTag != tag, EX_ILLEGAL_ARGUMENT, "wrongIdTag");
    auto dmId = id.get<Parameter::Id::downmixTag>();
    auto dmIdTag = dmId.getTag();
    switch (dmIdTag) {
        case Downmix::Id::commonTag:
            return getParameterDownmix(dmId.get<Downmix::Id::commonTag>(), specific);
        default:
            // Log the inner Downmix::Id tag that is actually unsupported.
            // (Bug fix: previously the outer Parameter::Id tag was logged,
            // which is always 'downmixTag' here and thus uninformative.)
            LOG(ERROR) << __func__ << " unsupported tag: " << toString(dmIdTag);
            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
                                                                    "DownmixTagNotSupported");
    }
}
// Fills 'specific' with the requested Downmix parameter value from the
// context. Only the 'type' tag is readable.
ndk::ScopedAStatus DownmixSw::getParameterDownmix(const Downmix::Tag& tag,
                                                  Parameter::Specific* specific) {
    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
    Downmix dmParam;
    switch (tag) {
        case Downmix::type: {
            dmParam.set<Downmix::type>(mContext->getDmType());
            break;
        }
        default: {
            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
                                                                    "DownmixTagNotSupported");
        }
    }
    specific->set<Parameter::Specific::downmix>(dmParam);
    return ndk::ScopedAStatus::ok();
}
// Lazily creates the effect context; the existing context is reused if one
// was already created.
std::shared_ptr<EffectContext> DownmixSw::createContext(const Parameter::Common& common) {
    if (!mContext) {
        mContext = std::make_shared<DownmixSwContext>(1 /* statusFmqDepth */, common);
    } else {
        LOG(DEBUG) << __func__ << " context already exist";
    }
    return mContext;
}
// Drops the effect context; resetting a null pointer is a harmless no-op.
RetCode DownmixSw::releaseContext() {
    mContext.reset();
    return RetCode::SUCCESS;
}
// Processing method running in EffectWorker thread.
// Processing method running in EffectWorker thread.
// Currently a pass-through: copies 'samples' floats from 'in' to 'out'.
IEffect::Status DownmixSw::effectProcessImpl(float* in, float* out, int samples) {
    // TODO: get data buffer and process.
    LOG(DEBUG) << __func__ << " in " << in << " out " << out << " samples " << samples;
    for (int frame = 0; frame < samples; ++frame) {
        out[frame] = in[frame];
    }
    return {STATUS_OK, samples, samples};
}
} // namespace aidl::android::hardware::audio::effect

77
audio/downmix/DownmixSw.h Normal file
View File

@@ -0,0 +1,77 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <aidl/android/hardware/audio/effect/BnEffect.h>
#include <fmq/AidlMessageQueue.h>
#include <cstdlib>
#include <memory>
#include "effect-impl/EffectImpl.h"
namespace aidl::android::hardware::audio::effect {
// Per-instance state of the software downmix effect: currently only the
// selected downmix type.
class DownmixSwContext final : public EffectContext {
  public:
    DownmixSwContext(int statusDepth, const Parameter::Common& common)
        : EffectContext(statusDepth, common) {
        LOG(DEBUG) << __func__;
    }

    // Stores the requested downmix type; always succeeds for now.
    RetCode setDmType(Downmix::Type type) {
        // TODO : Add implementation to apply new type
        mType = type;
        return RetCode::SUCCESS;
    }
    Downmix::Type getDmType() const { return mType; }

  private:
    // Default matches the AIDL Downmix::Type default of STRIP.
    Downmix::Type mType = Downmix::Type::STRIP;
};
// Software implementation of the Downmix effect, exposed through the effect
// factory entry points in DownmixSw.cpp.
class DownmixSw final : public EffectImpl {
  public:
    static const std::string kEffectName;
    // NOTE(review): declared but no definition is visible in DownmixSw.cpp —
    // confirm it is defined elsewhere or remove if unused (would be a link
    // error if referenced).
    static const Capability kCapability;
    static const Descriptor kDescriptor;
    DownmixSw() { LOG(DEBUG) << __func__; }
    ~DownmixSw() {
        cleanUp();
        LOG(DEBUG) << __func__;
    }

    ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
            REQUIRES(mImplMutex) override;
    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific)
            REQUIRES(mImplMutex) override;

    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common)
            REQUIRES(mImplMutex) override;
    RetCode releaseContext() REQUIRES(mImplMutex) override;

    std::string getEffectName() override { return kEffectName; };
    IEffect::Status effectProcessImpl(float* in, float* out, int sample)
            REQUIRES(mImplMutex) override;

  private:
    std::shared_ptr<DownmixSwContext> mContext GUARDED_BY(mImplMutex);

    ndk::ScopedAStatus getParameterDownmix(const Downmix::Tag& tag, Parameter::Specific* specific)
            REQUIRES(mImplMutex);
};
} // namespace aidl::android::hardware::audio::effect

View File

@@ -0,0 +1,40 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package {
default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "hardware_interfaces_license"
// to get the below license kinds:
// SPDX-license-identifier-Apache-2.0
default_applicable_licenses: ["hardware_interfaces_license"],
}
// Shared library for the software DynamicsProcessing effect; installed into
// the "soundfx" subdirectory alongside the other AIDL effect libraries.
cc_library_shared {
    name: "libdynamicsprocessingsw",
    defaults: [
        "aidlaudioeffectservice_defaults",
    ],
    srcs: [
        "DynamicsProcessingSw.cpp",
        ":effectCommonFile",
    ],
    relative_install_path: "soundfx",
    visibility: [
        "//hardware/interfaces/audio/aidl/default",
    ],
}

View File

@@ -0,0 +1,523 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <algorithm>
#include <cstddef>
#include <set>
#include <unordered_set>
#define LOG_TAG "AHAL_DynamicsProcessingSw"
#include <android-base/logging.h>
#include <fmq/AidlMessageQueue.h>
#include <system/audio_effects/effect_uuid.h>
#include "DynamicsProcessingSw.h"
using aidl::android::hardware::audio::effect::Descriptor;
using aidl::android::hardware::audio::effect::DynamicsProcessingSw;
using aidl::android::hardware::audio::effect::getEffectImplUuidDynamicsProcessingSw;
using aidl::android::hardware::audio::effect::getEffectTypeUuidDynamicsProcessing;
using aidl::android::hardware::audio::effect::IEffect;
using aidl::android::hardware::audio::effect::State;
using aidl::android::media::audio::common::AudioUuid;
// Factory entry point used by the effect service to instantiate this effect.
// Fails with EX_ILLEGAL_ARGUMENT when the UUID does not match this
// implementation or the output pointer is null.
extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid,
                                           std::shared_ptr<IEffect>* instanceSpp) {
    if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidDynamicsProcessingSw()) {
        // Fixed: original log concatenated __func__ and the message without a separator.
        LOG(ERROR) << __func__ << ": uuid not supported";
        return EX_ILLEGAL_ARGUMENT;
    }
    if (!instanceSpp) {
        LOG(ERROR) << __func__ << " invalid input parameter!";
        return EX_ILLEGAL_ARGUMENT;
    }
    *instanceSpp = ndk::SharedRefBase::make<DynamicsProcessingSw>();
    LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
    return EX_NONE;
}
// Returns the static descriptor for this implementation UUID.
// Fix: the original dereferenced _aidl_return unconditionally; a null output
// pointer now fails cleanly instead of crashing the service.
extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) {
    if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidDynamicsProcessingSw()) {
        LOG(ERROR) << __func__ << ": uuid not supported";
        return EX_ILLEGAL_ARGUMENT;
    }
    if (!_aidl_return) {
        LOG(ERROR) << __func__ << " invalid output parameter!";
        return EX_ILLEGAL_ARGUMENT;
    }
    *_aidl_return = DynamicsProcessingSw::kDescriptor;
    return EX_NONE;
}
namespace aidl::android::hardware::audio::effect {
const std::string DynamicsProcessingSw::kEffectName = "DynamicsProcessingSw";

// Minimum EQ band config advertised in the capability ranges.
// Fix: gainDb previously used std::numeric_limits<float>::min(), which is the
// smallest *positive* normal float (~1.18e-38), not the most negative value.
// lowest() is the correct lower bound for a signed gain in dB.
const DynamicsProcessing::EqBandConfig DynamicsProcessingSw::kEqBandConfigMin =
        DynamicsProcessing::EqBandConfig({.channel = 0,
                                          .band = 0,
                                          .enable = false,
                                          .cutoffFrequencyHz = 220,
                                          .gainDb = std::numeric_limits<float>::lowest()});
// Maximum EQ band config advertised in the capability ranges.
const DynamicsProcessing::EqBandConfig DynamicsProcessingSw::kEqBandConfigMax =
        DynamicsProcessing::EqBandConfig({.channel = std::numeric_limits<int>::max(),
                                          .band = std::numeric_limits<int>::max(),
                                          .enable = true,
                                          .cutoffFrequencyHz = 20000,
                                          .gainDb = std::numeric_limits<float>::max()});
// Pre/post EQ band ranges share the same min/max band configs.
const Range::DynamicsProcessingRange DynamicsProcessingSw::kPreEqBandRange = {
        .min = DynamicsProcessing::make<DynamicsProcessing::preEqBand>(
                {DynamicsProcessingSw::kEqBandConfigMin}),
        .max = DynamicsProcessing::make<DynamicsProcessing::preEqBand>(
                {DynamicsProcessingSw::kEqBandConfigMax})};
const Range::DynamicsProcessingRange DynamicsProcessingSw::kPostEqBandRange = {
        .min = DynamicsProcessing::make<DynamicsProcessing::postEqBand>(
                {DynamicsProcessingSw::kEqBandConfigMin}),
        .max = DynamicsProcessing::make<DynamicsProcessing::postEqBand>(
                {DynamicsProcessingSw::kEqBandConfigMax})};
const std::vector<Range::DynamicsProcessingRange> DynamicsProcessingSw::kRanges = {
        DynamicsProcessingSw::kPreEqBandRange, DynamicsProcessingSw::kPostEqBandRange};
const Capability DynamicsProcessingSw::kCapability = {.range = DynamicsProcessingSw::kRanges};
// Static descriptor reported through getDescriptor()/queryEffect().
const Descriptor DynamicsProcessingSw::kDescriptor = {
        .common = {.id = {.type = getEffectTypeUuidDynamicsProcessing(),
                          .uuid = getEffectImplUuidDynamicsProcessingSw(),
                          .proxy = std::nullopt},
                   .flags = {.type = Flags::Type::POST_PROC,
                             .insert = Flags::Insert::FIRST,
                             .volume = Flags::Volume::CTRL},
                   .name = DynamicsProcessingSw::kEffectName,
                   .implementor = "The Android Open Source Project"},
        .capability = DynamicsProcessingSw::kCapability};
// Copies the static descriptor to the caller and logs it for debugging.
ndk::ScopedAStatus DynamicsProcessingSw::getDescriptor(Descriptor* _aidl_return) {
    *_aidl_return = kDescriptor;
    LOG(DEBUG) << __func__ << kDescriptor.toString();
    return ndk::ScopedAStatus::ok();
}
// Applies one DynamicsProcessing sub-parameter to the software context.
// Returns EX_ILLEGAL_ARGUMENT when the parameter is not a dynamicsProcessing
// union member, when the context setter rejects the value, or for the
// vendor-extension tag; EX_NULL_POINTER when no context exists yet.
ndk::ScopedAStatus DynamicsProcessingSw::setParameterSpecific(const Parameter::Specific& specific) {
    RETURN_IF(Parameter::Specific::dynamicsProcessing != specific.getTag(), EX_ILLEGAL_ARGUMENT,
              "EffectNotSupported");
    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
    LOG(INFO) << __func__ << specific.toString();
    auto& dpParam = specific.get<Parameter::Specific::dynamicsProcessing>();
    auto tag = dpParam.getTag();
    // Each case forwards to the matching context setter. The switch covers
    // every DynamicsProcessing tag, so every path returns.
    switch (tag) {
        case DynamicsProcessing::engineArchitecture: {
            RETURN_IF(mContext->setEngineArchitecture(
                              dpParam.get<DynamicsProcessing::engineArchitecture>()) !=
                              RetCode::SUCCESS,
                      EX_ILLEGAL_ARGUMENT, "setEngineArchitectureFailed");
            return ndk::ScopedAStatus::ok();
        }
        case DynamicsProcessing::preEq: {
            RETURN_IF(mContext->setPreEqChannelCfgs(dpParam.get<DynamicsProcessing::preEq>()) !=
                              RetCode::SUCCESS,
                      EX_ILLEGAL_ARGUMENT, "setPreEqChannelCfgsFailed");
            return ndk::ScopedAStatus::ok();
        }
        case DynamicsProcessing::postEq: {
            RETURN_IF(mContext->setPostEqChannelCfgs(dpParam.get<DynamicsProcessing::postEq>()) !=
                              RetCode::SUCCESS,
                      EX_ILLEGAL_ARGUMENT, "setPostEqChannelCfgsFailed");
            return ndk::ScopedAStatus::ok();
        }
        case DynamicsProcessing::mbc: {
            RETURN_IF(mContext->setMbcChannelCfgs(dpParam.get<DynamicsProcessing::mbc>()) !=
                              RetCode::SUCCESS,
                      EX_ILLEGAL_ARGUMENT, "setMbcChannelCfgsFailed");
            return ndk::ScopedAStatus::ok();
        }
        case DynamicsProcessing::preEqBand: {
            RETURN_IF(mContext->setPreEqBandCfgs(dpParam.get<DynamicsProcessing::preEqBand>()) !=
                              RetCode::SUCCESS,
                      EX_ILLEGAL_ARGUMENT, "setPreEqBandCfgsFailed");
            return ndk::ScopedAStatus::ok();
        }
        case DynamicsProcessing::postEqBand: {
            RETURN_IF(mContext->setPostEqBandCfgs(dpParam.get<DynamicsProcessing::postEqBand>()) !=
                              RetCode::SUCCESS,
                      EX_ILLEGAL_ARGUMENT, "setPostEqBandCfgsFailed");
            return ndk::ScopedAStatus::ok();
        }
        case DynamicsProcessing::mbcBand: {
            RETURN_IF(mContext->setMbcBandCfgs(dpParam.get<DynamicsProcessing::mbcBand>()) !=
                              RetCode::SUCCESS,
                      EX_ILLEGAL_ARGUMENT, "setMbcBandCfgsFailed");
            return ndk::ScopedAStatus::ok();
        }
        case DynamicsProcessing::limiter: {
            RETURN_IF(mContext->setLimiterCfgs(dpParam.get<DynamicsProcessing::limiter>()) !=
                              RetCode::SUCCESS,
                      EX_ILLEGAL_ARGUMENT, "limiterCfgsFailed");
            return ndk::ScopedAStatus::ok();
        }
        case DynamicsProcessing::inputGain: {
            RETURN_IF(mContext->setInputGainCfgs(dpParam.get<DynamicsProcessing::inputGain>()) !=
                              RetCode::SUCCESS,
                      EX_ILLEGAL_ARGUMENT, "inputGainCfgFailed");
            return ndk::ScopedAStatus::ok();
        }
        case DynamicsProcessing::vendor: {
            // Vendor extensions are not implemented by the software effect.
            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
                    EX_ILLEGAL_ARGUMENT, "DynamicsProcessingTagNotSupported");
        }
    }
}
// Dispatches a parameter query by its DynamicsProcessing id tag: common tags
// go to getParameterDynamicsProcessing(); vendor-extension queries are rejected.
ndk::ScopedAStatus DynamicsProcessingSw::getParameterSpecific(const Parameter::Id& id,
                                                              Parameter::Specific* specific) {
    auto tag = id.getTag();
    RETURN_IF(Parameter::Id::dynamicsProcessingTag != tag, EX_ILLEGAL_ARGUMENT, "wrongIdTag");
    auto dpId = id.get<Parameter::Id::dynamicsProcessingTag>();
    auto dpIdTag = dpId.getTag();
    // The switch is exhaustive over DynamicsProcessing::Id tags.
    switch (dpIdTag) {
        case DynamicsProcessing::Id::commonTag:
            return getParameterDynamicsProcessing(dpId.get<DynamicsProcessing::Id::commonTag>(),
                                                  specific);
        case DynamicsProcessing::Id::vendorExtensionTag:
            LOG(ERROR) << __func__ << " unsupported tag: " << toString(dpIdTag);
            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
                    EX_ILLEGAL_ARGUMENT, "DynamicsProcessingTagNotSupported");
    }
}
// Reads the value for one common DynamicsProcessing tag out of the context
// and packs it into *specific. Vendor tags are rejected; a missing context
// fails with EX_NULL_POINTER.
ndk::ScopedAStatus DynamicsProcessingSw::getParameterDynamicsProcessing(
        const DynamicsProcessing::Tag& tag, Parameter::Specific* specific) {
    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
    DynamicsProcessing dpParam;
    // Each case copies the corresponding context state into the union.
    switch (tag) {
        case DynamicsProcessing::Tag::engineArchitecture: {
            dpParam.set<DynamicsProcessing::engineArchitecture>(mContext->getEngineArchitecture());
            break;
        }
        case DynamicsProcessing::Tag::preEq: {
            dpParam.set<DynamicsProcessing::preEq>(mContext->getPreEqChannelCfgs());
            break;
        }
        case DynamicsProcessing::Tag::postEq: {
            dpParam.set<DynamicsProcessing::postEq>(mContext->getPostEqChannelCfgs());
            break;
        }
        case DynamicsProcessing::Tag::mbc: {
            dpParam.set<DynamicsProcessing::mbc>(mContext->getMbcChannelCfgs());
            break;
        }
        case DynamicsProcessing::Tag::preEqBand: {
            dpParam.set<DynamicsProcessing::preEqBand>(mContext->getPreEqBandCfgs());
            break;
        }
        case DynamicsProcessing::Tag::postEqBand: {
            dpParam.set<DynamicsProcessing::postEqBand>(mContext->getPostEqBandCfgs());
            break;
        }
        case DynamicsProcessing::Tag::mbcBand: {
            dpParam.set<DynamicsProcessing::mbcBand>(mContext->getMbcBandCfgs());
            break;
        }
        case DynamicsProcessing::Tag::limiter: {
            dpParam.set<DynamicsProcessing::limiter>(mContext->getLimiterCfgs());
            break;
        }
        case DynamicsProcessing::Tag::inputGain: {
            dpParam.set<DynamicsProcessing::inputGain>(mContext->getInputGainCfgs());
            break;
        }
        case DynamicsProcessing::vendor: {
            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
                    EX_ILLEGAL_ARGUMENT, "DynamicsProcessingTagNotSupported");
        }
    }
    specific->set<Parameter::Specific::dynamicsProcessing>(dpParam);
    LOG(INFO) << __func__ << specific->toString();
    return ndk::ScopedAStatus::ok();
}
// Lazily creates the effect context; a second call reuses the existing one.
std::shared_ptr<EffectContext> DynamicsProcessingSw::createContext(
        const Parameter::Common& common) {
    if (!mContext) {
        mContext = std::make_shared<DynamicsProcessingSwContext>(1 /* statusFmqDepth */, common);
    } else {
        LOG(DEBUG) << __func__ << " context already exist";
    }
    return mContext;
}
// Releases the context; resetting an already-empty shared_ptr is harmless.
RetCode DynamicsProcessingSw::releaseContext() {
    mContext.reset();
    return RetCode::SUCCESS;
}
// Processing method running in EffectWorker thread.
// Placeholder implementation: copies input to output unchanged and reports
// every sample as both consumed and produced.
IEffect::Status DynamicsProcessingSw::effectProcessImpl(float* in, float* out, int samples) {
    // TODO: get data buffer and process.
    LOG(DEBUG) << __func__ << " in " << in << " out " << out << " samples " << samples;
    for (int i = 0; i < samples; ++i) {
        out[i] = in[i];
    }
    return {STATUS_OK, samples, samples};
}
// Applies new common parameters: recomputes the channel count from the input
// channel mask, then resizes all per-channel and per-band config vectors so
// their sizes stay consistent with the new layout.
RetCode DynamicsProcessingSwContext::setCommon(const Parameter::Common& common) {
    if (auto ret = updateIOFrameSize(common); ret != RetCode::SUCCESS) {
        return ret;
    }
    mCommon = common;
    mChannelCount = ::aidl::android::hardware::audio::common::getChannelCount(
            common.input.base.channelMask);
    resizeChannels();
    resizeBands();
    LOG(INFO) << __func__ << mCommon.toString();
    return RetCode::SUCCESS;
}
// Validates and stores a new engine architecture. A no-op when the config is
// unchanged; otherwise band vectors are resized to match the new band counts.
RetCode DynamicsProcessingSwContext::setEngineArchitecture(
        const DynamicsProcessing::EngineArchitecture& cfg) {
    RETURN_VALUE_IF(!validateEngineConfig(cfg), RetCode::ERROR_ILLEGAL_PARAMETER,
                    "illegalEngineConfig");
    if (mEngineSettings == cfg) {
        LOG(INFO) << __func__ << " not change in engine, do nothing";
        return RetCode::SUCCESS;
    }
    mEngineSettings = cfg;
    resizeBands();
    return RetCode::SUCCESS;
}
// Copies per-channel configs into targetCfgs. Entries with an out-of-range
// channel are skipped; duplicated channels are still applied (last one wins)
// but flagged. Returns ERROR_ILLEGAL_PARAMETER if any entry was bad even
// though the valid entries were applied.
RetCode DynamicsProcessingSwContext::setChannelCfgs(
        const std::vector<DynamicsProcessing::ChannelConfig>& cfgs,
        std::vector<DynamicsProcessing::ChannelConfig>& targetCfgs,
        const DynamicsProcessing::StageEnablement& stage) {
    RETURN_VALUE_IF(!stage.inUse, RetCode::ERROR_ILLEGAL_PARAMETER, "stageNotInUse");
    RetCode ret = RetCode::SUCCESS;
    std::unordered_set<int> channelSet;
    for (auto& cfg : cfgs) {
        if (cfg.channel < 0 || (size_t)cfg.channel >= mChannelCount) {
            LOG(ERROR) << __func__ << " skip illegal channel config " << cfg.toString();
            ret = RetCode::ERROR_ILLEGAL_PARAMETER;
            continue;
        }
        if (0 != channelSet.count(cfg.channel)) {
            LOG(WARNING) << __func__ << " duplicated channel " << cfg.channel;
            ret = RetCode::ERROR_ILLEGAL_PARAMETER;
        } else {
            channelSet.insert(cfg.channel);
        }
        targetCfgs[cfg.channel] = cfg;
    }
    return ret;
}
// Thin per-stage wrappers around setChannelCfgs(), each binding the stage's
// target vector and its enablement settings.
RetCode DynamicsProcessingSwContext::setPreEqChannelCfgs(
        const std::vector<DynamicsProcessing::ChannelConfig>& cfgs) {
    return setChannelCfgs(cfgs, mPreEqChCfgs, mEngineSettings.preEqStage);
}

RetCode DynamicsProcessingSwContext::setPostEqChannelCfgs(
        const std::vector<DynamicsProcessing::ChannelConfig>& cfgs) {
    return setChannelCfgs(cfgs, mPostEqChCfgs, mEngineSettings.postEqStage);
}

RetCode DynamicsProcessingSwContext::setMbcChannelCfgs(
        const std::vector<DynamicsProcessing::ChannelConfig>& cfgs) {
    return setChannelCfgs(cfgs, mMbcChCfgs, mEngineSettings.mbcStage);
}
// Stores EQ band configs into a flat [channel * bandCount + band] vector.
// Duplicated (channel, band) pairs and invalid bands are flagged with
// ERROR_ILLEGAL_PARAMETER, but all valid entries are still applied.
RetCode DynamicsProcessingSwContext::setEqBandCfgs(
        const std::vector<DynamicsProcessing::EqBandConfig>& cfgs,
        std::vector<DynamicsProcessing::EqBandConfig>& targetCfgs,
        const DynamicsProcessing::StageEnablement& stage,
        const std::vector<DynamicsProcessing::ChannelConfig>& channelConfig) {
    RETURN_VALUE_IF(!stage.inUse, RetCode::ERROR_ILLEGAL_PARAMETER, "eqStageNotInUse");
    RetCode ret = RetCode::SUCCESS;
    std::set<std::pair<int /* channel */, int /* band */>> bandSet;
    for (auto& cfg : cfgs) {
        // Duplicate detection happens before validation, so an invalid entry
        // still claims its (channel, band) slot in bandSet.
        if (0 != bandSet.count({cfg.channel, cfg.band})) {
            LOG(WARNING) << __func__ << " duplicated band " << cfg.toString();
            ret = RetCode::ERROR_ILLEGAL_PARAMETER;
        } else {
            bandSet.insert({cfg.channel, cfg.band});
        }
        if (!validateEqBandConfig(cfg, mChannelCount, stage.bandCount, channelConfig)) {
            LOG(WARNING) << __func__ << " skip invalid band " << cfg.toString();
            ret = RetCode::ERROR_ILLEGAL_PARAMETER;
            continue;
        }
        targetCfgs[cfg.channel * stage.bandCount + cfg.band] = cfg;
    }
    return ret;
}
// Per-stage wrappers around setEqBandCfgs() for the pre/post EQ band tables.
RetCode DynamicsProcessingSwContext::setPreEqBandCfgs(
        const std::vector<DynamicsProcessing::EqBandConfig>& cfgs) {
    return setEqBandCfgs(cfgs, mPreEqChBands, mEngineSettings.preEqStage, mPreEqChCfgs);
}

RetCode DynamicsProcessingSwContext::setPostEqBandCfgs(
        const std::vector<DynamicsProcessing::EqBandConfig>& cfgs) {
    return setEqBandCfgs(cfgs, mPostEqChBands, mEngineSettings.postEqStage, mPostEqChCfgs);
}
// Stores MBC band configs into the flat [channel * bandCount + band] table.
// Duplicates and invalid bands are flagged (ERROR_ILLEGAL_PARAMETER) while
// valid entries are still applied.
// Fixes: removed the unused local `filled` vector, and consistently use the
// cached `bandCount` instead of re-reading mEngineSettings.mbcStage.bandCount.
RetCode DynamicsProcessingSwContext::setMbcBandCfgs(
        const std::vector<DynamicsProcessing::MbcBandConfig>& cfgs) {
    RETURN_VALUE_IF(!mEngineSettings.mbcStage.inUse, RetCode::ERROR_ILLEGAL_PARAMETER,
                    "mbcNotInUse");
    RetCode ret = RetCode::SUCCESS;
    std::set<std::pair<int /* channel */, int /* band */>> bandSet;
    const int bandCount = mEngineSettings.mbcStage.bandCount;
    for (const auto& it : cfgs) {
        if (0 != bandSet.count({it.channel, it.band})) {
            LOG(WARNING) << __func__ << " duplicated band " << it.toString();
            ret = RetCode::ERROR_ILLEGAL_PARAMETER;
        } else {
            bandSet.insert({it.channel, it.band});
        }
        if (!validateMbcBandConfig(it, mChannelCount, bandCount, mMbcChCfgs)) {
            LOG(WARNING) << __func__ << " skip invalid band " << it.toString();
            ret = RetCode::ERROR_ILLEGAL_PARAMETER;
            continue;
        }
        mMbcChBands[it.channel * bandCount + it.band] = it;
    }
    return ret;
}
// Stores per-channel limiter configs. Duplicated channels and invalid
// configs are flagged, while the remaining valid entries are applied.
RetCode DynamicsProcessingSwContext::setLimiterCfgs(
        const std::vector<DynamicsProcessing::LimiterConfig>& cfgs) {
    RETURN_VALUE_IF(!mEngineSettings.limiterInUse, RetCode::ERROR_ILLEGAL_PARAMETER,
                    "limiterNotInUse");
    RetCode ret = RetCode::SUCCESS;
    std::unordered_set<int> channelSet;
    for (auto& it : cfgs) {
        if (0 != channelSet.count(it.channel)) {
            LOG(WARNING) << __func__ << " duplicated channel " << it.channel;
            ret = RetCode::ERROR_ILLEGAL_PARAMETER;
        } else {
            channelSet.insert(it.channel);
        }
        if (!validateLimiterConfig(it, mChannelCount)) {
            LOG(WARNING) << __func__ << " skip invalid limiter " << it.toString();
            ret = RetCode::ERROR_ILLEGAL_PARAMETER;
            continue;
        }
        mLimiterCfgs[it.channel] = it;
    }
    return ret;
}
void DynamicsProcessingSwContext::resizeChannels() {
if (mPreEqChCfgs.size() != mChannelCount) {
mPreEqChCfgs.resize(mChannelCount, {.channel = kInvalidChannelId});
}
if (mPostEqChCfgs.size() != mChannelCount) {
mPostEqChCfgs.resize(mChannelCount, {.channel = kInvalidChannelId});
}
if (mMbcChCfgs.size() != mChannelCount) {
mMbcChCfgs.resize(mChannelCount, {.channel = kInvalidChannelId});
}
if (mLimiterCfgs.size() != mChannelCount) {
mLimiterCfgs.resize(mChannelCount, {.channel = kInvalidChannelId});
}
if (mInputGainCfgs.size() != mChannelCount) {
mInputGainCfgs.resize(mChannelCount, {.channel = kInvalidChannelId});
}
}
void DynamicsProcessingSwContext::resizeBands() {
if (mPreEqChBands.size() != (size_t)(mChannelCount * mEngineSettings.preEqStage.bandCount)) {
mPreEqChBands.resize(mChannelCount * mEngineSettings.preEqStage.bandCount,
{.channel = kInvalidChannelId});
}
if (mPostEqChBands.size() != (size_t)(mChannelCount * mEngineSettings.postEqStage.bandCount)) {
mPostEqChBands.resize(mChannelCount * mEngineSettings.postEqStage.bandCount,
{.channel = kInvalidChannelId});
}
if (mMbcChBands.size() != (size_t)(mChannelCount * mEngineSettings.mbcStage.bandCount)) {
mMbcChBands.resize(mChannelCount * mEngineSettings.mbcStage.bandCount,
{.channel = kInvalidChannelId});
}
}
// Applies per-channel input gains. Unlike the other setters this one fails
// fast: the first out-of-range channel aborts and earlier entries remain applied.
RetCode DynamicsProcessingSwContext::setInputGainCfgs(
        const std::vector<DynamicsProcessing::InputGain>& cfgs) {
    for (const auto& gain : cfgs) {
        RETURN_VALUE_IF(gain.channel < 0 || (size_t)gain.channel >= mChannelCount,
                        RetCode::ERROR_ILLEGAL_PARAMETER, "invalidChannel");
        mInputGainCfgs[gain.channel] = gain;
    }
    return RetCode::SUCCESS;
}
std::vector<DynamicsProcessing::InputGain> DynamicsProcessingSwContext::getInputGainCfgs() {
std::vector<DynamicsProcessing::InputGain> ret;
std::copy_if(mInputGainCfgs.begin(), mInputGainCfgs.end(), std::back_inserter(ret),
[&](const auto& gain) { return gain.channel != kInvalidChannelId; });
return ret;
}
// A disabled stage is always valid; an enabled stage must declare at least
// one band. (Simplified from `!inUse || (inUse && bandCount > 0)` — the
// inner `inUse` was redundant.)
bool DynamicsProcessingSwContext::validateStageEnablement(
        const DynamicsProcessing::StageEnablement& enablement) {
    return !enablement.inUse || enablement.bandCount > 0;
}
// An engine config is valid when the preferred processing duration is
// non-negative and each stage's enablement is self-consistent.
bool DynamicsProcessingSwContext::validateEngineConfig(
        const DynamicsProcessing::EngineArchitecture& engine) {
    if (engine.preferredProcessingDurationMs < 0) {
        return false;
    }
    return validateStageEnablement(engine.preEqStage) &&
           validateStageEnablement(engine.postEqStage) &&
           validateStageEnablement(engine.mbcStage);
}
// An EQ band is valid when it targets an in-range, enabled channel and an
// in-range band index.
bool DynamicsProcessingSwContext::validateEqBandConfig(
        const DynamicsProcessing::EqBandConfig& band, int maxChannel, int maxBand,
        const std::vector<DynamicsProcessing::ChannelConfig>& channelConfig) {
    if (band.channel < 0 || band.channel >= maxChannel) {
        return false;
    }
    if ((size_t)band.channel >= channelConfig.size() || !channelConfig[band.channel].enable) {
        return false;
    }
    return band.band >= 0 && band.band < maxBand;
}
// An MBC band is valid when it targets an in-range, enabled channel, its band
// index is in range, times/ratios are non-negative, and the dB thresholds are
// non-positive.
// NOTE(review): `band.kneeWidthDb <= 0` looks suspicious — a knee *width* is
// usually non-negative; confirm against the DynamicsProcessing AIDL spec.
bool DynamicsProcessingSwContext::validateMbcBandConfig(
        const DynamicsProcessing::MbcBandConfig& band, int maxChannel, int maxBand,
        const std::vector<DynamicsProcessing::ChannelConfig>& channelConfig) {
    return band.channel >= 0 && band.channel < maxChannel &&
           (size_t)band.channel < channelConfig.size() && channelConfig[band.channel].enable &&
           band.band >= 0 && band.band < maxBand && band.attackTimeMs >= 0 &&
           band.releaseTimeMs >= 0 && band.ratio >= 0 && band.thresholdDb <= 0 &&
           band.kneeWidthDb <= 0 && band.noiseGateThresholdDb <= 0 && band.expanderRatio >= 0;
}
// A limiter config is valid when its channel is in range, timing and ratio
// are non-negative, and the dB threshold is non-positive.
bool DynamicsProcessingSwContext::validateLimiterConfig(
        const DynamicsProcessing::LimiterConfig& limiter, int maxChannel) {
    if (limiter.channel < 0 || limiter.channel >= maxChannel) {
        return false;
    }
    return limiter.attackTimeMs >= 0 && limiter.releaseTimeMs >= 0 && limiter.ratio >= 0 &&
           limiter.thresholdDb <= 0;
}
} // namespace aidl::android::hardware::audio::effect

View File

@@ -0,0 +1,142 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <cstdlib>
#include <memory>
#include <vector>
#include <Utils.h>
#include <aidl/android/hardware/audio/effect/BnEffect.h>
#include <fmq/AidlMessageQueue.h>
#include "effect-impl/EffectImpl.h"
namespace aidl::android::hardware::audio::effect {
// Context for the software DynamicsProcessing effect. Holds the engine
// architecture plus per-channel and per-band configuration tables; band
// tables are flat vectors indexed as [channel * bandCount + band].
class DynamicsProcessingSwContext final : public EffectContext {
  public:
    DynamicsProcessingSwContext(int statusDepth, const Parameter::Common& common)
        : EffectContext(statusDepth, common),
          mChannelCount(::aidl::android::hardware::audio::common::getChannelCount(
                  common.input.base.channelMask)),
          mPreEqChCfgs(mChannelCount, {.channel = kInvalidChannelId}),
          mPostEqChCfgs(mChannelCount, {.channel = kInvalidChannelId}),
          mMbcChCfgs(mChannelCount, {.channel = kInvalidChannelId}),
          mLimiterCfgs(mChannelCount, {.channel = kInvalidChannelId}) {
        LOG(DEBUG) << __func__;
    }

    // utils
    RetCode setChannelCfgs(const std::vector<DynamicsProcessing::ChannelConfig>& cfgs,
                           std::vector<DynamicsProcessing::ChannelConfig>& targetCfgs,
                           const DynamicsProcessing::StageEnablement& engineSetting);
    RetCode setEqBandCfgs(const std::vector<DynamicsProcessing::EqBandConfig>& cfgs,
                          std::vector<DynamicsProcessing::EqBandConfig>& targetCfgs,
                          const DynamicsProcessing::StageEnablement& stage,
                          const std::vector<DynamicsProcessing::ChannelConfig>& channelConfig);

    // set params
    RetCode setCommon(const Parameter::Common& common) override;
    RetCode setEngineArchitecture(const DynamicsProcessing::EngineArchitecture& cfg);
    RetCode setPreEqChannelCfgs(const std::vector<DynamicsProcessing::ChannelConfig>& cfgs);
    RetCode setPostEqChannelCfgs(const std::vector<DynamicsProcessing::ChannelConfig>& cfgs);
    RetCode setMbcChannelCfgs(const std::vector<DynamicsProcessing::ChannelConfig>& cfgs);
    RetCode setPreEqBandCfgs(const std::vector<DynamicsProcessing::EqBandConfig>& cfgs);
    RetCode setPostEqBandCfgs(const std::vector<DynamicsProcessing::EqBandConfig>& cfgs);
    RetCode setMbcBandCfgs(const std::vector<DynamicsProcessing::MbcBandConfig>& cfgs);
    RetCode setLimiterCfgs(const std::vector<DynamicsProcessing::LimiterConfig>& cfgs);
    RetCode setInputGainCfgs(const std::vector<DynamicsProcessing::InputGain>& cfgs);

    // get params
    DynamicsProcessing::EngineArchitecture getEngineArchitecture() { return mEngineSettings; }
    std::vector<DynamicsProcessing::ChannelConfig> getPreEqChannelCfgs() { return mPreEqChCfgs; }
    std::vector<DynamicsProcessing::ChannelConfig> getPostEqChannelCfgs() { return mPostEqChCfgs; }
    std::vector<DynamicsProcessing::ChannelConfig> getMbcChannelCfgs() { return mMbcChCfgs; }
    std::vector<DynamicsProcessing::EqBandConfig> getPreEqBandCfgs() { return mPreEqChBands; }
    std::vector<DynamicsProcessing::EqBandConfig> getPostEqBandCfgs() { return mPostEqChBands; }
    std::vector<DynamicsProcessing::MbcBandConfig> getMbcBandCfgs() { return mMbcChBands; }
    std::vector<DynamicsProcessing::LimiterConfig> getLimiterCfgs() { return mLimiterCfgs; }
    std::vector<DynamicsProcessing::InputGain> getInputGainCfgs();

  private:
    // Sentinel channel id marking an unconfigured slot.
    static constexpr int32_t kInvalidChannelId = -1;
    size_t mChannelCount = 0;
    DynamicsProcessing::EngineArchitecture mEngineSettings;
    // Channel config vector with size of mChannelCount
    std::vector<DynamicsProcessing::ChannelConfig> mPreEqChCfgs;
    std::vector<DynamicsProcessing::ChannelConfig> mPostEqChCfgs;
    std::vector<DynamicsProcessing::ChannelConfig> mMbcChCfgs;
    std::vector<DynamicsProcessing::LimiterConfig> mLimiterCfgs;
    // NOTE(review): mInputGainCfgs is not pre-sized in the constructor init
    // list like the vectors above — it only gets its mChannelCount size via
    // setCommon()/resizeChannels(); confirm setCommon always runs before
    // setInputGainCfgs() indexes into it.
    std::vector<DynamicsProcessing::InputGain> mInputGainCfgs;
    // Band config vector with size of mChannelCount * bandCount
    std::vector<DynamicsProcessing::EqBandConfig> mPreEqChBands;
    std::vector<DynamicsProcessing::EqBandConfig> mPostEqChBands;
    std::vector<DynamicsProcessing::MbcBandConfig> mMbcChBands;
    bool validateStageEnablement(const DynamicsProcessing::StageEnablement& enablement);
    bool validateEngineConfig(const DynamicsProcessing::EngineArchitecture& engine);
    bool validateEqBandConfig(const DynamicsProcessing::EqBandConfig& band, int maxChannel,
                              int maxBand,
                              const std::vector<DynamicsProcessing::ChannelConfig>& channelConfig);
    bool validateMbcBandConfig(const DynamicsProcessing::MbcBandConfig& band, int maxChannel,
                               int maxBand,
                               const std::vector<DynamicsProcessing::ChannelConfig>& channelConfig);
    bool validateLimiterConfig(const DynamicsProcessing::LimiterConfig& limiter, int maxChannel);
    void resizeChannels();
    void resizeBands();
};  // DynamicsProcessingSwContext
// Software DynamicsProcessing effect implementation. Parameter plumbing is
// functional; audio processing is currently a pass-through (see the .cpp).
class DynamicsProcessingSw final : public EffectImpl {
  public:
    static const std::string kEffectName;
    static const Capability kCapability;
    static const Descriptor kDescriptor;
    DynamicsProcessingSw() { LOG(DEBUG) << __func__; }
    ~DynamicsProcessingSw() {
        // cleanUp() releases worker/context resources before member destruction.
        cleanUp();
        LOG(DEBUG) << __func__;
    }

    ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
            REQUIRES(mImplMutex) override;
    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific)
            REQUIRES(mImplMutex) override;
    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common)
            REQUIRES(mImplMutex) override;
    RetCode releaseContext() REQUIRES(mImplMutex) override;
    IEffect::Status effectProcessImpl(float* in, float* out, int samples)
            REQUIRES(mImplMutex) override;
    std::string getEffectName() override { return kEffectName; };

  private:
    // Capability range constants advertised through kCapability.
    static const DynamicsProcessing::EqBandConfig kEqBandConfigMin;
    static const DynamicsProcessing::EqBandConfig kEqBandConfigMax;
    static const Range::DynamicsProcessingRange kPreEqBandRange;
    static const Range::DynamicsProcessingRange kPostEqBandRange;
    static const std::vector<Range::DynamicsProcessingRange> kRanges;
    std::shared_ptr<DynamicsProcessingSwContext> mContext GUARDED_BY(mImplMutex);
    ndk::ScopedAStatus getParameterDynamicsProcessing(const DynamicsProcessing::Tag& tag,
                                                      Parameter::Specific* specific)
            REQUIRES(mImplMutex);
};  // DynamicsProcessingSw
} // namespace aidl::android::hardware::audio::effect

View File

@@ -0,0 +1,40 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package {
default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "hardware_interfaces_license"
// to get the below license kinds:
// SPDX-license-identifier-Apache-2.0
default_applicable_licenses: ["hardware_interfaces_license"],
}
// Shared library for the software Environmental Reverb effect; installed into
// the "soundfx" subdirectory alongside the other AIDL effect libraries.
cc_library_shared {
    name: "libenvreverbsw",
    defaults: [
        "aidlaudioeffectservice_defaults",
    ],
    srcs: [
        "EnvReverbSw.cpp",
        ":effectCommonFile",
    ],
    relative_install_path: "soundfx",
    visibility: [
        "//hardware/interfaces/audio/aidl/default:__subpackages__",
    ],
}

View File

@@ -0,0 +1,327 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <algorithm>
#include <cstddef>
#include <unordered_set>
#define LOG_TAG "AHAL_EnvReverbSw"
#include <android-base/logging.h>
#include <fmq/AidlMessageQueue.h>
#include <system/audio_effects/effect_uuid.h>
#include "EnvReverbSw.h"
using aidl::android::hardware::audio::effect::Descriptor;
using aidl::android::hardware::audio::effect::EnvReverbSw;
using aidl::android::hardware::audio::effect::getEffectImplUuidEnvReverbSw;
using aidl::android::hardware::audio::effect::getEffectTypeUuidEnvReverb;
using aidl::android::hardware::audio::effect::IEffect;
using aidl::android::hardware::audio::effect::State;
using aidl::android::media::audio::common::AudioUuid;
// Factory entry point used by the effect service to instantiate this effect.
// Fails with EX_ILLEGAL_ARGUMENT when the UUID does not match this
// implementation or the output pointer is null.
extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid,
                                           std::shared_ptr<IEffect>* instanceSpp) {
    if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidEnvReverbSw()) {
        // Fixed: original log concatenated __func__ and the message without a separator.
        LOG(ERROR) << __func__ << ": uuid not supported";
        return EX_ILLEGAL_ARGUMENT;
    }
    if (!instanceSpp) {
        LOG(ERROR) << __func__ << " invalid input parameter!";
        return EX_ILLEGAL_ARGUMENT;
    }
    *instanceSpp = ndk::SharedRefBase::make<EnvReverbSw>();
    LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
    return EX_NONE;
}
// Returns the static descriptor for this implementation UUID.
// Fix: the original dereferenced _aidl_return unconditionally; a null output
// pointer now fails cleanly instead of crashing the service.
extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) {
    if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidEnvReverbSw()) {
        LOG(ERROR) << __func__ << ": uuid not supported";
        return EX_ILLEGAL_ARGUMENT;
    }
    if (!_aidl_return) {
        LOG(ERROR) << __func__ << " invalid output parameter!";
        return EX_ILLEGAL_ARGUMENT;
    }
    *_aidl_return = EnvReverbSw::kDescriptor;
    return EX_NONE;
}
namespace aidl::android::hardware::audio::effect {
const std::string EnvReverbSw::kEffectName = "EnvReverbSw";
// Parameter ranges advertised in the capability; units follow the field
// suffixes (Mb = millibels, Ms = milliseconds, Pm = per-mille).
const std::vector<Range::EnvironmentalReverbRange> EnvReverbSw::kRanges = {
        MAKE_RANGE(EnvironmentalReverb, roomLevelMb, -6000, 0),
        MAKE_RANGE(EnvironmentalReverb, roomHfLevelMb, -4000, 0),
        MAKE_RANGE(EnvironmentalReverb, decayTimeMs, 0, 7000),
        MAKE_RANGE(EnvironmentalReverb, decayHfRatioPm, 100, 2000),
        MAKE_RANGE(EnvironmentalReverb, reflectionsLevelMb, -6000, 0),
        MAKE_RANGE(EnvironmentalReverb, reflectionsDelayMs, 0, 65),
        MAKE_RANGE(EnvironmentalReverb, levelMb, -6000, 0),
        MAKE_RANGE(EnvironmentalReverb, delayMs, 0, 65),
        MAKE_RANGE(EnvironmentalReverb, diffusionPm, 0, 1000),
        MAKE_RANGE(EnvironmentalReverb, densityPm, 0, 1000)};
const Capability EnvReverbSw::kCapability = {
        .range = Range::make<Range::environmentalReverb>(EnvReverbSw::kRanges)};
// Static descriptor reported through getDescriptor()/queryEffect().
const Descriptor EnvReverbSw::kDescriptor = {
        .common = {.id = {.type = getEffectTypeUuidEnvReverb(),
                          .uuid = getEffectImplUuidEnvReverbSw(),
                          .proxy = std::nullopt},
                   .flags = {.type = Flags::Type::INSERT,
                             .insert = Flags::Insert::FIRST,
                             .volume = Flags::Volume::CTRL},
                   .name = EnvReverbSw::kEffectName,
                   .implementor = "The Android Open Source Project"},
        .capability = EnvReverbSw::kCapability};
// Copies the static descriptor into |_aidl_return|.
ndk::ScopedAStatus EnvReverbSw::getDescriptor(Descriptor* _aidl_return) {
    const auto& descriptor = kDescriptor;
    LOG(DEBUG) << __func__ << descriptor.toString();
    *_aidl_return = descriptor;
    return ndk::ScopedAStatus::ok();
}
// Validates and stores one environmental-reverb parameter in the context.
// Returns EX_ILLEGAL_ARGUMENT for a non-reverb parameter, an out-of-range
// value, or an unsupported tag.
ndk::ScopedAStatus EnvReverbSw::setParameterSpecific(const Parameter::Specific& specific) {
    RETURN_IF(Parameter::Specific::environmentalReverb != specific.getTag(), EX_ILLEGAL_ARGUMENT,
              "EffectNotSupported");
    // Fix: guard against use before createContext(). Every case below
    // dereferences mContext; the sibling EqualizerSw::setParameterSpecific and
    // getParameterEnvironmentalReverb() already perform this check.
    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
    auto& erParam = specific.get<Parameter::Specific::environmentalReverb>();
    RETURN_IF(!inRange(erParam, kRanges), EX_ILLEGAL_ARGUMENT, "outOfRange");
    auto tag = erParam.getTag();
    switch (tag) {
        case EnvironmentalReverb::roomLevelMb: {
            RETURN_IF(mContext->setErRoomLevel(erParam.get<EnvironmentalReverb::roomLevelMb>()) !=
                              RetCode::SUCCESS,
                      EX_ILLEGAL_ARGUMENT, "setRoomLevelFailed");
            return ndk::ScopedAStatus::ok();
        }
        case EnvironmentalReverb::roomHfLevelMb: {
            RETURN_IF(
                    mContext->setErRoomHfLevel(erParam.get<EnvironmentalReverb::roomHfLevelMb>()) !=
                            RetCode::SUCCESS,
                    EX_ILLEGAL_ARGUMENT, "setRoomHfLevelFailed");
            return ndk::ScopedAStatus::ok();
        }
        case EnvironmentalReverb::decayTimeMs: {
            RETURN_IF(mContext->setErDecayTime(erParam.get<EnvironmentalReverb::decayTimeMs>()) !=
                              RetCode::SUCCESS,
                      EX_ILLEGAL_ARGUMENT, "setDecayTimeFailed");
            return ndk::ScopedAStatus::ok();
        }
        case EnvironmentalReverb::decayHfRatioPm: {
            RETURN_IF(
                    mContext->setErDecayHfRatio(
                            erParam.get<EnvironmentalReverb::decayHfRatioPm>()) != RetCode::SUCCESS,
                    EX_ILLEGAL_ARGUMENT, "setDecayHfRatioFailed");
            return ndk::ScopedAStatus::ok();
        }
        case EnvironmentalReverb::reflectionsLevelMb: {
            RETURN_IF(mContext->setErReflectionsLevel(
                              erParam.get<EnvironmentalReverb::reflectionsLevelMb>()) !=
                              RetCode::SUCCESS,
                      EX_ILLEGAL_ARGUMENT, "setReflectionsLevelFailed");
            return ndk::ScopedAStatus::ok();
        }
        case EnvironmentalReverb::reflectionsDelayMs: {
            RETURN_IF(mContext->setErReflectionsDelay(
                              erParam.get<EnvironmentalReverb::reflectionsDelayMs>()) !=
                              RetCode::SUCCESS,
                      EX_ILLEGAL_ARGUMENT, "setReflectionsDelayFailed");
            return ndk::ScopedAStatus::ok();
        }
        case EnvironmentalReverb::levelMb: {
            RETURN_IF(mContext->setErLevel(erParam.get<EnvironmentalReverb::levelMb>()) !=
                              RetCode::SUCCESS,
                      EX_ILLEGAL_ARGUMENT, "setLevelFailed");
            return ndk::ScopedAStatus::ok();
        }
        case EnvironmentalReverb::delayMs: {
            RETURN_IF(mContext->setErDelay(erParam.get<EnvironmentalReverb::delayMs>()) !=
                              RetCode::SUCCESS,
                      EX_ILLEGAL_ARGUMENT, "setDelayFailed");
            return ndk::ScopedAStatus::ok();
        }
        case EnvironmentalReverb::diffusionPm: {
            RETURN_IF(mContext->setErDiffusion(erParam.get<EnvironmentalReverb::diffusionPm>()) !=
                              RetCode::SUCCESS,
                      EX_ILLEGAL_ARGUMENT, "setDiffusionFailed");
            return ndk::ScopedAStatus::ok();
        }
        case EnvironmentalReverb::densityPm: {
            RETURN_IF(mContext->setErDensity(erParam.get<EnvironmentalReverb::densityPm>()) !=
                              RetCode::SUCCESS,
                      EX_ILLEGAL_ARGUMENT, "setDensityFailed");
            return ndk::ScopedAStatus::ok();
        }
        case EnvironmentalReverb::bypass: {
            RETURN_IF(mContext->setErBypass(erParam.get<EnvironmentalReverb::bypass>()) !=
                              RetCode::SUCCESS,
                      EX_ILLEGAL_ARGUMENT, "setBypassFailed");
            return ndk::ScopedAStatus::ok();
        }
        default: {
            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
                    EX_ILLEGAL_ARGUMENT, "EnvironmentalReverbTagNotSupported");
        }
    }
}
// Dispatches a parameter query to getParameterEnvironmentalReverb() for the
// common tag; any other tag is rejected.
ndk::ScopedAStatus EnvReverbSw::getParameterSpecific(const Parameter::Id& id,
                                                     Parameter::Specific* specific) {
    auto idTag = id.getTag();
    RETURN_IF(Parameter::Id::environmentalReverbTag != idTag, EX_ILLEGAL_ARGUMENT, "wrongIdTag");
    auto erId = id.get<Parameter::Id::environmentalReverbTag>();
    auto erIdTag = erId.getTag();
    if (erIdTag == EnvironmentalReverb::Id::commonTag) {
        return getParameterEnvironmentalReverb(erId.get<EnvironmentalReverb::Id::commonTag>(),
                                               specific);
    }
    LOG(ERROR) << __func__ << " unsupported tag: " << toString(erIdTag);
    return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
                                                            "EnvironmentalReverbTagNotSupported");
}
// Reads the cached value for |tag| back out of the context and packs it into
// |specific|. Returns EX_NULL_POINTER if no context exists yet and
// EX_ILLEGAL_ARGUMENT for an unsupported tag.
ndk::ScopedAStatus EnvReverbSw::getParameterEnvironmentalReverb(const EnvironmentalReverb::Tag& tag,
                                                                Parameter::Specific* specific) {
    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
    EnvironmentalReverb erParam;
    switch (tag) {
        case EnvironmentalReverb::roomLevelMb: {
            erParam.set<EnvironmentalReverb::roomLevelMb>(mContext->getErRoomLevel());
            break;
        }
        case EnvironmentalReverb::roomHfLevelMb: {
            erParam.set<EnvironmentalReverb::roomHfLevelMb>(mContext->getErRoomHfLevel());
            break;
        }
        case EnvironmentalReverb::decayTimeMs: {
            erParam.set<EnvironmentalReverb::decayTimeMs>(mContext->getErDecayTime());
            break;
        }
        case EnvironmentalReverb::decayHfRatioPm: {
            erParam.set<EnvironmentalReverb::decayHfRatioPm>(mContext->getErDecayHfRatio());
            break;
        }
        case EnvironmentalReverb::reflectionsLevelMb: {
            erParam.set<EnvironmentalReverb::reflectionsLevelMb>(mContext->getErReflectionsLevel());
            break;
        }
        case EnvironmentalReverb::reflectionsDelayMs: {
            erParam.set<EnvironmentalReverb::reflectionsDelayMs>(mContext->getErReflectionsDelay());
            break;
        }
        case EnvironmentalReverb::levelMb: {
            erParam.set<EnvironmentalReverb::levelMb>(mContext->getErLevel());
            break;
        }
        case EnvironmentalReverb::delayMs: {
            erParam.set<EnvironmentalReverb::delayMs>(mContext->getErDelay());
            break;
        }
        case EnvironmentalReverb::diffusionPm: {
            erParam.set<EnvironmentalReverb::diffusionPm>(mContext->getErDiffusion());
            break;
        }
        case EnvironmentalReverb::densityPm: {
            erParam.set<EnvironmentalReverb::densityPm>(mContext->getErDensity());
            break;
        }
        case EnvironmentalReverb::bypass: {
            erParam.set<EnvironmentalReverb::bypass>(mContext->getErBypass());
            break;
        }
        default: {
            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
                    EX_ILLEGAL_ARGUMENT, "EnvironmentalReverbTagNotSupported");
        }
    }
    specific->set<Parameter::Specific::environmentalReverb>(erParam);
    return ndk::ScopedAStatus::ok();
}
// Lazily creates the per-instance context; subsequent calls return the
// existing one.
std::shared_ptr<EffectContext> EnvReverbSw::createContext(const Parameter::Common& common) {
    if (!mContext) {
        mContext = std::make_shared<EnvReverbSwContext>(1 /* statusFmqDepth */, common);
    } else {
        LOG(DEBUG) << __func__ << " context already exist";
    }
    return mContext;
}
// Drops the context; resetting a null shared_ptr is a harmless no-op.
RetCode EnvReverbSw::releaseContext() {
    mContext.reset();
    return RetCode::SUCCESS;
}
// Processing method running in EffectWorker thread. The SW implementation is
// a straight passthrough copy of |samples| floats.
IEffect::Status EnvReverbSw::effectProcessImpl(float* in, float* out, int samples) {
    // TODO: get data buffer and process.
    LOG(DEBUG) << __func__ << " in " << in << " out " << out << " samples " << samples;
    for (int i = 0; i < samples; ++i) {
        out[i] = in[i];
    }
    return {STATUS_OK, samples, samples};
}
// Setters for the cached environmental-reverb parameters. The software
// implementation performs no DSP, so each setter only records the value;
// range validation happens in EnvReverbSw::setParameterSpecific().
RetCode EnvReverbSwContext::setErRoomLevel(int levelMb) {
    mRoomLevel = levelMb;
    return RetCode::SUCCESS;
}
RetCode EnvReverbSwContext::setErRoomHfLevel(int hfLevelMb) {
    mRoomHfLevel = hfLevelMb;
    return RetCode::SUCCESS;
}
RetCode EnvReverbSwContext::setErDecayTime(int decayTimeMs) {
    mDecayTime = decayTimeMs;
    return RetCode::SUCCESS;
}
RetCode EnvReverbSwContext::setErDecayHfRatio(int decayHfRatioPm) {
    mDecayHfRatio = decayHfRatioPm;
    return RetCode::SUCCESS;
}
RetCode EnvReverbSwContext::setErLevel(int levelMb) {
    mLevel = levelMb;
    return RetCode::SUCCESS;
}
RetCode EnvReverbSwContext::setErDelay(int delayMs) {
    mDelay = delayMs;
    return RetCode::SUCCESS;
}
RetCode EnvReverbSwContext::setErDiffusion(int diffusionPm) {
    mDiffusion = diffusionPm;
    return RetCode::SUCCESS;
}
RetCode EnvReverbSwContext::setErDensity(int densityPm) {
    mDensity = densityPm;
    return RetCode::SUCCESS;
}
} // namespace aidl::android::hardware::audio::effect

View File

@@ -0,0 +1,122 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <aidl/android/hardware/audio/effect/BnEffect.h>
#include <fmq/AidlMessageQueue.h>
#include <cstdlib>
#include <memory>
#include "effect-impl/EffectImpl.h"
namespace aidl::android::hardware::audio::effect {
// Parameter cache for the software environmental reverb. The SW effect does
// no real processing, so the context only stores parameter values so they can
// be echoed back on getParameter queries.
class EnvReverbSwContext final : public EffectContext {
  public:
    EnvReverbSwContext(int statusDepth, const Parameter::Common& common)
        : EffectContext(statusDepth, common) {
        LOG(DEBUG) << __func__;
    }
    RetCode setErRoomLevel(int roomLevel);
    int getErRoomLevel() const { return mRoomLevel; }
    RetCode setErRoomHfLevel(int roomHfLevel);
    int getErRoomHfLevel() const { return mRoomHfLevel; }
    RetCode setErDecayTime(int decayTime);
    int getErDecayTime() const { return mDecayTime; }
    RetCode setErDecayHfRatio(int decayHfRatio);
    int getErDecayHfRatio() const { return mDecayHfRatio; }
    RetCode setErLevel(int level);
    int getErLevel() const { return mLevel; }
    RetCode setErDelay(int delay);
    int getErDelay() const { return mDelay; }
    RetCode setErDiffusion(int diffusion);
    int getErDiffusion() const { return mDiffusion; }
    RetCode setErDensity(int density);
    int getErDensity() const { return mDensity; }
    RetCode setErBypass(bool bypass) {
        mBypass = bypass;
        return RetCode::SUCCESS;
    }
    bool getErBypass() const { return mBypass; }
    RetCode setErReflectionsDelay(int delay) {
        mReflectionsDelayMs = delay;
        return RetCode::SUCCESS;
    }
    // Bug fix: previously declared `bool`, which truncated the stored
    // milliseconds value (e.g. 40) to 1 when read back via getParameter.
    // Widening bool -> int is source-compatible for existing callers.
    int getErReflectionsDelay() const { return mReflectionsDelayMs; }
    RetCode setErReflectionsLevel(int level) {
        mReflectionsLevelMb = level;
        return RetCode::SUCCESS;
    }
    // Bug fix: same bool -> int truncation as above for the millibel level.
    int getErReflectionsLevel() const { return mReflectionsLevelMb; }

  private:
    int mRoomLevel = -6000;   // Default room level
    int mRoomHfLevel = 0;     // Default room hf level
    int mDecayTime = 1000;    // Default decay time
    int mDecayHfRatio = 500;  // Default decay hf ratio
    int mLevel = -6000;       // Default level
    int mDelay = 40;          // Default delay
    int mReflectionsLevelMb = 0;
    int mReflectionsDelayMs = 0;
    int mDiffusion = 1000;  // Default diffusion
    int mDensity = 1000;    // Default density
    bool mBypass = false;   // Default bypass
};
// Software (passthrough) implementation of the environmental reverb effect.
// Parameters are validated against kRanges, cached in EnvReverbSwContext and
// echoed back on query; audio is copied unmodified in effectProcessImpl().
class EnvReverbSw final : public EffectImpl {
  public:
    static const std::string kEffectName;
    static const Capability kCapability;
    static const Descriptor kDescriptor;
    EnvReverbSw() { LOG(DEBUG) << __func__; }
    ~EnvReverbSw() {
        // cleanUp() is inherited from the base implementation; called before
        // member destruction.
        cleanUp();
        LOG(DEBUG) << __func__;
    }
    // EffectImpl overrides; parameter accessors require mImplMutex.
    ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
            REQUIRES(mImplMutex) override;
    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific)
            REQUIRES(mImplMutex) override;
    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common)
            REQUIRES(mImplMutex) override;
    RetCode releaseContext() REQUIRES(mImplMutex) override;
    IEffect::Status effectProcessImpl(float* in, float* out, int samples) override;
    std::string getEffectName() override { return kEffectName; }

  private:
    static const std::vector<Range::EnvironmentalReverbRange> kRanges;
    std::shared_ptr<EnvReverbSwContext> mContext GUARDED_BY(mImplMutex);
    // Helper for getParameterSpecific(): packs the cached value for |tag|.
    ndk::ScopedAStatus getParameterEnvironmentalReverb(const EnvironmentalReverb::Tag& tag,
                                                       Parameter::Specific* specific)
            REQUIRES(mImplMutex);
};
} // namespace aidl::android::hardware::audio::effect

View File

@@ -0,0 +1,40 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package {
    default_team: "trendy_team_android_media_audio_framework",
    // See: http://go/android-license-faq
    // A large-scale-change added 'default_applicable_licenses' to import
    // all of the 'license_kinds' from "hardware_interfaces_license"
    // to get the below license kinds:
    // SPDX-license-identifier-Apache-2.0
    default_applicable_licenses: ["hardware_interfaces_license"],
}
// Software equalizer effect library; installed under the soundfx/ vendor
// path and shares the common effect sources via :effectCommonFile.
cc_library_shared {
    name: "libequalizersw",
    defaults: [
        "aidlaudioeffectservice_defaults",
    ],
    srcs: [
        "EqualizerSw.cpp",
        ":effectCommonFile",
    ],
    relative_install_path: "soundfx",
    visibility: [
        "//hardware/interfaces/audio/aidl/default:__subpackages__",
    ],
}

View File

@@ -0,0 +1,218 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <algorithm>
#include <cstddef>
#define LOG_TAG "AHAL_EqualizerSw"
#include <android-base/logging.h>
#include <fmq/AidlMessageQueue.h>
#include <system/audio_effects/effect_uuid.h>
#include "EqualizerSw.h"
using aidl::android::hardware::audio::effect::Descriptor;
using aidl::android::hardware::audio::effect::EqualizerSw;
using aidl::android::hardware::audio::effect::getEffectImplUuidEqualizerSw;
using aidl::android::hardware::audio::effect::getEffectTypeUuidEqualizer;
using aidl::android::hardware::audio::effect::IEffect;
using aidl::android::hardware::audio::effect::State;
using aidl::android::media::audio::common::AudioUuid;
// Factory entry point: instantiates the software equalizer when the requested
// implementation UUID matches this library.
extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid,
                                           std::shared_ptr<IEffect>* instanceSpp) {
    if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidEqualizerSw()) {
        LOG(ERROR) << __func__ << "uuid not supported";
        return EX_ILLEGAL_ARGUMENT;
    }
    if (!instanceSpp) {
        LOG(ERROR) << __func__ << " invalid input parameter!";
        return EX_ILLEGAL_ARGUMENT;
    }
    *instanceSpp = ndk::SharedRefBase::make<EqualizerSw>();
    LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
    return EX_NONE;
}
// Factory query entry point: returns the static EqualizerSw descriptor for a
// matching implementation UUID.
extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) {
    const bool uuidMatches =
            in_impl_uuid != nullptr && *in_impl_uuid == getEffectImplUuidEqualizerSw();
    if (!uuidMatches) {
        LOG(ERROR) << __func__ << "uuid not supported";
        return EX_ILLEGAL_ARGUMENT;
    }
    *_aidl_return = EqualizerSw::kDesc;
    return EX_NONE;
}
namespace aidl::android::hardware::audio::effect {
// Effect name reported through the descriptor.
const std::string EqualizerSw::kEffectName = "EqualizerSw";
// Five fixed bands as {index, minFreqMh, maxFreqMh} — values look like
// milli-hertz given the centerFreqMh naming; TODO confirm units.
const std::vector<Equalizer::BandFrequency> EqualizerSw::kBandFrequency = {{0, 30000, 120000},
                                                                           {1, 120001, 460000},
                                                                           {2, 460001, 1800000},
                                                                           {3, 1800001, 7000000},
                                                                           {4, 7000001, 20000000}};
// Ten named presets exposed through the capability and getParameter.
const std::vector<Equalizer::Preset> EqualizerSw::kPresets = {
        {0, "Normal"}, {1, "Classical"}, {2, "Dance"}, {3, "Flat"}, {4, "Folk"},
        {5, "Heavy Metal"}, {6, "Hip Hop"}, {7, "Jazz"}, {8, "Pop"}, {9, "Rock"}};
/**
 * Use the same min and max to build a capability represented by Range.
 */
const std::vector<Range::EqualizerRange> EqualizerSw::kRanges = {
        MAKE_RANGE(Equalizer, preset, 0, EqualizerSw::kPresets.size() - 1),
        MAKE_RANGE(Equalizer, bandLevels,
                   std::vector<Equalizer::BandLevel>{
                           Equalizer::BandLevel({.index = 0, .levelMb = -15})},
                   std::vector<Equalizer::BandLevel>{Equalizer::BandLevel(
                           {.index = EqualizerSwContext::kMaxBandNumber - 1, .levelMb = 15})}),
        /* capability definition */
        MAKE_RANGE(Equalizer, bandFrequencies, EqualizerSw::kBandFrequency,
                   EqualizerSw::kBandFrequency),
        MAKE_RANGE(Equalizer, presets, EqualizerSw::kPresets, EqualizerSw::kPresets),
        /* centerFreqMh is get only, set invalid range min > max */
        MAKE_RANGE(Equalizer, centerFreqMh, std::vector<int>({1}), std::vector<int>({0}))};
// Capability wraps the ranges above.
const Capability EqualizerSw::kEqCap = {.range = EqualizerSw::kRanges};
// Static descriptor: insert effect with volume control.
const Descriptor EqualizerSw::kDesc = {.common = {.id = {.type = getEffectTypeUuidEqualizer(),
                                                         .uuid = getEffectImplUuidEqualizerSw()},
                                                  .flags = {.type = Flags::Type::INSERT,
                                                            .insert = Flags::Insert::FIRST,
                                                            .volume = Flags::Volume::CTRL},
                                                  .name = EqualizerSw::kEffectName,
                                                  .implementor =
                                                          "The Android Open Source Project"},
                                       .capability = EqualizerSw::kEqCap};
ndk::ScopedAStatus EqualizerSw::getDescriptor(Descriptor* _aidl_return) {
LOG(DEBUG) << __func__ << kDesc.toString();
*_aidl_return = kDesc;
return ndk::ScopedAStatus::ok();
}
// Validates and stores one equalizer parameter (preset or band levels) in the
// context. Returns EX_ILLEGAL_ARGUMENT for a non-equalizer parameter, an
// out-of-range value, or an unsupported tag.
ndk::ScopedAStatus EqualizerSw::setParameterSpecific(const Parameter::Specific& specific) {
    RETURN_IF(Parameter::Specific::equalizer != specific.getTag(), EX_ILLEGAL_ARGUMENT,
              "EffectNotSupported");
    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
    auto& eqParam = specific.get<Parameter::Specific::equalizer>();
    RETURN_IF(!inRange(eqParam, kRanges), EX_ILLEGAL_ARGUMENT, "outOfRange");
    auto tag = eqParam.getTag();
    switch (tag) {
        case Equalizer::preset: {
            // Fix: error message previously read "setBandLevelsFailed" here
            // (copy-paste from the case below).
            RETURN_IF(mContext->setEqPreset(eqParam.get<Equalizer::preset>()) != RetCode::SUCCESS,
                      EX_ILLEGAL_ARGUMENT, "setPresetFailed");
            return ndk::ScopedAStatus::ok();
        }
        case Equalizer::bandLevels: {
            RETURN_IF(mContext->setEqBandLevels(eqParam.get<Equalizer::bandLevels>()) !=
                              RetCode::SUCCESS,
                      EX_ILLEGAL_ARGUMENT, "setBandLevelsFailed");
            return ndk::ScopedAStatus::ok();
        }
        default: {
            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
                                                                    "EqTagNotSupported");
        }
    }
    // Every switch case returns; the previous unreachable "ParamNotSupported"
    // return after the switch was dead code and has been removed.
}
// Dispatches a parameter query to getParameterEqualizer() for the common tag;
// any other tag is rejected.
ndk::ScopedAStatus EqualizerSw::getParameterSpecific(const Parameter::Id& id,
                                                     Parameter::Specific* specific) {
    auto idTag = id.getTag();
    RETURN_IF(Parameter::Id::equalizerTag != idTag, EX_ILLEGAL_ARGUMENT, "wrongIdTag");
    auto eqId = id.get<Parameter::Id::equalizerTag>();
    auto eqIdTag = eqId.getTag();
    if (eqIdTag == Equalizer::Id::commonTag) {
        return getParameterEqualizer(eqId.get<Equalizer::Id::commonTag>(), specific);
    }
    LOG(ERROR) << __func__ << " tag " << toString(eqIdTag) << " not supported";
    return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
                                                            "EqualizerTagNotSupported");
}
// Packs the value for |tag| into |specific|: band levels and preset come from
// the context cache; band frequencies, presets and center frequencies are the
// static tables.
ndk::ScopedAStatus EqualizerSw::getParameterEqualizer(const Equalizer::Tag& tag,
                                                      Parameter::Specific* specific) {
    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
    Equalizer eqParam;
    switch (tag) {
        case Equalizer::bandLevels: {
            eqParam.set<Equalizer::bandLevels>(mContext->getEqBandLevels());
            break;
        }
        case Equalizer::preset: {
            eqParam.set<Equalizer::preset>(mContext->getEqPreset());
            break;
        }
        case Equalizer::centerFreqMh: {
            eqParam.set<Equalizer::centerFreqMh>(mContext->getCenterFreqs());
            break;
        }
        case Equalizer::bandFrequencies: {
            eqParam.set<Equalizer::bandFrequencies>(kBandFrequency);
            break;
        }
        case Equalizer::presets: {
            eqParam.set<Equalizer::presets>(kPresets);
            break;
        }
        default: {
            LOG(ERROR) << __func__ << " not handled tag: " << toString(tag);
            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
                                                                    "unsupportedTag");
        }
    }
    specific->set<Parameter::Specific::equalizer>(eqParam);
    return ndk::ScopedAStatus::ok();
}
// Lazily creates the per-instance context; subsequent calls return the
// existing one.
std::shared_ptr<EffectContext> EqualizerSw::createContext(const Parameter::Common& common) {
    if (!mContext) {
        mContext = std::make_shared<EqualizerSwContext>(1 /* statusFmqDepth */, common);
    } else {
        LOG(DEBUG) << __func__ << " context already exist";
    }
    return mContext;
}
// Drops the context; resetting a null shared_ptr is a harmless no-op.
RetCode EqualizerSw::releaseContext() {
    mContext.reset();
    return RetCode::SUCCESS;
}
// Processing method running in EffectWorker thread. The SW implementation is
// a straight passthrough copy of |samples| floats.
IEffect::Status EqualizerSw::effectProcessImpl(float* in, float* out, int samples) {
    // TODO: get data buffer and process.
    LOG(DEBUG) << __func__ << " in " << in << " out " << out << " samples " << samples;
    for (int i = 0; i < samples; ++i) {
        out[i] = in[i];
    }
    return {STATUS_OK, samples, samples};
}
} // namespace aidl::android::hardware::audio::effect

View File

@@ -0,0 +1,122 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <aidl/android/hardware/audio/effect/BnEffect.h>
#include <fmq/AidlMessageQueue.h>
#include <cstdlib>
#include <memory>
#include "effect-impl/EffectImpl.h"
namespace aidl::android::hardware::audio::effect {
class EqualizerSwContext final : public EffectContext {
public:
EqualizerSwContext(int statusDepth, const Parameter::Common& common)
: EffectContext(statusDepth, common) {
LOG(DEBUG) << __func__;
}
RetCode setEqPreset(const int& presetIdx) {
if (presetIdx < 0 || presetIdx >= kMaxPresetNumber) {
return RetCode::ERROR_ILLEGAL_PARAMETER;
}
mPreset = presetIdx;
return RetCode::SUCCESS;
}
int getEqPreset() { return mPreset; }
RetCode setEqBandLevels(const std::vector<Equalizer::BandLevel>& bandLevels) {
if (bandLevels.size() > kMaxBandNumber) {
LOG(ERROR) << __func__ << " return because size exceed " << kMaxBandNumber;
return RetCode::ERROR_ILLEGAL_PARAMETER;
}
RetCode ret = RetCode::SUCCESS;
for (auto& it : bandLevels) {
if (it.index >= kMaxBandNumber || it.index < 0) {
LOG(ERROR) << __func__ << " index illegal, skip: " << it.index << " - "
<< it.levelMb;
ret = RetCode::ERROR_ILLEGAL_PARAMETER;
} else {
mBandLevels[it.index] = it.levelMb;
}
}
return ret;
}
std::vector<Equalizer::BandLevel> getEqBandLevels() {
std::vector<Equalizer::BandLevel> bandLevels;
for (int i = 0; i < kMaxBandNumber; i++) {
bandLevels.push_back({i, mBandLevels[i]});
}
return bandLevels;
}
std::vector<int> getCenterFreqs() {
return {std::begin(kPresetsFrequencies), std::end(kPresetsFrequencies)};
}
static const int kMaxBandNumber = 5;
static const int kMaxPresetNumber = 10;
static const int kCustomPreset = -1;
private:
static constexpr std::array<uint16_t, kMaxBandNumber> kPresetsFrequencies = {60, 230, 910, 3600,
14000};
// preset band level
int mPreset = kCustomPreset;
int32_t mBandLevels[kMaxBandNumber] = {3, 0, 0, 0, 3};
// Add equalizer specific context for processing here
};
// Software (passthrough) implementation of the equalizer effect. Parameters
// are validated against kRanges, cached in EqualizerSwContext and echoed back
// on query; audio is copied unmodified in effectProcessImpl().
class EqualizerSw final : public EffectImpl {
  public:
    static const std::string kEffectName;
    static const Capability kEqCap;
    static const Descriptor kDesc;
    EqualizerSw() { LOG(DEBUG) << __func__; }
    ~EqualizerSw() {
        // cleanUp() is inherited from the base implementation; called before
        // member destruction.
        cleanUp();
        LOG(DEBUG) << __func__;
    }
    // EffectImpl overrides; parameter accessors require mImplMutex.
    ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
            REQUIRES(mImplMutex) override;
    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific)
            REQUIRES(mImplMutex) override;
    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common)
            REQUIRES(mImplMutex) override;
    RetCode releaseContext() REQUIRES(mImplMutex) override;
    IEffect::Status effectProcessImpl(float* in, float* out, int samples)
            REQUIRES(mImplMutex) override;
    std::string getEffectName() override { return kEffectName; }

  private:
    static const std::vector<Equalizer::BandFrequency> kBandFrequency;
    static const std::vector<Equalizer::Preset> kPresets;
    static const std::vector<Range::EqualizerRange> kRanges;
    // Helper for getParameterSpecific(): packs the value for |tag|.
    ndk::ScopedAStatus getParameterEqualizer(const Equalizer::Tag& tag,
                                             Parameter::Specific* specific) REQUIRES(mImplMutex);
    std::shared_ptr<EqualizerSwContext> mContext;
};
} // namespace aidl::android::hardware::audio::effect

40
audio/eraser/Android.bp Normal file
View File

@@ -0,0 +1,40 @@
/*
* Copyright (C) 2024 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package {
    default_team: "trendy_team_android_media_audio_framework",
    // See: http://go/android-license-faq
    // A large-scale-change added 'default_applicable_licenses' to import
    // all of the 'license_kinds' from "hardware_interfaces_license"
    // to get the below license kinds:
    // SPDX-license-identifier-Apache-2.0
    default_applicable_licenses: ["hardware_interfaces_license"],
}
// Software eraser effect library; installed under the soundfx/ vendor path
// and shares the common effect sources via :effectCommonFile.
cc_library_shared {
    name: "liberasersw",
    defaults: [
        "aidlaudioeffectservice_defaults",
    ],
    srcs: [
        "Eraser.cpp",
        ":effectCommonFile",
    ],
    relative_install_path: "soundfx",
    visibility: [
        "//hardware/interfaces/audio/aidl/default:__subpackages__",
    ],
}

270
audio/eraser/Eraser.cpp Normal file
View File

@@ -0,0 +1,270 @@
/*
* Copyright (C) 2024 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define LOG_TAG "AHAL_Eraser"
#include "Eraser.h"
#include <android-base/logging.h>
#include <system/audio_effects/effect_uuid.h>
#include <optional>
using aidl::android::hardware::audio::common::getChannelCount;
using aidl::android::hardware::audio::effect::Descriptor;
using aidl::android::hardware::audio::effect::EraserSw;
using aidl::android::hardware::audio::effect::getEffectImplUuidEraserSw;
using aidl::android::hardware::audio::effect::getEffectTypeUuidEraser;
using aidl::android::hardware::audio::effect::IEffect;
using aidl::android::hardware::audio::effect::State;
using aidl::android::media::audio::common::AudioChannelLayout;
using aidl::android::media::audio::common::AudioUuid;
// Factory entry point: instantiates the software eraser effect when the
// requested implementation UUID matches this library.
extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid,
                                           std::shared_ptr<IEffect>* instanceSpp) {
    const bool uuidMatches =
            in_impl_uuid != nullptr && *in_impl_uuid == getEffectImplUuidEraserSw();
    if (!uuidMatches) {
        LOG(ERROR) << __func__ << "uuid not supported";
        return EX_ILLEGAL_ARGUMENT;
    }
    if (instanceSpp == nullptr) {
        LOG(ERROR) << __func__ << " invalid input parameter!";
        return EX_ILLEGAL_ARGUMENT;
    }
    *instanceSpp = ndk::SharedRefBase::make<EraserSw>();
    LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
    return EX_NONE;
}
// Factory query entry point: returns the static EraserSw descriptor for a
// matching implementation UUID.
extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) {
    const bool uuidMatches =
            in_impl_uuid != nullptr && *in_impl_uuid == getEffectImplUuidEraserSw();
    if (!uuidMatches) {
        LOG(ERROR) << __func__ << "uuid not supported";
        return EX_ILLEGAL_ARGUMENT;
    }
    *_aidl_return = EraserSw::kDescriptor;
    return EX_NONE;
}
namespace aidl::android::hardware::audio::effect {
// Effect name reported through the descriptor.
const std::string EraserSw::kEffectName = "EraserSw";
// Static descriptor: insert effect, no hardware acceleration.
const Descriptor EraserSw::kDescriptor = {
        .common = {.id = {.type = getEffectTypeUuidEraser(), .uuid = getEffectImplUuidEraserSw()},
                   .flags = {.type = Flags::Type::INSERT,
                             .insert = Flags::Insert::FIRST,
                             .hwAcceleratorMode = Flags::HardwareAccelerator::NONE},
                   .name = EraserSw::kEffectName,
                   .implementor = "The Android Open Source Project"}};
// Copies the static descriptor into |_aidl_return|.
ndk::ScopedAStatus EraserSw::getDescriptor(Descriptor* _aidl_return) {
    const auto& descriptor = kDescriptor;
    LOG(DEBUG) << __func__ << descriptor.toString();
    *_aidl_return = descriptor;
    return ndk::ScopedAStatus::ok();
}
// Stores one eraser parameter in the context's parameter map.
ndk::ScopedAStatus EraserSw::setParameterSpecific(const Parameter::Specific& specific) {
    RETURN_IF(Parameter::Specific::eraser != specific.getTag(), EX_ILLEGAL_ARGUMENT,
              "EffectNotSupported");
    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
    const auto& eraserParam = specific.get<Parameter::Specific::eraser>();
    return mContext->setParam(eraserParam.getTag(), eraserParam);
}
// Looks up the cached parameter for the requested common tag and packs it
// into |specific|; fails if the parameter was never set or the tag is not an
// eraser common tag.
ndk::ScopedAStatus EraserSw::getParameterSpecific(const Parameter::Id& id,
                                                  Parameter::Specific* specific) {
    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
    auto tag = id.getTag();
    RETURN_IF(Parameter::Id::eraserTag != tag, EX_ILLEGAL_ARGUMENT, "wrongIdTag");
    auto eraserId = id.get<Parameter::Id::eraserTag>();
    auto eraserTag = eraserId.getTag();
    switch (eraserTag) {
        case Eraser::Id::commonTag: {
            auto specificTag = eraserId.get<Eraser::Id::commonTag>();
            // nullopt means setParam() was never called for this tag.
            std::optional<Eraser> param = mContext->getParam(specificTag);
            if (!param.has_value()) {
                return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
                                                                        "EraserTagNotSupported");
            }
            specific->set<Parameter::Specific::eraser>(param.value());
            break;
        }
        default: {
            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
                                                                    "EraserTagNotSupported");
        }
    }
    return ndk::ScopedAStatus::ok();
}
// Lazily creates the per-instance context; subsequent calls return the
// existing one.
std::shared_ptr<EffectContext> EraserSw::createContext(const Parameter::Common& common) {
    if (!mContext) {
        mContext = std::make_shared<EraserSwContext>(1 /* statusFmqDepth */, common);
    } else {
        LOG(DEBUG) << __func__ << " context already exist";
    }
    return mContext;
}
// Drops the context; resetting a null shared_ptr is a harmless no-op.
RetCode EraserSw::releaseContext() {
    mContext.reset();
    return RetCode::SUCCESS;
}
// Destructor: cleanUp() tears down shared effect state before members are
// destroyed.
EraserSw::~EraserSw() {
    cleanUp();
    LOG(DEBUG) << __func__;
}
// Handles START/STOP/RESET state transitions under mImplMutex. STOP drains
// first (instead of stopping immediately) when the client HAL version
// supports draining.
ndk::ScopedAStatus EraserSw::command(CommandId command) {
    std::lock_guard lg(mImplMutex);
    RETURN_IF(mState == State::INIT, EX_ILLEGAL_STATE, "instanceNotOpen");
    switch (command) {
        case CommandId::START:
            RETURN_OK_IF(mState == State::PROCESSING);
            mState = State::PROCESSING;
            mContext->enable();
            startThread();
            // Wake any reader blocked on the data queue event flag.
            RETURN_IF(notifyEventFlag(mDataMqNotEmptyEf) != RetCode::SUCCESS, EX_ILLEGAL_STATE,
                      "notifyEventFlagNotEmptyFailed");
            break;
        case CommandId::STOP:
            RETURN_OK_IF(mState == State::IDLE || mState == State::DRAINING);
            if (mVersion < kDrainSupportedVersion) {
                // Old clients: stop immediately.
                mState = State::IDLE;
                stopThread();
                mContext->disable();
            } else {
                // Newer clients: enter DRAINING; effectProcessImpl() finishes
                // the transition via drainingComplete_l().
                mState = State::DRAINING;
                startDraining();
                mContext->startDraining();
            }
            RETURN_IF(notifyEventFlag(mDataMqNotEmptyEf) != RetCode::SUCCESS, EX_ILLEGAL_STATE,
                      "notifyEventFlagNotEmptyFailed");
            break;
        case CommandId::RESET:
            mState = State::IDLE;
            RETURN_IF(notifyEventFlag(mDataMqNotEmptyEf) != RetCode::SUCCESS, EX_ILLEGAL_STATE,
                      "notifyEventFlagNotEmptyFailed");
            stopThread();
            // NOTE(review): RESET uses mImplContext while START/STOP use
            // mContext — presumably they refer to the same context object;
            // confirm against EffectImpl.
            mImplContext->disable();
            mImplContext->reset();
            mImplContext->resetBuffer();
            break;
        default:
            // NOTE(review): message "instance still processing" looks copied
            // from elsewhere; the condition is an unsupported command id.
            LOG(ERROR) << getEffectNameWithVersion() << __func__ << " instance still processing";
            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
                                                                    "CommandIdNotSupported");
    }
    LOG(VERBOSE) << getEffectNameWithVersion() << __func__
                 << " transfer to state: " << toString(mState);
    return ndk::ScopedAStatus::ok();
}
// Processing method running in EffectWorker thread.
IEffect::Status EraserSw::effectProcessImpl(float* in, float* out, int samples) {
    RETURN_VALUE_IF(!mContext, (IEffect::Status{EX_NULL_POINTER, 0, 0}), "nullContext");
    const IEffect::Status result = mContext->process(in, out, samples);
    // While draining, the context reports NOT_ENOUGH_DATA once the final
    // ramp-down cycle has been emitted; complete the drain then.
    if (mState == State::DRAINING && result.status == STATUS_NOT_ENOUGH_DATA) {
        drainingComplete_l();
    }
    return result;
}
// Finishes a drain: transitions DRAINING -> IDLE. No-op in any other state.
void EraserSw::drainingComplete_l() {
    if (mState == State::DRAINING) {
        LOG(DEBUG) << getEffectNameWithVersion() << __func__;
        finishDraining();
        mState = State::IDLE;
    }
}
// Forwards status-FMQ depth and common parameters to the base EffectContext.
EraserSwContext::EraserSwContext(int statusDepth, const Parameter::Common& common)
    : EffectContext(statusDepth, common) {
    LOG(DEBUG) << __func__;
}
// Members clean up via RAII; destructor only logs for debugging.
EraserSwContext::~EraserSwContext() {
    LOG(DEBUG) << __func__;
}
/**
 * Returns the stored Eraser parameter for `tag`, or std::nullopt when the
 * tag has never been set via setParam().
 */
template <typename TAG>
std::optional<Eraser> EraserSwContext::getParam(TAG tag) {
    // Single lookup instead of the previous find() + at() double lookup.
    if (const auto it = mParamsMap.find(tag); it != mParamsMap.end()) {
        return it->second;
    }
    return std::nullopt;
}
/**
 * Stores (or replaces) the Eraser parameter for `tag`. Always succeeds.
 */
template <typename TAG>
ndk::ScopedAStatus EraserSwContext::setParam(TAG tag, Eraser eraser) {
    // `eraser` is a by-value sink parameter; move it into the map instead of
    // copying it a second time.
    mParamsMap[tag] = std::move(eraser);
    return ndk::ScopedAStatus::ok();
}
// Copies `samples` interleaved floats from `in` to `out`, keeping only the
// first `outputChannelCount` channels of each frame (channel reduction).
// While draining, applies a linear fade-out over the buffer and reports
// STATUS_NOT_ENOUGH_DATA so the caller can finish the drain.
IEffect::Status EraserSwContext::process(float* in, float* out, int samples) {
    LOG(DEBUG) << __func__ << " in " << in << " out " << out << " samples " << samples;
    IEffect::Status procStatus = {EX_ILLEGAL_ARGUMENT, 0, 0};
    const auto inputChannelCount = getChannelCount(mCommon.input.base.channelMask);
    const auto outputChannelCount = getChannelCount(mCommon.output.base.channelMask);
    // Output may have fewer channels than input, never more.
    if (inputChannelCount < outputChannelCount) {
        LOG(ERROR) << __func__ << " invalid channel count, in: " << inputChannelCount
                   << " out: " << outputChannelCount;
        return procStatus;
    }
    // Reject empty buffers and partial frames.
    if (samples <= 0 || 0 != samples % inputChannelCount) {
        LOG(ERROR) << __func__ << " invalid samples: " << samples;
        return procStatus;
    }
    const int iFrames = samples / inputChannelCount;
    // Per-frame gain decrement for the drain ramp (iFrames >= 1 here).
    const float gainPerSample = 1.f / iFrames;
    for (int i = 0; i < iFrames; i++) {
        if (isDraining()) {
            // Linear fade: gain goes from (iFrames-1)/iFrames down to 0.
            const float gain = (iFrames - i - 1) * gainPerSample;
            for (size_t c = 0; c < outputChannelCount; c++) {
                out[c] = in[c] * gain;
            }
        } else {
            // Normal path: straight copy of the retained channels.
            std::memcpy(out, in, outputChannelCount * sizeof(float));
        }
        in += inputChannelCount;
        out += outputChannelCount;
    }
    // drain for one cycle
    if (isDraining()) {
        procStatus.status = STATUS_NOT_ENOUGH_DATA;
        finishDraining();
    } else {
        procStatus.status = STATUS_OK;
    }
    procStatus.fmqConsumed = static_cast<int32_t>(iFrames * inputChannelCount);
    procStatus.fmqProduced = static_cast<int32_t>(iFrames * outputChannelCount);
    return procStatus;
}
} // namespace aidl::android::hardware::audio::effect

73
audio/eraser/Eraser.h Normal file
View File

@@ -0,0 +1,73 @@
/*
* Copyright (C) 2024 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include "effect-impl/EffectContext.h"
#include "effect-impl/EffectImpl.h"
#include <fmq/AidlMessageQueue.h>
#include <unordered_map>
#include <vector>
namespace aidl::android::hardware::audio::effect {
// Per-instance state for the software Eraser effect: stores the Eraser
// parameters keyed by tag and implements the sample processing.
class EraserSwContext final : public EffectContext {
  public:
    EraserSwContext(int statusDepth, const Parameter::Common& common);
    ~EraserSwContext() final;
    // Returns the stored parameter for `tag`, or nullopt if never set.
    template <typename TAG>
    std::optional<Eraser> getParam(TAG tag);
    // Stores (or replaces) the parameter for `tag`.
    template <typename TAG>
    ndk::ScopedAStatus setParam(TAG tag, Eraser eraser);
    // Processes `samples` interleaved floats from `in` into `out`.
    IEffect::Status process(float* in, float* out, int samples);

  private:
    // Parameters keyed by Eraser union tag.
    std::unordered_map<Eraser::Tag, Eraser> mParamsMap;
};
// Software implementation of the Eraser effect AIDL interface.
// Overrides EffectImpl lifecycle hooks; adds drain handling via command()
// and drainingComplete_l().
class EraserSw final : public EffectImpl {
  public:
    static const std::string kEffectName;
    static const Capability kCapability;
    static const Descriptor kDescriptor;
    ~EraserSw() final;
    ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) final;
    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
            REQUIRES(mImplMutex) final;
    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific)
            REQUIRES(mImplMutex) final;
    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common)
            REQUIRES(mImplMutex) final;
    RetCode releaseContext() REQUIRES(mImplMutex) final;
    std::string getEffectName() final { return kEffectName; };
    IEffect::Status effectProcessImpl(float* in, float* out, int samples)
            REQUIRES(mImplMutex) final;
    ndk::ScopedAStatus command(CommandId command) final;
    // Transitions DRAINING -> IDLE; must hold mImplMutex.
    void drainingComplete_l() REQUIRES(mImplMutex);

  private:
    // NOTE(review): element type is Range::SpatializerRange — looks like a
    // copy-paste from the Spatializer effect; confirm an EraserRange is not
    // intended.
    static const std::vector<Range::SpatializerRange> kRanges;
    std::shared_ptr<EraserSwContext> mContext GUARDED_BY(mImplMutex);
};
} // namespace aidl::android::hardware::audio::effect

View File

@@ -0,0 +1,40 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package {
default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "hardware_interfaces_license"
// to get the below license kinds:
// SPDX-license-identifier-Apache-2.0
default_applicable_licenses: ["hardware_interfaces_license"],
}
// Vendor-extension example effect library; loaded by the effect factory from
// the soundfx directory.
cc_library_shared {
    name: "libextensioneffect",
    defaults: [
        "aidlaudioeffectservice_defaults",
    ],
    srcs: [
        "ExtensionEffect.cpp",
        // Shared effect implementation sources (filegroup defined elsewhere).
        ":effectCommonFile",
    ],
    // Installed under /vendor/lib[64]/soundfx.
    relative_install_path: "soundfx",
    visibility: [
        "//hardware/interfaces/audio/aidl/default:__subpackages__",
    ],
}

View File

@@ -0,0 +1,143 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <algorithm>
#include <cstddef>
#include <memory>
#include <unordered_set>
#include <aidl/android/hardware/audio/effect/DefaultExtension.h>
#define LOG_TAG "AHAL_ExtensionEffect"
#include <android-base/logging.h>
#include <fmq/AidlMessageQueue.h>
#include <system/audio_effects/effect_uuid.h>
#include "ExtensionEffect.h"
using aidl::android::hardware::audio::effect::DefaultExtension;
using aidl::android::hardware::audio::effect::Descriptor;
using aidl::android::hardware::audio::effect::ExtensionEffect;
using aidl::android::hardware::audio::effect::getEffectImplUuidExtension;
using aidl::android::hardware::audio::effect::getEffectTypeUuidExtension;
using aidl::android::hardware::audio::effect::IEffect;
using aidl::android::hardware::audio::effect::Range;
using aidl::android::hardware::audio::effect::VendorExtension;
using aidl::android::media::audio::common::AudioUuid;
// Factory entry point: validates the implementation UUID and the output
// pointer, then constructs an ExtensionEffect instance.
extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid,
                                           std::shared_ptr<IEffect>* instanceSpp) {
    if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidExtension()) {
        LOG(ERROR) << __func__ << "uuid not supported";
        return EX_ILLEGAL_ARGUMENT;
    }
    if (!instanceSpp) {
        LOG(ERROR) << __func__ << " invalid input parameter!";
        return EX_ILLEGAL_ARGUMENT;
    }
    *instanceSpp = ndk::SharedRefBase::make<ExtensionEffect>();
    LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
    return EX_NONE;
}
// Factory entry point: returns the static descriptor for the extension
// effect after validating the implementation UUID.
extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) {
    if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidExtension()) {
        LOG(ERROR) << __func__ << "uuid not supported";
        return EX_ILLEGAL_ARGUMENT;
    }
    *_aidl_return = ExtensionEffect::kDescriptor;
    return EX_NONE;
}
namespace aidl::android::hardware::audio::effect {
// Static identity of the extension example effect. No capability flags are
// set beyond the type/implementation UUIDs.
const std::string ExtensionEffect::kEffectName = "ExtensionEffectExample";
const Descriptor ExtensionEffect::kDescriptor = {
        .common = {.id = {.type = getEffectTypeUuidExtension(),
                          .uuid = getEffectImplUuidExtension(),
                          .proxy = std::nullopt},
                   .name = ExtensionEffect::kEffectName,
                   .implementor = "The Android Open Source Project"}};
// Copies the static descriptor into the caller-supplied out-parameter.
ndk::ScopedAStatus ExtensionEffect::getDescriptor(Descriptor* _aidl_return) {
    LOG(DEBUG) << __func__ << kDescriptor.toString();
    *_aidl_return = kDescriptor;
    return ndk::ScopedAStatus::ok();
}
// Accepts only vendorEffect parameters: unwraps the VendorExtension parcelable
// into a DefaultExtension and hands its raw bytes to the context.
ndk::ScopedAStatus ExtensionEffect::setParameterSpecific(const Parameter::Specific& specific) {
    RETURN_IF(Parameter::Specific::vendorEffect != specific.getTag(), EX_ILLEGAL_ARGUMENT,
              "EffectNotSupported");
    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
    auto& vendorEffect = specific.get<Parameter::Specific::vendorEffect>();
    // The opaque payload is expected to be a DefaultExtension parcelable.
    std::optional<DefaultExtension> defaultExt;
    RETURN_IF(STATUS_OK != vendorEffect.extension.getParcelable(&defaultExt), EX_ILLEGAL_ARGUMENT,
              "getParcelableFailed");
    RETURN_IF(!defaultExt.has_value(), EX_ILLEGAL_ARGUMENT, "parcelableNull");
    RETURN_IF(mContext->setParams(defaultExt->bytes) != RetCode::SUCCESS, EX_ILLEGAL_ARGUMENT,
              "paramNotSupported");
    return ndk::ScopedAStatus::ok();
}
// Mirrors setParameterSpecific: unwraps the vendor id payload, asks the
// context for the stored bytes, and re-wraps them in a VendorExtension.
ndk::ScopedAStatus ExtensionEffect::getParameterSpecific(const Parameter::Id& id,
                                                         Parameter::Specific* specific) {
    auto tag = id.getTag();
    RETURN_IF(Parameter::Id::vendorEffectTag != tag, EX_ILLEGAL_ARGUMENT, "wrongIdTag");
    auto extensionId = id.get<Parameter::Id::vendorEffectTag>();
    // The id payload itself carries a DefaultExtension with the query bytes.
    std::optional<DefaultExtension> defaultIdExt;
    RETURN_IF(STATUS_OK != extensionId.extension.getParcelable(&defaultIdExt), EX_ILLEGAL_ARGUMENT,
              "getIdParcelableFailed");
    RETURN_IF(!defaultIdExt.has_value(), EX_ILLEGAL_ARGUMENT, "parcelableIdNull");
    VendorExtension extension;
    DefaultExtension defaultExt;
    defaultExt.bytes = mContext->getParams(defaultIdExt->bytes);
    RETURN_IF(STATUS_OK != extension.extension.setParcelable(defaultExt), EX_ILLEGAL_ARGUMENT,
              "setParcelableFailed");
    specific->set<Parameter::Specific::vendorEffect>(extension);
    return ndk::ScopedAStatus::ok();
}
// Lazily creates the effect context; repeated calls return the existing one.
std::shared_ptr<EffectContext> ExtensionEffect::createContext(const Parameter::Common& common) {
    if (!mContext) {
        mContext = std::make_shared<ExtensionEffectContext>(1 /* statusFmqDepth */, common);
    } else {
        LOG(DEBUG) << __func__ << " context already exist";
    }
    return mContext;
}
// Drops the owned context; reset() is safe to call on an empty shared_ptr.
RetCode ExtensionEffect::releaseContext() {
    mContext.reset();
    return RetCode::SUCCESS;
}
// Processing method running in EffectWorker thread.
IEffect::Status ExtensionEffect::effectProcessImpl(float* in, float* out, int samples) {
    // TODO: get data buffer and process.
    LOG(DEBUG) << __func__ << " in " << in << " out " << out << " samples " << samples;
    // Pass-through: copy the input buffer to the output unchanged.
    for (int i = 0; i < samples; ++i) {
        out[i] = in[i];
    }
    return {STATUS_OK, samples, samples};
}
} // namespace aidl::android::hardware::audio::effect

View File

@@ -0,0 +1,73 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <aidl/android/hardware/audio/effect/BnEffect.h>
#include <fmq/AidlMessageQueue.h>
#include <memory>
#include <vector>
#include "effect-impl/EffectImpl.h"
namespace aidl::android::hardware::audio::effect {
// Context for the vendor-extension example effect: stores an opaque byte
// blob set by the client and returns it on query.
class ExtensionEffectContext final : public EffectContext {
  public:
    ExtensionEffectContext(int statusDepth, const Parameter::Common& common)
        : EffectContext(statusDepth, common) {
        LOG(DEBUG) << __func__;
    }

    // Replaces the stored parameter blob; always succeeds.
    RetCode setParams(const std::vector<uint8_t>& params) {
        mParams = params;
        return RetCode::SUCCESS;
    }
    // Returns the stored blob; the query id is ignored in this example.
    std::vector<uint8_t> getParams(std::vector<uint8_t> id __unused) const { return mParams; }

  private:
    // Opaque vendor parameter bytes, last value set via setParams().
    std::vector<uint8_t> mParams;
};
// Example effect demonstrating vendor-extension parameter passing through
// the AIDL effect HAL; processing is a pass-through (see .cpp).
class ExtensionEffect final : public EffectImpl {
  public:
    static const std::string kEffectName;
    // NOTE(review): declared but no definition is visible in the matching
    // .cpp — confirm kCapability is defined somewhere or remove.
    static const Capability kCapability;
    static const Descriptor kDescriptor;
    ExtensionEffect() { LOG(DEBUG) << __func__; }
    ~ExtensionEffect() {
        cleanUp();
        LOG(DEBUG) << __func__;
    }
    ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
            REQUIRES(mImplMutex) override;
    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific)
            REQUIRES(mImplMutex) override;
    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common)
            REQUIRES(mImplMutex) override;
    RetCode releaseContext() REQUIRES(mImplMutex) override;
    std::string getEffectName() override { return kEffectName; };
    IEffect::Status effectProcessImpl(float* in, float* out, int samples)
            REQUIRES(mImplMutex) override;

  private:
    std::shared_ptr<ExtensionEffectContext> mContext GUARDED_BY(mImplMutex);
};
} // namespace aidl::android::hardware::audio::effect

View File

@@ -0,0 +1,40 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package {
default_team: "trendy_team_android_media_audio_framework",
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "hardware_interfaces_license"
// to get the below license kinds:
// SPDX-license-identifier-Apache-2.0
default_applicable_licenses: ["hardware_interfaces_license"],
}
// Software haptic-generator effect library; loaded by the effect factory
// from the soundfx directory.
cc_library_shared {
    name: "libhapticgeneratorsw",
    defaults: [
        "aidlaudioeffectservice_defaults",
    ],
    srcs: [
        "HapticGeneratorSw.cpp",
        // Shared effect implementation sources (filegroup defined elsewhere).
        ":effectCommonFile",
    ],
    // Installed under /vendor/lib[64]/soundfx.
    relative_install_path: "soundfx",
    visibility: [
        "//hardware/interfaces/audio/aidl/default",
    ],
}

View File

@@ -0,0 +1,194 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <algorithm>
#include <cstddef>
#define LOG_TAG "AHAL_HapticGeneratorSw"
#include <android-base/logging.h>
#include <fmq/AidlMessageQueue.h>
#include <system/audio_effects/effect_uuid.h>
#include "HapticGeneratorSw.h"
using aidl::android::hardware::audio::effect::Descriptor;
using aidl::android::hardware::audio::effect::getEffectImplUuidHapticGeneratorSw;
using aidl::android::hardware::audio::effect::getEffectTypeUuidHapticGenerator;
using aidl::android::hardware::audio::effect::HapticGeneratorSw;
using aidl::android::hardware::audio::effect::IEffect;
using aidl::android::hardware::audio::effect::State;
using aidl::android::media::audio::common::AudioUuid;
// Factory entry point: validates the implementation UUID and the output
// pointer, then constructs a HapticGeneratorSw instance.
extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid,
                                           std::shared_ptr<IEffect>* instanceSpp) {
    if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidHapticGeneratorSw()) {
        LOG(ERROR) << __func__ << "uuid not supported";
        return EX_ILLEGAL_ARGUMENT;
    }
    if (!instanceSpp) {
        LOG(ERROR) << __func__ << " invalid input parameter!";
        return EX_ILLEGAL_ARGUMENT;
    }
    *instanceSpp = ndk::SharedRefBase::make<HapticGeneratorSw>();
    LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
    return EX_NONE;
}
// Factory entry point: returns the static descriptor for the software
// haptic generator after validating the implementation UUID.
extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) {
    if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidHapticGeneratorSw()) {
        LOG(ERROR) << __func__ << "uuid not supported";
        return EX_ILLEGAL_ARGUMENT;
    }
    *_aidl_return = HapticGeneratorSw::kDescriptor;
    return EX_NONE;
}
namespace aidl::android::hardware::audio::effect {
const std::string HapticGeneratorSw::kEffectName = "HapticGeneratorSw";
/* Effect descriptor: insert effect placed first in the chain, with volume
 * control capability. */
const Descriptor HapticGeneratorSw::kDescriptor = {
        .common = {.id = {.type = getEffectTypeUuidHapticGenerator(),
                          .uuid = getEffectImplUuidHapticGeneratorSw(),
                          .proxy = std::nullopt},
                   .flags = {.type = Flags::Type::INSERT,
                             .insert = Flags::Insert::FIRST,
                             .volume = Flags::Volume::CTRL},
                   .name = HapticGeneratorSw::kEffectName,
                   .implementor = "The Android Open Source Project"}};
// Copies the static descriptor into the caller-supplied out-parameter.
ndk::ScopedAStatus HapticGeneratorSw::getDescriptor(Descriptor* _aidl_return) {
    LOG(DEBUG) << __func__ << kDescriptor.toString();
    *_aidl_return = kDescriptor;
    return ndk::ScopedAStatus::ok();
}
// Dispatches a HapticGenerator parameter write to the context: per-track
// haptic scales or the vibrator information block.
ndk::ScopedAStatus HapticGeneratorSw::setParameterSpecific(const Parameter::Specific& specific) {
    RETURN_IF(Parameter::Specific::hapticGenerator != specific.getTag(), EX_ILLEGAL_ARGUMENT,
              "EffectNotSupported");
    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
    auto& hgParam = specific.get<Parameter::Specific::hapticGenerator>();
    auto tag = hgParam.getTag();
    switch (tag) {
        case HapticGenerator::hapticScales: {
            RETURN_IF(mContext->setHgHapticScales(hgParam.get<HapticGenerator::hapticScales>()) !=
                              RetCode::SUCCESS,
                      EX_ILLEGAL_ARGUMENT, "HapticScaleNotSupported");
            return ndk::ScopedAStatus::ok();
        }
        case HapticGenerator::vibratorInfo: {
            RETURN_IF(mContext->setHgVibratorInformation(
                              hgParam.get<HapticGenerator::vibratorInfo>()) != RetCode::SUCCESS,
                      EX_ILLEGAL_ARGUMENT, "VibratorInfoNotSupported");
            return ndk::ScopedAStatus::ok();
        }
        default: {
            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
                    EX_ILLEGAL_ARGUMENT, "HapticGeneratorTagNotSupported");
        }
    }
}
// Validates the Parameter::Id wrapper and dispatches a HapticGenerator
// parameter read via getParameterHapticGenerator().
ndk::ScopedAStatus HapticGeneratorSw::getParameterSpecific(const Parameter::Id& id,
                                                           Parameter::Specific* specific) {
    auto tag = id.getTag();
    RETURN_IF(Parameter::Id::hapticGeneratorTag != tag, EX_ILLEGAL_ARGUMENT, "wrongIdTag");
    auto hgId = id.get<Parameter::Id::hapticGeneratorTag>();
    auto hgIdTag = hgId.getTag();
    switch (hgIdTag) {
        case HapticGenerator::Id::commonTag:
            return getParameterHapticGenerator(hgId.get<HapticGenerator::Id::commonTag>(),
                                               specific);
        default:
            // Fix: log the inner HapticGenerator::Id tag being switched on;
            // the previous code logged the outer Parameter::Id tag, which is
            // always hapticGeneratorTag here and hid the real mismatch.
            LOG(ERROR) << __func__ << " unsupported tag: " << toString(hgIdTag);
            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
                    EX_ILLEGAL_ARGUMENT, "HapticGeneratorTagNotSupported");
    }
}
// Reads the requested HapticGenerator field from the context and packs it
// into the Parameter::Specific out-parameter.
ndk::ScopedAStatus HapticGeneratorSw::getParameterHapticGenerator(const HapticGenerator::Tag& tag,
                                                                  Parameter::Specific* specific) {
    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
    HapticGenerator hgParam;
    switch (tag) {
        case HapticGenerator::hapticScales: {
            hgParam.set<HapticGenerator::hapticScales>(mContext->getHgHapticScales());
            break;
        }
        case HapticGenerator::vibratorInfo: {
            hgParam.set<HapticGenerator::vibratorInfo>(mContext->getHgVibratorInformation());
            break;
        }
        default: {
            LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
                    EX_ILLEGAL_ARGUMENT, "HapticGeneratorTagNotSupported");
        }
    }
    specific->set<Parameter::Specific::hapticGenerator>(hgParam);
    return ndk::ScopedAStatus::ok();
}
// Lazily creates the effect context; repeated calls return the existing one.
std::shared_ptr<EffectContext> HapticGeneratorSw::createContext(const Parameter::Common& common) {
    if (!mContext) {
        mContext = std::make_shared<HapticGeneratorSwContext>(1 /* statusFmqDepth */, common);
    } else {
        LOG(DEBUG) << __func__ << " context already exist";
    }
    return mContext;
}
// Drops the owned context; reset() is safe to call on an empty shared_ptr.
RetCode HapticGeneratorSw::releaseContext() {
    mContext.reset();
    return RetCode::SUCCESS;
}
// Processing method running in EffectWorker thread.
IEffect::Status HapticGeneratorSw::effectProcessImpl(float* in, float* out, int samples) {
    // TODO: get data buffer and process.
    LOG(DEBUG) << __func__ << " in " << in << " out " << out << " samples " << samples;
    // Pass-through: copy the input buffer to the output unchanged.
    for (int i = 0; i < samples; ++i) {
        out[i] = in[i];
    }
    return {STATUS_OK, samples, samples};
}
// Records per-track haptic scales; a later entry for the same track id
// replaces the earlier one.
RetCode HapticGeneratorSwContext::setHgHapticScales(
        const std::vector<HapticGenerator::HapticScale>& hapticScales) {
    // Assume any audio track ID is valid
    for (const auto& scale : hapticScales) {
        mHapticScales.insert_or_assign(scale.id, scale);
    }
    return RetCode::SUCCESS;
}
std::vector<HapticGenerator::HapticScale> HapticGeneratorSwContext::getHgHapticScales() const {
std::vector<HapticGenerator::HapticScale> result;
std::transform(mHapticScales.begin(), mHapticScales.end(), std::back_inserter(result),
[](auto& scaleIt) { return scaleIt.second; });
return result;
}
} // namespace aidl::android::hardware::audio::effect

View File

@@ -0,0 +1,90 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <cstdlib>
#include <map>
#include <memory>
#include <aidl/android/hardware/audio/effect/BnEffect.h>
#include <fmq/AidlMessageQueue.h>
#include "effect-impl/EffectImpl.h"
namespace aidl::android::hardware::audio::effect {
// Per-instance state for the software haptic generator: per-track haptic
// scales and the vibrator description, with defaults until the client sets
// real values.
class HapticGeneratorSwContext final : public EffectContext {
  public:
    HapticGeneratorSwContext(int statusDepth, const Parameter::Common& common)
        : EffectContext(statusDepth, common) {
        LOG(DEBUG) << __func__;
    }

    // Merges per-track scales into the map (later entries win per id).
    RetCode setHgHapticScales(const std::vector<HapticGenerator::HapticScale>& hapticScales);
    // Returns all stored scales, ordered by track id.
    std::vector<HapticGenerator::HapticScale> getHgHapticScales() const;
    RetCode setHgVibratorInformation(const HapticGenerator::VibratorInformation& vibratorInfo) {
        // All float values are valid for resonantFrequencyHz, qFactor, maxAmplitude
        mVibratorInformation = vibratorInfo;
        return RetCode::SUCCESS;
    }
    HapticGenerator::VibratorInformation getHgVibratorInformation() const {
        return mVibratorInformation;
    }

  private:
    // Defaults used until setHgVibratorInformation() is called.
    static constexpr float DEFAULT_RESONANT_FREQUENCY = 150.0f;
    static constexpr float DEFAULT_Q_FACTOR = 1.0f;
    static constexpr float DEFAULT_MAX_AMPLITUDE = 0.0f;
    std::map<int /* trackID */, HapticGenerator::HapticScale> mHapticScales;
    HapticGenerator::VibratorInformation mVibratorInformation = {
            DEFAULT_RESONANT_FREQUENCY, DEFAULT_Q_FACTOR, DEFAULT_MAX_AMPLITUDE};
};
// Software stub of the HapticGenerator effect: stores parameters in its
// context but processes audio as a pass-through (see .cpp TODO).
class HapticGeneratorSw final : public EffectImpl {
  public:
    static const std::string kEffectName;
    static const Descriptor kDescriptor;
    HapticGeneratorSw() { LOG(DEBUG) << __func__; }
    ~HapticGeneratorSw() {
        cleanUp();
        LOG(DEBUG) << __func__;
    }
    ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
    ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
            REQUIRES(mImplMutex) override;
    ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id, Parameter::Specific* specific)
            REQUIRES(mImplMutex) override;
    std::shared_ptr<EffectContext> createContext(const Parameter::Common& common)
            REQUIRES(mImplMutex) override;
    RetCode releaseContext() REQUIRES(mImplMutex) override;
    IEffect::Status effectProcessImpl(float* in, float* out, int samples)
            REQUIRES(mImplMutex) override;
    std::string getEffectName() override { return kEffectName; }

  private:
    std::shared_ptr<HapticGeneratorSwContext> mContext GUARDED_BY(mImplMutex);
    // Helper for getParameterSpecific(): reads one field from the context.
    ndk::ScopedAStatus getParameterHapticGenerator(const HapticGenerator::Tag& tag,
                                                   Parameter::Specific* specific)
            REQUIRES(mImplMutex);
};
} // namespace aidl::android::hardware::audio::effect

View File

@@ -0,0 +1,32 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <aidl/android/hardware/audio/core/SurroundSoundConfig.h>
#include <aidl/android/media/audio/common/AudioFormatDescription.h>
#include <android_audio_policy_configuration.h>
#include <media/AidlConversionUtil.h>
namespace aidl::android::hardware::audio::core::internal {

// Converts an audio format name from the policy XSD config into an AIDL
// AudioFormatDescription.
ConversionResult<::aidl::android::media::audio::common::AudioFormatDescription>
xsdc2aidl_AudioFormatDescription(const std::string& xsdc);

// Converts the <surroundSound> section of the policy XSD config into the
// AIDL SurroundSoundConfig.
ConversionResult<SurroundSoundConfig> xsdc2aidl_SurroundSoundConfig(
        const ::android::audio::policy::configuration::SurroundSound& xsdc);

}  // namespace aidl::android::hardware::audio::core::internal

View File

@@ -0,0 +1,84 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <memory>
#include <optional>
#include <string>
#include <unordered_map>
#include <utility>
#include <vector>
#include <aidl/android/hardware/audio/core/SurroundSoundConfig.h>
#include <aidl/android/media/audio/common/AudioHalEngineConfig.h>
#include <android_audio_policy_configuration.h>
#include <android_audio_policy_configuration_enums.h>
#include <media/AidlConversionUtil.h>
#include "core-impl/Module.h"
#include "core-impl/XmlConverter.h"
namespace aidl::android::hardware::audio::core::internal {
// Parses audio_policy_configuration.xml (via the generated XSD bindings) and
// converts it into AIDL engine config, surround-sound config, and per-module
// configurations. Check getStatus() before using any getter.
class AudioPolicyConfigXmlConverter {
  public:
    using ModuleConfiguration = std::pair<std::string, std::unique_ptr<Module::Configuration>>;
    using ModuleConfigs = std::vector<ModuleConfiguration>;

    explicit AudioPolicyConfigXmlConverter(const std::string& configFilePath)
        : mConverter(configFilePath, &::android::audio::policy::configuration::read) {
        // Only convert when the XML parsed successfully.
        if (mConverter.getXsdcConfig()) {
            init();
        }
    }

    std::string getError() const { return mConverter.getError(); }
    ::android::status_t getStatus() const { return mConverter.getStatus(); }

    const ::aidl::android::media::audio::common::AudioHalEngineConfig& getAidlEngineConfig();
    const SurroundSoundConfig& getSurroundSoundConfig();
    // Transfers ownership of the parsed module configs to the caller.
    std::unique_ptr<ModuleConfigs> releaseModuleConfigs();

    // Public for testing purposes.
    static const SurroundSoundConfig& getDefaultSurroundSoundConfig();

  private:
    const std::optional<::android::audio::policy::configuration::AudioPolicyConfiguration>&
    getXsdcConfig() const {
        return mConverter.getXsdcConfig();
    }
    void addVolumeGroupstoEngineConfig();
    void init();
    void mapStreamToVolumeCurve(
            const ::android::audio::policy::configuration::Volume& xsdcVolumeCurve);
    void mapStreamsToVolumeCurves();
    void parseVolumes();
    ConversionResult<::aidl::android::media::audio::common::AudioHalVolumeCurve>
    convertVolumeCurveToAidl(
            const ::android::audio::policy::configuration::Volume& xsdcVolumeCurve);

    ::aidl::android::media::audio::common::AudioHalEngineConfig mAidlEngineConfig;
    XmlConverter<::android::audio::policy::configuration::AudioPolicyConfiguration> mConverter;
    // Named volume-curve references, used when a curve points at a <reference>.
    std::unordered_map<std::string, ::android::audio::policy::configuration::Reference>
            mVolumesReferenceMap;
    std::unordered_map<::android::audio::policy::configuration::AudioStreamType,
                       std::vector<::aidl::android::media::audio::common::AudioHalVolumeCurve>>
            mStreamToVolumeCurvesMap;
    std::unique_ptr<ModuleConfigs> mModuleConfigurations = std::make_unique<ModuleConfigs>();
};
} // namespace aidl::android::hardware::audio::core::internal

View File

@@ -0,0 +1,76 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <aidl/android/hardware/audio/core/BnBluetooth.h>
#include <aidl/android/hardware/audio/core/BnBluetoothA2dp.h>
#include <aidl/android/hardware/audio/core/BnBluetoothLe.h>
namespace aidl::android::hardware::audio::core {
// Mixin that lets the owning module register a callback to be notified when
// a Bluetooth parameter changes.
class ParamChangeHandler {
  public:
    ParamChangeHandler() = default;
    void registerHandler(std::function<ndk::ScopedAStatus()> handler) { mHandler = handler; }

  protected:
    // Invoked by subclasses after a parameter change; may be null if never registered.
    std::function<ndk::ScopedAStatus()> mHandler = nullptr;
};
// Default implementation of the IBluetooth HAL interface: stores the SCO and
// HFP configurations set by the client.
class Bluetooth : public BnBluetooth {
  public:
    Bluetooth();

  private:
    ndk::ScopedAStatus setScoConfig(const ScoConfig& in_config, ScoConfig* _aidl_return) override;
    ndk::ScopedAStatus setHfpConfig(const HfpConfig& in_config, HfpConfig* _aidl_return) override;

    ScoConfig mScoConfig;
    HfpConfig mHfpConfig;
};
// Default implementation of the IBluetoothA2dp HAL interface: tracks the
// enabled flag and notifies the registered handler on changes.
class BluetoothA2dp : public BnBluetoothA2dp, public ParamChangeHandler {
  public:
    BluetoothA2dp() = default;
    ndk::ScopedAStatus isEnabled(bool* _aidl_return) override;

  private:
    ndk::ScopedAStatus setEnabled(bool in_enabled) override;
    ndk::ScopedAStatus supportsOffloadReconfiguration(bool* _aidl_return) override;
    ndk::ScopedAStatus reconfigureOffload(
            const std::vector<::aidl::android::hardware::audio::core::VendorParameter>&
                    in_parameters) override;

    bool mEnabled = false;
};
// Default implementation of the IBluetoothLe HAL interface: mirrors
// BluetoothA2dp for LE audio.
class BluetoothLe : public BnBluetoothLe, public ParamChangeHandler {
  public:
    BluetoothLe() = default;
    ndk::ScopedAStatus isEnabled(bool* _aidl_return) override;

  private:
    ndk::ScopedAStatus setEnabled(bool in_enabled) override;
    ndk::ScopedAStatus supportsOffloadReconfiguration(bool* _aidl_return) override;
    ndk::ScopedAStatus reconfigureOffload(
            const std::vector<::aidl::android::hardware::audio::core::VendorParameter>&
                    in_parameters) override;

    bool mEnabled = false;
};
} // namespace aidl::android::hardware::audio::core

View File

@@ -0,0 +1,76 @@
/*
* Copyright (C) 2024 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <aidl/android/hardware/audio/core/BnConfig.h>
#include <aidl/android/media/audio/common/AudioHalEngineConfig.h>
#include <system/audio_config.h>
#include <android_audio_policy_capengine_configuration.h>
#include <android_audio_policy_capengine_configuration_enums.h>
#include "EngineConfigXmlConverter.h"
namespace aidl::android::hardware::audio::core::internal {
namespace capconfiguration = ::android::audio::policy::capengine::configuration;
namespace aidlcommon = ::aidl::android::media::audio::common;
// Converts the Configurable Audio Policy (CAP) engine XML configuration
// ('ConfigurableDomains' from the capengine XSD) into its AIDL representation,
// a list of 'AudioHalCapDomain'. Parsing and conversion happen in the
// constructor; use 'getStatus' / 'getError' to check the outcome before
// querying the result.
class CapEngineConfigXmlConverter {
  public:
    // 'configFilePath' is the path to the XML file; conversion is only
    // attempted if the file could be parsed.
    explicit CapEngineConfigXmlConverter(const std::string& configFilePath)
        : mConverter(configFilePath, &capconfiguration::readConfigurableDomains) {
        if (mConverter.getXsdcConfig()) {
            init();
        }
    }

    // Human-readable description of the parsing error, if any.
    std::string getError() const { return mConverter.getError(); }
    // '::android::OK' on success, otherwise the error code from parsing.
    ::android::status_t getStatus() const { return mConverter.getStatus(); }

    // Returns the converted domains (see 'mAidlCapDomains').
    std::optional<
            std::vector<std::optional<::aidl::android::media::audio::common::AudioHalCapDomain>>>&
    getAidlCapEngineConfig();

  private:
    // Converters for individual elements of the XSD configuration tree; all
    // are implemented in the .cpp file and used from 'init'.
    ConversionResult<std::vector<aidlcommon::AudioHalCapParameter>> convertSettingToAidl(
            const capconfiguration::SettingsType::Configuration& xsdcSetting);

    ConversionResult<std::vector<aidlcommon::AudioHalCapConfiguration>> convertConfigurationsToAidl(
            const std::vector<capconfiguration::ConfigurationsType>& xsdcConfigurationsVec,
            const std::vector<capconfiguration::SettingsType>& xsdcSettingsVec);

    ConversionResult<aidlcommon::AudioHalCapConfiguration> convertConfigurationToAidl(
            const capconfiguration::ConfigurationsType::Configuration& xsdcConfiguration,
            const capconfiguration::SettingsType::Configuration& xsdcSettingConfiguration);

    ConversionResult<aidlcommon::AudioHalCapParameter> convertParamToAidl(
            const capconfiguration::ConfigurableElementSettingsType& element);

    ConversionResult<aidlcommon::AudioHalCapConfiguration> convertConfigurationToAidl(
            const capconfiguration::ConfigurationsType::Configuration& xsdcConfiguration);
    ConversionResult<aidlcommon::AudioHalCapDomain> convertConfigurableDomainToAidl(
            const capconfiguration::ConfigurableDomainType& xsdcConfigurableDomain);

    // Access to the parsed XSD configuration; 'std::nullopt' if parsing failed.
    const std::optional<capconfiguration::ConfigurableDomains>& getXsdcConfig() {
        return mConverter.getXsdcConfig();
    }
    // Performs the XSD -> AIDL conversion; called from the constructor.
    void init();

    // The conversion result; populated by 'init'.
    std::optional<std::vector<std::optional<aidlcommon::AudioHalCapDomain>>> mAidlCapDomains;
    XmlConverter<capconfiguration::ConfigurableDomains> mConverter;
};
} // namespace aidl::android::hardware::audio::core::internal

View File

@@ -0,0 +1,59 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <memory>
#include <utility>
#include <android/binder_auto_utils.h>
#include <android/binder_ibinder_platform.h>
#include <system/thread_defs.h>
namespace aidl::android::hardware::audio::core {
// Helper used for interfaces that require a persistent instance. We hold them via a strong
// pointer. The binder token is retained for a call to 'setMinSchedulerPolicy'.
template <class C>
struct ChildInterface : private std::pair<std::shared_ptr<C>, ndk::SpAIBinder> {
    ChildInterface() = default;
    ChildInterface& operator=(const std::shared_ptr<C>& c) {
        return operator=(std::shared_ptr<C>(c));
    }
    // NOTE(review): the cached binder ('second') is not reset here, so assigning
    // a new instance after 'getBinder' was called would leave a stale token —
    // verify that callers assign the instance only once.
    ChildInterface& operator=(std::shared_ptr<C>&& c) {
        this->first = std::move(c);
        return *this;
    }
    explicit operator bool() const { return !!this->first; }
    C& operator*() const { return *(this->first); }
    // Fixed: 'std::shared_ptr' has no implicit conversion to a raw pointer,
    // 'get()' is required to satisfy the 'C*' return type.
    C* operator->() const { return this->first.get(); }
    std::shared_ptr<C> getPtr() { return this->first; }
    // Use 'getInstance' when returning the interface instance.
    std::shared_ptr<C> getInstance() {
        (void)getBinder();
        return this->first;
    }
    // Returns the binder of the instance, creating and caching it on first use;
    // the audio-appropriate scheduler policy is set up at that point.
    AIBinder* getBinder() {
        if (this->second.get() == nullptr) {
            const auto binder = this->second = this->first->asBinder();
            AIBinder_setMinSchedulerPolicy(binder.get(), SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
            AIBinder_setInheritRt(binder.get(), true);
        }
        return this->second.get();
    }
};
} // namespace aidl::android::hardware::audio::core

View File

@@ -0,0 +1,43 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <aidl/android/hardware/audio/core/BnConfig.h>
#include <system/audio_config.h>
#include "AudioPolicyConfigXmlConverter.h"
#include "EngineConfigXmlConverter.h"
namespace aidl::android::hardware::audio::core {
// Name of the engine configuration file, looked up via
// 'audio_find_readable_configuration_file' (see 'mEngConfigConverter' below).
static const std::string kEngineConfigFileName = "audio_policy_engine_configuration.xml";

// Default implementation of the 'IConfig' core HAL interface. Serves data
// produced by the audio policy and engine XML configuration converters.
class Config : public BnConfig {
  public:
    // 'apConverter' is stored by reference and must outlive this object.
    explicit Config(internal::AudioPolicyConfigXmlConverter& apConverter)
        : mAudioPolicyConverter(apConverter) {}

  private:
    ndk::ScopedAStatus getSurroundSoundConfig(SurroundSoundConfig* _aidl_return) override;
    ndk::ScopedAStatus getEngineConfig(
            aidl::android::media::audio::common::AudioHalEngineConfig* _aidl_return) override;

    internal::AudioPolicyConfigXmlConverter& mAudioPolicyConverter;
    // Parses the engine configuration file eagerly, at construction time.
    internal::EngineConfigXmlConverter mEngConfigConverter{
            ::android::audio_find_readable_configuration_file(kEngineConfigFileName.c_str())};
};
} // namespace aidl::android::hardware::audio::core

View File

@@ -0,0 +1,29 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <memory>
#include "Module.h"
namespace aidl::android::hardware::audio::core::internal {
// Creates the static port/route/patch configuration for the given module type;
// implemented in the .cpp file.
std::unique_ptr<Module::Configuration> getConfiguration(Module::Type moduleType);
// Returns PCM audio profiles covering 16-bit and 24-bit formats; see the
// implementation for the exact sample rates and channel layouts.
std::vector<aidl::android::media::audio::common::AudioProfile>
getStandard16And24BitPcmAudioProfiles();
} // namespace aidl::android::hardware::audio::core::internal

View File

@@ -0,0 +1,237 @@
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <condition_variable>
#include <mutex>
#include <android-base/thread_annotations.h>
#include <aidl/android/hardware/audio/common/SinkMetadata.h>
#include <aidl/android/hardware/audio/common/SourceMetadata.h>
#include <aidl/android/hardware/bluetooth/audio/BluetoothAudioStatus.h>
#include <aidl/android/hardware/bluetooth/audio/PcmConfiguration.h>
#include <aidl/android/hardware/bluetooth/audio/PresentationPosition.h>
#include <aidl/android/hardware/bluetooth/audio/SessionType.h>
#include <aidl/android/media/audio/common/AudioDeviceDescription.h>
namespace android::bluetooth::audio::aidl {
// State of a Bluetooth stream as tracked on the audio HAL side.
enum class BluetoothStreamState : uint8_t {
    DISABLED = 0,  // This stream is closing or Bluetooth profiles (A2DP/LE) is disabled
    STANDBY,
    STARTING,
    STARTED,
    SUSPENDING,
    UNKNOWN,
};

// Logging helper; implemented in the .cpp file.
std::ostream& operator<<(std::ostream& os, const BluetoothStreamState& state);
/**
 * Proxy for Bluetooth Audio HW Module to communicate with Bluetooth Audio
 * Session Control. All methods are not thread safe, so users must acquire a
 * lock. Note: currently, getState() of DevicePortProxy is only used for
 * verbose logging, it is not locked, so the state may not be synchronized.
 */
class BluetoothAudioPort {
  public:
    BluetoothAudioPort() = default;
    virtual ~BluetoothAudioPort() = default;

    /**
     * Fetch output control / data path of BluetoothAudioPort and setup
     * callbacks into BluetoothAudioProvider. If registerPort() returns false, the audio
     * HAL must delete this BluetoothAudioPort and return EINVAL to caller
     */
    virtual bool registerPort(
            const ::aidl::android::media::audio::common::AudioDeviceDescription&) = 0;

    /**
     * Unregister this BluetoothAudioPort from BluetoothAudioSessionControl.
     * Audio HAL must delete this BluetoothAudioPort after calling this.
     */
    virtual void unregisterPort() = 0;

    /**
     * When the Audio framework / HAL tries to query audio config about format,
     * channel mask and sample rate, it uses this function to fetch from the
     * Bluetooth stack
     */
    virtual bool loadAudioConfig(
            ::aidl::android::hardware::bluetooth::audio::PcmConfiguration&) = 0;

    /**
     * When the Audio framework / HAL wants to change the stream state, it invokes
     * these 4 functions to control the Bluetooth stack (Audio Control Path).
     * Note: standby(), start() and suspend() will return true when there are no errors.
     * Called by Audio framework / HAL to change the state to stand by. When A2DP/LE profile is
     * disabled, the port is first set to STANDBY by calling suspend and then mState is set to
     * DISABLED. To reset the state back to STANDBY this method is called.
     */
    virtual bool standby() = 0;

    /**
     * Called by Audio framework / HAL to start the stream
     */
    virtual bool start() = 0;

    /**
     * Called by Audio framework / HAL to suspend the stream
     */
    virtual bool suspend() = 0;

    /**
     * Called by Audio framework / HAL to stop the stream
     */
    virtual void stop() = 0;

    /**
     * Called by the Audio framework / HAL to fetch information about audio frames
     * presented to an external sink, or frames presented for an internal sink
     */
    virtual bool getPresentationPosition(
            ::aidl::android::hardware::bluetooth::audio::PresentationPosition&) const = 0;

    /**
     * Called by the Audio framework / HAL when the metadata of the stream's
     * source has been changed. Default implementation is a no-op that reports
     * the update as not handled.
     */
    virtual bool updateSourceMetadata(
            const ::aidl::android::hardware::audio::common::SourceMetadata&) const {
        return false;
    }

    /**
     * Called by the Audio framework / HAL when the metadata of the stream's
     * sink has been changed. Default implementation is a no-op that reports
     * the update as not handled.
     */
    virtual bool updateSinkMetadata(
            const ::aidl::android::hardware::audio::common::SinkMetadata&) const {
        return false;
    }

    /**
     * Return the current BluetoothStreamState
     */
    virtual BluetoothStreamState getState() const = 0;

    /**
     * Set the current BluetoothStreamState
     */
    virtual bool setState(BluetoothStreamState) = 0;

    // Profile type of the underlying session.
    virtual bool isA2dp() const = 0;
    virtual bool isLeAudio() const = 0;

    // Retrieves the preferred data transfer interval in microseconds.
    virtual bool getPreferredDataIntervalUs(size_t&) const = 0;

    // Data path; base implementations transfer nothing. Overridden by the
    // output / input port specializations below.
    virtual size_t writeData(const void*, size_t) const { return 0; }
    virtual size_t readData(void*, size_t) const { return 0; }
};
// Implementation of 'BluetoothAudioPort' on top of the Bluetooth audio AIDL
// session control (see 'BluetoothAudioSessionControl' referenced above).
class BluetoothAudioPortAidl : public BluetoothAudioPort {
  public:
    BluetoothAudioPortAidl();
    virtual ~BluetoothAudioPortAidl();

    bool registerPort(const ::aidl::android::media::audio::common::AudioDeviceDescription&
                              description) override;
    void unregisterPort() override;
    bool loadAudioConfig(
            ::aidl::android::hardware::bluetooth::audio::PcmConfiguration& audio_cfg) override;
    bool standby() override;
    bool start() override;
    bool suspend() override;
    void stop() override;
    bool getPresentationPosition(::aidl::android::hardware::bluetooth::audio::PresentationPosition&
                                         presentation_position) const override;
    bool updateSourceMetadata(const ::aidl::android::hardware::audio::common::SourceMetadata&
                                      sourceMetadata) const override;
    bool updateSinkMetadata(const ::aidl::android::hardware::audio::common::SinkMetadata&
                                    sinkMetadata) const override;

    /**
     * Return the current BluetoothStreamState
     * Note: This method is used for logging, does not lock, so value returned may not be latest
     */
    BluetoothStreamState getState() const override NO_THREAD_SAFETY_ANALYSIS;
    bool setState(BluetoothStreamState state) override;
    bool isA2dp() const override;
    bool isLeAudio() const override;
    bool getPreferredDataIntervalUs(size_t& interval_us) const override;

  protected:
    // Cookie presumably identifying this port in callbacks from the Bluetooth
    // stack (see 'controlResultHandler' / 'sessionChangedHandler' below).
    uint16_t mCookie;
    BluetoothStreamState mState GUARDED_BY(mCvMutex);
    ::aidl::android::hardware::bluetooth::audio::SessionType mSessionType;
    // WR to support Mono: True if fetching Stereo and mixing into Mono
    bool mIsStereoToMono = false;

    bool inUse() const;
    std::string debugMessage() const;

  private:
    // start()/suspend() report state change status via callback. Wait until kMaxWaitingTimeMs or a
    // state change after a call to start()/suspend() and analyse the returned status. Below mutex,
    // conditional variable serves this purpose.
    mutable std::mutex mCvMutex;
    std::condition_variable mInternalCv GUARDED_BY(mCvMutex);

    // Check and initialize session type for |devices| If failed, this
    // BluetoothAudioPortAidl is not initialized and must be deleted.
    bool initSessionType(
            const ::aidl::android::media::audio::common::AudioDeviceDescription& description);
    // Blocks on 'mInternalCv' waiting for the given state (see the note above).
    bool condWaitState(BluetoothStreamState state);
    // Callbacks invoked by the Bluetooth stack.
    void controlResultHandler(
            uint16_t cookie,
            const ::aidl::android::hardware::bluetooth::audio::BluetoothAudioStatus& status);
    void sessionChangedHandler(uint16_t cookie);
};
// Output (playback) specialization: provides the write data path.
class BluetoothAudioPortAidlOut : public BluetoothAudioPortAidl {
  public:
    bool loadAudioConfig(
            ::aidl::android::hardware::bluetooth::audio::PcmConfiguration& audio_cfg) override;

    // The audio data path to the Bluetooth stack (Software encoding)
    size_t writeData(const void* buffer, size_t bytes) const override;
};
// Input (capture) specialization: provides the read data path.
class BluetoothAudioPortAidlIn : public BluetoothAudioPortAidl {
  public:
    // The audio data path from the Bluetooth stack (Software decoded)
    size_t readData(void* buffer, size_t bytes) const override;
};
} // namespace android::bluetooth::audio::aidl

View File

@@ -0,0 +1,53 @@
/*
* Copyright (C) 2024 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include "core-impl/Stream.h"
namespace aidl::android::hardware::audio::core {
// Stub implementation of the stream 'DriverInterface' (see core-impl/Stream.h)
// which does not exchange data with real hardware; method implementations live
// in the .cpp file.
class DriverStubImpl : virtual public DriverInterface {
  public:
    // Delegates to the main constructor with a default sleep time of 500 us;
    // presumably used to simulate transfer latency in asynchronous mode —
    // see 'mAsyncSleepTimeUs'.
    explicit DriverStubImpl(const StreamContext& context)
        : DriverStubImpl(context, 500 /*asyncSleepTimeUs*/) {}
    DriverStubImpl(const StreamContext& context, int asyncSleepTimeUs);

    ::android::status_t init(DriverCallbackInterface* callback) override;
    ::android::status_t drain(StreamDescriptor::DrainMode) override;
    ::android::status_t flush() override;
    ::android::status_t pause() override;
    ::android::status_t standby() override;
    ::android::status_t start() override;
    ::android::status_t transfer(void* buffer, size_t frameCount, size_t* actualFrameCount,
                                 int32_t* latencyMs) override;
    void shutdown() override;

  protected:
    // Stream parameters; presumably captured from the StreamContext passed to
    // the constructor (they are immutable after construction).
    const size_t mBufferSizeFrames;
    const size_t mFrameSizeBytes;
    const int mSampleRate;
    const bool mIsAsynchronous;
    const bool mIsInput;
    const int32_t mMixPortHandle;
    const int mAsyncSleepTimeUs;
    bool mIsInitialized = false;  // Used for validating the state machine logic.
    bool mIsStandby = true;       // Used for validating the state machine logic.
    int64_t mStartTimeNs = 0;
    long mFramesSinceStart = 0;
};
} // namespace aidl::android::hardware::audio::core

View File

@@ -0,0 +1,86 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <string>
#include <unordered_map>
#include <android_audio_policy_engine_configuration.h>
#include <android_audio_policy_engine_configuration_enums.h>
#include <media/AidlConversionUtil.h>
#include "core-impl/XmlConverter.h"
namespace aidl::android::hardware::audio::core::internal {
// Converts the audio policy engine XML configuration into the AIDL
// 'AudioHalEngineConfig' structure. Parsing and conversion happen in the
// constructor; use 'getStatus' / 'getError' to check the outcome before
// querying the result.
class EngineConfigXmlConverter {
  public:
    // 'configFilePath' is the path to the XML file; conversion is only
    // attempted if the file could be parsed.
    explicit EngineConfigXmlConverter(const std::string& configFilePath)
        : mConverter(configFilePath, &::android::audio::policy::engine::configuration::read) {
        if (mConverter.getXsdcConfig()) {
            init();
        }
    }

    // Human-readable description of the parsing error, if any.
    std::string getError() const { return mConverter.getError(); }
    // '::android::OK' on success, otherwise the error code from parsing.
    ::android::status_t getStatus() const { return mConverter.getStatus(); }

    // Returns the conversion result (see 'mAidlEngineConfig').
    ::aidl::android::media::audio::common::AudioHalEngineConfig& getAidlEngineConfig();

  private:
    // Access to the parsed XSD configuration; 'std::nullopt' if parsing failed.
    const std::optional<::android::audio::policy::engine::configuration::Configuration>&
    getXsdcConfig() {
        return mConverter.getXsdcConfig();
    }
    // Performs the XSD -> AIDL conversion; called from the constructor.
    void init();
    void initProductStrategyMap();

    // Converters for individual elements of the XSD configuration tree.
    ConversionResult<::aidl::android::media::audio::common::AudioAttributes>
    convertAudioAttributesToAidl(
            const ::android::audio::policy::engine::configuration::AttributesType&
                    xsdcAudioAttributes);
    ConversionResult<::aidl::android::media::audio::common::AudioHalAttributesGroup>
    convertAttributesGroupToAidl(
            const ::android::audio::policy::engine::configuration::AttributesGroup&
                    xsdcAttributesGroup);
    ConversionResult<::aidl::android::media::audio::common::AudioHalProductStrategy>
    convertProductStrategyToAidl(const ::android::audio::policy::engine::configuration::
                                         ProductStrategies::ProductStrategy& xsdcProductStrategy);
    ConversionResult<int> convertProductStrategyIdToAidl(int xsdcId);
    ConversionResult<int> convertProductStrategyNameToAidl(
            const std::string& xsdcProductStrategyName);
    ConversionResult<::aidl::android::media::audio::common::AudioHalVolumeCurve>
    convertVolumeCurveToAidl(
            const ::android::audio::policy::engine::configuration::Volume& xsdcVolumeCurve);
    ConversionResult<::aidl::android::media::audio::common::AudioHalVolumeGroup>
    convertVolumeGroupToAidl(
            const ::android::audio::policy::engine::configuration::VolumeGroupsType::VolumeGroup&
                    xsdcVolumeGroup);

    // The conversion result; populated by 'init'.
    ::aidl::android::media::audio::common::AudioHalEngineConfig mAidlEngineConfig;
    XmlConverter<::android::audio::policy::engine::configuration::Configuration> mConverter;
    // Lookup tables built while converting; keyed by the names used in the XML.
    std::unordered_map<std::string,
                       ::android::audio::policy::engine::configuration::AttributesRefType>
            mAttributesReferenceMap;
    std::unordered_map<std::string, ::android::audio::policy::engine::configuration::VolumeRef>
            mVolumesReferenceMap;
    std::unordered_map<std::string, int> mProductStrategyMap;
    // Next id to assign to a vendor-defined product strategy.
    int mNextVendorStrategy = ::aidl::android::media::audio::common::AudioHalProductStrategy::
            VENDOR_STRATEGY_ID_START;
    std::optional<int> mDefaultProductStrategyId;
};
} // namespace aidl::android::hardware::audio::core::internal

View File

@@ -0,0 +1,281 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <functional>
#include <iostream>
#include <map>
#include <memory>
#include <optional>
#include <set>
#include <Utils.h>
#include <aidl/android/hardware/audio/core/BnModule.h>
#include "core-impl/ChildInterface.h"
#include "core-impl/Stream.h"
namespace aidl::android::hardware::audio::core {
class Module : public BnModule {
public:
struct Configuration {
std::vector<::aidl::android::media::audio::common::AudioPort> ports;
std::vector<::aidl::android::media::audio::common::AudioPortConfig> portConfigs;
std::vector<::aidl::android::media::audio::common::AudioPortConfig> initialConfigs;
// Port id -> List of profiles to use when the device port state is set to 'connected'
// in connection simulation mode.
std::map<int32_t, std::vector<::aidl::android::media::audio::common::AudioProfile>>
connectedProfiles;
std::vector<AudioRoute> routes;
std::vector<AudioPatch> patches;
int32_t nextPortId = 1;
int32_t nextPatchId = 1;
};
enum Type : int { DEFAULT, R_SUBMIX, STUB, USB, BLUETOOTH };
static std::shared_ptr<Module> createInstance(Type type) {
return createInstance(type, std::make_unique<Configuration>());
}
static std::shared_ptr<Module> createInstance(Type type,
std::unique_ptr<Configuration>&& config);
static std::optional<Type> typeFromString(const std::string& type);
Module(Type type, std::unique_ptr<Configuration>&& config);
protected:
// The vendor extension done via inheritance can override interface methods and augment
// a call to the base implementation.
binder_status_t dump(int fd, const char** args, uint32_t numArgs) override;
ndk::ScopedAStatus setModuleDebug(
const ::aidl::android::hardware::audio::core::ModuleDebug& in_debug) override;
ndk::ScopedAStatus getTelephony(std::shared_ptr<ITelephony>* _aidl_return) override;
ndk::ScopedAStatus getBluetooth(std::shared_ptr<IBluetooth>* _aidl_return) override;
ndk::ScopedAStatus getBluetoothA2dp(std::shared_ptr<IBluetoothA2dp>* _aidl_return) override;
ndk::ScopedAStatus getBluetoothLe(std::shared_ptr<IBluetoothLe>* _aidl_return) override;
ndk::ScopedAStatus connectExternalDevice(
const ::aidl::android::media::audio::common::AudioPort& in_templateIdAndAdditionalData,
::aidl::android::media::audio::common::AudioPort* _aidl_return) override;
ndk::ScopedAStatus disconnectExternalDevice(int32_t in_portId) override;
ndk::ScopedAStatus prepareToDisconnectExternalDevice(int32_t in_portId) override;
ndk::ScopedAStatus getAudioPatches(std::vector<AudioPatch>* _aidl_return) override;
ndk::ScopedAStatus getAudioPort(
int32_t in_portId,
::aidl::android::media::audio::common::AudioPort* _aidl_return) override;
ndk::ScopedAStatus getAudioPortConfigs(
std::vector<::aidl::android::media::audio::common::AudioPortConfig>* _aidl_return)
override;
ndk::ScopedAStatus getAudioPorts(
std::vector<::aidl::android::media::audio::common::AudioPort>* _aidl_return) override;
ndk::ScopedAStatus getAudioRoutes(std::vector<AudioRoute>* _aidl_return) override;
ndk::ScopedAStatus getAudioRoutesForAudioPort(
int32_t in_portId,
std::vector<::aidl::android::hardware::audio::core::AudioRoute>* _aidl_return) override;
ndk::ScopedAStatus openInputStream(
const ::aidl::android::hardware::audio::core::IModule::OpenInputStreamArguments&
in_args,
::aidl::android::hardware::audio::core::IModule::OpenInputStreamReturn* _aidl_return)
override;
ndk::ScopedAStatus openOutputStream(
const ::aidl::android::hardware::audio::core::IModule::OpenOutputStreamArguments&
in_args,
::aidl::android::hardware::audio::core::IModule::OpenOutputStreamReturn* _aidl_return)
override;
ndk::ScopedAStatus getSupportedPlaybackRateFactors(
SupportedPlaybackRateFactors* _aidl_return) override;
ndk::ScopedAStatus setAudioPatch(const AudioPatch& in_requested,
AudioPatch* _aidl_return) override;
ndk::ScopedAStatus setAudioPortConfig(
const ::aidl::android::media::audio::common::AudioPortConfig& in_requested,
::aidl::android::media::audio::common::AudioPortConfig* out_suggested,
bool* _aidl_return) override;
ndk::ScopedAStatus resetAudioPatch(int32_t in_patchId) override;
ndk::ScopedAStatus resetAudioPortConfig(int32_t in_portConfigId) override;
ndk::ScopedAStatus getMasterMute(bool* _aidl_return) override;
ndk::ScopedAStatus setMasterMute(bool in_mute) override;
ndk::ScopedAStatus getMasterVolume(float* _aidl_return) override;
ndk::ScopedAStatus setMasterVolume(float in_volume) override;
ndk::ScopedAStatus getMicMute(bool* _aidl_return) override;
ndk::ScopedAStatus setMicMute(bool in_mute) override;
ndk::ScopedAStatus getMicrophones(
std::vector<::aidl::android::media::audio::common::MicrophoneInfo>* _aidl_return)
override;
ndk::ScopedAStatus updateAudioMode(
::aidl::android::media::audio::common::AudioMode in_mode) override;
ndk::ScopedAStatus updateScreenRotation(
::aidl::android::hardware::audio::core::IModule::ScreenRotation in_rotation) override;
ndk::ScopedAStatus updateScreenState(bool in_isTurnedOn) override;
ndk::ScopedAStatus getSoundDose(std::shared_ptr<sounddose::ISoundDose>* _aidl_return) override;
ndk::ScopedAStatus generateHwAvSyncId(int32_t* _aidl_return) override;
ndk::ScopedAStatus getVendorParameters(const std::vector<std::string>& in_ids,
std::vector<VendorParameter>* _aidl_return) override;
ndk::ScopedAStatus setVendorParameters(const std::vector<VendorParameter>& in_parameters,
bool in_async) override;
ndk::ScopedAStatus addDeviceEffect(
int32_t in_portConfigId,
const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>& in_effect)
override;
ndk::ScopedAStatus removeDeviceEffect(
int32_t in_portConfigId,
const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>& in_effect)
override;
ndk::ScopedAStatus getMmapPolicyInfos(
::aidl::android::media::audio::common::AudioMMapPolicyType mmapPolicyType,
std::vector<::aidl::android::media::audio::common::AudioMMapPolicyInfo>* _aidl_return)
override;
ndk::ScopedAStatus supportsVariableLatency(bool* _aidl_return) override;
ndk::ScopedAStatus getAAudioMixerBurstCount(int32_t* _aidl_return) override;
ndk::ScopedAStatus getAAudioHardwareBurstMinUsec(int32_t* _aidl_return) override;
// The maximum stream buffer size is 1 GiB = 2 ** 30 bytes;
static constexpr int32_t kMaximumStreamBufferSizeBytes = 1 << 30;
private:
struct VendorDebug {
static const std::string kForceTransientBurstName;
static const std::string kForceSynchronousDrainName;
bool forceTransientBurst = false;
bool forceSynchronousDrain = false;
};
// ids of device ports created at runtime via 'connectExternalDevice'.
// Also stores a list of ids of mix ports with dynamic profiles that were populated from
// the connected port. This list can be empty, thus an int->int multimap can't be used.
using ConnectedDevicePorts = std::map<int32_t, std::set<int32_t>>;
// Maps port ids and port config ids to patch ids.
// Multimap because both ports and configs can be used by multiple patches.
using Patches = std::multimap<int32_t, int32_t>;
static const std::string kClipTransitionSupportName;
const Type mType;
std::unique_ptr<Configuration> mConfig;
ModuleDebug mDebug;
VendorDebug mVendorDebug;
ConnectedDevicePorts mConnectedDevicePorts;
Streams mStreams;
Patches mPatches;
bool mMicMute = false;
bool mMasterMute = false;
float mMasterVolume = 1.0f;
ChildInterface<sounddose::SoundDose> mSoundDose;
std::optional<bool> mIsMmapSupported;
protected:
// The following virtual functions are intended for vendor extension via inheritance.
virtual ndk::ScopedAStatus createInputStream(
StreamContext&& context,
const ::aidl::android::hardware::audio::common::SinkMetadata& sinkMetadata,
const std::vector<::aidl::android::media::audio::common::MicrophoneInfo>& microphones,
std::shared_ptr<StreamIn>* result) = 0;
virtual ndk::ScopedAStatus createOutputStream(
StreamContext&& context,
const ::aidl::android::hardware::audio::common::SourceMetadata& sourceMetadata,
const std::optional<::aidl::android::media::audio::common::AudioOffloadInfo>&
offloadInfo,
std::shared_ptr<StreamOut>* result) = 0;
// If the module is unable to populate the connected device port correctly, the returned error
// code must correspond to the errors of `IModule.connectedExternalDevice` method.
virtual ndk::ScopedAStatus populateConnectedDevicePort(
::aidl::android::media::audio::common::AudioPort* audioPort, int32_t nextPortId);
// If the module finds that the patch endpoints configurations are not matched, the returned
// error code must correspond to the errors of `IModule.setAudioPatch` method.
virtual ndk::ScopedAStatus checkAudioPatchEndpointsMatch(
const std::vector<::aidl::android::media::audio::common::AudioPortConfig*>& sources,
const std::vector<::aidl::android::media::audio::common::AudioPortConfig*>& sinks);
virtual void onExternalDeviceConnectionChanged(
const ::aidl::android::media::audio::common::AudioPort& audioPort, bool connected);
virtual void onPrepareToDisconnectExternalDevice(
const ::aidl::android::media::audio::common::AudioPort& audioPort);
virtual ndk::ScopedAStatus onMasterMuteChanged(bool mute);
virtual ndk::ScopedAStatus onMasterVolumeChanged(float volume);
virtual std::vector<::aidl::android::media::audio::common::MicrophoneInfo> getMicrophoneInfos();
virtual std::unique_ptr<Configuration> initializeConfig();
virtual int32_t getNominalLatencyMs(
const ::aidl::android::media::audio::common::AudioPortConfig& portConfig);
virtual ndk::ScopedAStatus calculateBufferSizeFrames(
const ::aidl::android::media::audio::common::AudioFormatDescription &format,
int32_t latencyMs, int32_t sampleRateHz, int32_t *bufferSizeFrames);
virtual ndk::ScopedAStatus createMmapBuffer(
const ::aidl::android::media::audio::common::AudioPortConfig& portConfig,
int32_t bufferSizeFrames, int32_t frameSizeBytes, MmapBufferDescriptor* desc);
// Utility and helper functions accessible to subclasses.
static int32_t calculateBufferSizeFramesForPcm(int32_t latencyMs, int32_t sampleRateHz) {
const int32_t rawSizeFrames =
aidl::android::hardware::audio::common::frameCountFromDurationMs(latencyMs,
sampleRateHz);
// Round up to nearest 16 frames since in the framework this is the size of a mixer burst.
const int32_t multipleOf16 = (rawSizeFrames + 15) & ~15;
if (sampleRateHz < 44100 || multipleOf16 <= 512) return multipleOf16;
// Larger buffers should use powers of 2.
int32_t powerOf2 = 1;
while (powerOf2 < multipleOf16) powerOf2 <<= 1;
return powerOf2;
}
    // Propagates an update of Bluetooth-related parameters (implementation in the .cpp).
    ndk::ScopedAStatus bluetoothParametersUpdated();
    // Removes internal bookkeeping for the patch with the given id.
    void cleanUpPatch(int32_t patchId);
    // Builds a StreamContext for a new stream on the given port config, wiring in the
    // optional async and output event callbacks; the result is delivered via 'out_context'.
    ndk::ScopedAStatus createStreamContext(
            int32_t in_portConfigId, int64_t in_bufferSizeFrames,
            std::shared_ptr<IStreamCallback> asyncCallback,
            std::shared_ptr<IStreamOutEventCallback> outEventCallback,
            ::aidl::android::hardware::audio::core::StreamContext* out_context);
    // Returns the devices currently connected to the given port config.
    std::vector<::aidl::android::media::audio::common::AudioDevice> findConnectedDevices(
            int32_t portConfigId);
    // Returns ids of port configs currently connected to the given port config.
    std::set<int32_t> findConnectedPortConfigIds(int32_t portConfigId);
    // Finds the port to use for a new stream opened on 'in_portConfigId';
    // the port is returned via the 'port' out-parameter.
    ndk::ScopedAStatus findPortIdForNewStream(
            int32_t in_portConfigId, ::aidl::android::media::audio::common::AudioPort** port);
    // Fills 'config' with a default configuration for the given port.
    // Note: does not assign an ID to the config.
    bool generateDefaultPortConfig(const ::aidl::android::media::audio::common::AudioPort& port,
                                   ::aidl::android::media::audio::common::AudioPortConfig* config);
    // Returns the routes in which the given port participates.
    std::vector<AudioRoute*> getAudioRoutesForAudioPortImpl(int32_t portId);
    // Accessor for the module configuration.
    Configuration& getConfig();
    const ConnectedDevicePorts& getConnectedDevicePorts() const { return mConnectedDevicePorts; }
    // Returns the devices corresponding to the given device port config ids.
    std::vector<::aidl::android::media::audio::common::AudioDevice>
    getDevicesFromDevicePortConfigIds(const std::set<int32_t>& devicePortConfigIds);
    bool getMasterMute() const { return mMasterMute; }
    // NOTE(review): returns bool although the master volume is delivered as float
    // (see onMasterVolumeChanged) — confirm the type of mMasterVolume; a float
    // value would be truncated to bool here.
    bool getMasterVolume() const { return mMasterVolume; }
    bool getMicMute() const { return mMicMute; }
    const ModuleDebug& getModuleDebug() const { return mDebug; }
    const Patches& getPatches() const { return mPatches; }
    // Returns ids of ports routable with the given port; when 'routes' is non-null,
    // the routes involved are also returned through it.
    std::set<int32_t> getRoutableAudioPortIds(int32_t portId,
                                              std::vector<AudioRoute*>* routes = nullptr);
    const Streams& getStreams() const { return mStreams; }
    Type getType() const { return mType; }
    bool isMmapSupported();
    // Populates profiles of connected device ports.
    void populateConnectedProfiles();
    // Maps a collection of port config ids to the ids of the ports that own them.
    template <typename C>
    std::set<int32_t> portIdsFromPortConfigIds(C portConfigIds);
    // Registers a new patch in internal bookkeeping.
    void registerPatch(const AudioPatch& patch);
    // Shared port-config application logic (presumably backs IModule::setAudioPortConfig):
    // 'fillPortConfig' supplies a default config for a port, 'out_suggested' receives the
    // accepted or suggested config, and 'applied' reports whether the requested config
    // was applied as-is.
    ndk::ScopedAStatus setAudioPortConfigImpl(
            const ::aidl::android::media::audio::common::AudioPortConfig& in_requested,
            const std::function<bool(const ::aidl::android::media::audio::common::AudioPort& port,
                                     ::aidl::android::media::audio::common::AudioPortConfig*
                                             config)>& fillPortConfig,
            ::aidl::android::media::audio::common::AudioPortConfig* out_suggested, bool* applied);
    // Updates the connected state of streams affected by replacing 'oldPatch' with 'newPatch'.
    ndk::ScopedAStatus updateStreamsConnectedState(const AudioPatch& oldPatch,
                                                   const AudioPatch& newPatch);
    // Applies the requested gain config to the given port; returns whether it succeeded.
    bool setAudioPortConfigGain(
            const ::aidl::android::media::audio::common::AudioPort& port,
            const ::aidl::android::media::audio::common::AudioGainConfig& gainRequested);
};
// Enables logging of 'Module::Type' values via standard output streams.
std::ostream& operator<<(std::ostream& os, Module::Type t);
} // namespace aidl::android::hardware::audio::core

View File

@@ -0,0 +1,40 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include "core-impl/Module.h"
namespace aidl::android::hardware::audio::core {
// This class is intended to be used as a base class for implementations
// that use TinyAlsa. This can be either a primary module or a USB Audio
// module. This class does not define a complete module implementation,
// and should never be used on its own. Derived classes are expected to
// provide necessary overrides for all interface methods omitted here.
class ModuleAlsa : public Module {
  public:
    // Forwards the module type and the initial configuration to the base 'Module'.
    ModuleAlsa(Type type, std::unique_ptr<Configuration>&& config)
        : Module(type, std::move(config)) {}
  protected:
    // Extension methods of 'Module'.
    // Fills in 'audioPort' for a connected device; presumably derives the supported
    // profiles from the ALSA device capabilities — see ModuleAlsa.cpp.
    ndk::ScopedAStatus populateConnectedDevicePort(
            ::aidl::android::media::audio::common::AudioPort* audioPort,
            int32_t nextPortId) override;
};
} // namespace aidl::android::hardware::audio::core

Some files were not shown because too many files have changed in this diff Show More