Compare commits

..

7 Commits

Author SHA1 Message Date
Attila Uygun be36d121b0 Update Oboe to 1.7.0 2023-05-21 23:41:15 +02:00
Attila Uygun 217420823c Update Android NDK to 25.2.9519653 2023-05-21 23:41:15 +02:00
Attila Uygun b2e5f19963 Remove exceptions 2023-05-21 23:41:15 +02:00
Attila Uygun 971540dce2 Runtime support to call ANativeWindow_setFrameRate for minSDK < 30 2023-05-21 23:02:24 +02:00
Attila Uygun 5cad50bf55 fix audio 2023-05-21 22:23:04 +02:00
Attila Uygun 41bb5e286a Fix for vulkan renderer 2023-05-21 22:23:04 +02:00
Attila Uygun 0454bf6a18 Disable ads in debug build 2023-05-21 22:23:04 +02:00
120 changed files with 3614 additions and 765 deletions

View File

@ -25,5 +25,5 @@ cd build/android
[glslang](https://github.com/KhronosGroup/glslang), [glslang](https://github.com/KhronosGroup/glslang),
[spirv-reflect](https://github.com/KhronosGroup/SPIRV-Reflect), [spirv-reflect](https://github.com/KhronosGroup/SPIRV-Reflect),
[vma](https://github.com/GPUOpen-LibrariesAndSDKs/VulkanMemoryAllocator), [vma](https://github.com/GPUOpen-LibrariesAndSDKs/VulkanMemoryAllocator),
[vulkan-sdk](https://vulkan.lunarg.com) [vulkan-sdk](https://vulkan.lunarg.com),
[volk](https://github.com/zeux/volk) [volk](https://github.com/zeux/volk)

View File

@ -2,7 +2,7 @@ apply plugin: 'com.android.application'
android { android {
compileSdk 33 compileSdk 33
ndkVersion '25.1.8937393' ndkVersion '25.2.9519653'
defaultConfig { defaultConfig {
applicationId = 'com.woom.game' applicationId = 'com.woom.game'

View File

@ -164,7 +164,8 @@ int AudioDriverAlsa::GetHardwareSampleRate() {
void AudioDriverAlsa::StartAudioThread() { void AudioDriverAlsa::StartAudioThread() {
LOG << "Starting audio thread."; LOG << "Starting audio thread.";
terminate_audio_thread_.store(false, std::memory_order_relaxed); terminate_audio_thread_.store(false, std::memory_order_relaxed);
suspend_audio_thread_.store(true, std::memory_order_relaxed); suspend_audio_thread_.store(delegate_ ? false : true,
std::memory_order_relaxed);
audio_thread_ = std::thread(&AudioDriverAlsa::AudioThreadMain, this); audio_thread_ = std::thread(&AudioDriverAlsa::AudioThreadMain, this);
} }

View File

@ -15,13 +15,13 @@ AudioDriverOboe::~AudioDriverOboe() = default;
void AudioDriverOboe::SetDelegate(AudioDriverDelegate* delegate) { void AudioDriverOboe::SetDelegate(AudioDriverDelegate* delegate) {
delegate_ = delegate; delegate_ = delegate;
Resume(); stream_->start();
} }
bool AudioDriverOboe::Initialize() { bool AudioDriverOboe::Initialize() {
LOG << "Initializing audio system."; LOG << "Initializing audio system.";
return RestartStream(true); return RestartStream();
} }
void AudioDriverOboe::Shutdown() { void AudioDriverOboe::Shutdown() {
@ -31,11 +31,11 @@ void AudioDriverOboe::Shutdown() {
} }
void AudioDriverOboe::Suspend() { void AudioDriverOboe::Suspend() {
stream_->stop(); stream_->pause();
} }
void AudioDriverOboe::Resume() { void AudioDriverOboe::Resume() {
RestartStream(); stream_->start();
} }
int AudioDriverOboe::GetHardwareSampleRate() { int AudioDriverOboe::GetHardwareSampleRate() {
@ -64,7 +64,7 @@ void AudioDriverOboe::StreamCallback::onErrorAfterClose(
driver_->RestartStream(); driver_->RestartStream();
} }
bool AudioDriverOboe::RestartStream(bool suspended) { bool AudioDriverOboe::RestartStream() {
oboe::AudioStreamBuilder builder; oboe::AudioStreamBuilder builder;
oboe::Result result = oboe::Result result =
builder.setSharingMode(oboe::SharingMode::Exclusive) builder.setSharingMode(oboe::SharingMode::Exclusive)
@ -88,7 +88,7 @@ bool AudioDriverOboe::RestartStream(bool suspended) {
return false; return false;
} }
if (!suspended) if (delegate_)
stream_->start(); stream_->start();
return true; return true;
} }

View File

@ -50,7 +50,7 @@ class AudioDriverOboe final : public AudioDriver {
AudioDriverDelegate* delegate_ = nullptr; AudioDriverDelegate* delegate_ = nullptr;
bool RestartStream(bool suspended = false); bool RestartStream();
}; };
} // namespace eng } // namespace eng

View File

@ -2,6 +2,7 @@ package com.kaliber.base;
import android.app.NativeActivity; import android.app.NativeActivity;
import android.content.Intent; import android.content.Intent;
import android.content.pm.ApplicationInfo;
import android.net.Uri; import android.net.Uri;
import android.os.Bundle; import android.os.Bundle;
import android.os.Handler; import android.os.Handler;
@ -37,12 +38,18 @@ public class KaliberActivity extends NativeActivity {
private InterstitialAd mInterstitialAd; private InterstitialAd mInterstitialAd;
boolean mIsDebuggable = false;
public static native void onShowAdResult(boolean succeeded); public static native void onShowAdResult(boolean succeeded);
@Override @Override
protected void onCreate(Bundle savedInstanceState) { protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState); super.onCreate(savedInstanceState);
ApplicationInfo appInfo = getApplicationContext().getApplicationInfo();
mIsDebuggable = (appInfo.flags & ApplicationInfo.FLAG_DEBUGGABLE) != 0;
if (!mIsDebuggable) {
MobileAds.initialize(this, new OnInitializationCompleteListener() { MobileAds.initialize(this, new OnInitializationCompleteListener() {
@Override @Override
public void onInitializationComplete(InitializationStatus initializationStatus) { public void onInitializationComplete(InitializationStatus initializationStatus) {
@ -51,6 +58,7 @@ public class KaliberActivity extends NativeActivity {
}); });
loadInterstitialAd(); loadInterstitialAd();
} }
}
public void setKeepScreenOn(final boolean keepScreenOn) { public void setKeepScreenOn(final boolean keepScreenOn) {
runOnUiThread(new Runnable() { runOnUiThread(new Runnable() {
@ -66,6 +74,8 @@ public class KaliberActivity extends NativeActivity {
} }
public void showInterstitialAd() { public void showInterstitialAd() {
if (mIsDebuggable) return;
runOnUiThread(new Runnable() { runOnUiThread(new Runnable() {
@Override @Override
public void run() { public void run() {

View File

@ -16,8 +16,6 @@ using namespace base;
namespace eng { namespace eng {
Platform::InternalError Platform::internal_error;
Platform::Platform() = default; Platform::Platform() = default;
Platform::~Platform() = default; Platform::~Platform() = default;
@ -33,10 +31,8 @@ void Platform::InitializeCommon() {
#elif defined(__linux__) #elif defined(__linux__)
audio_driver_ = std::make_unique<AudioDriverAlsa>(); audio_driver_ = std::make_unique<AudioDriverAlsa>();
#endif #endif
if (!audio_driver_->Initialize()) { bool res = audio_driver_->Initialize();
LOG << "Failed to initialize audio driver."; CHECK(res) << "Failed to initialize audio driver.";
throw internal_error;
}
auto context = std::make_unique<VulkanContext>(); auto context = std::make_unique<VulkanContext>();
if (context->Initialize()) { if (context->Initialize()) {
@ -57,10 +53,8 @@ void Platform::ShutdownCommon() {
void Platform::RunMainLoop() { void Platform::RunMainLoop() {
engine_ = engine_ =
std::make_unique<Engine>(this, renderer_.get(), audio_driver_.get()); std::make_unique<Engine>(this, renderer_.get(), audio_driver_.get());
if (!engine_->Initialize()) { bool res = engine_->Initialize();
LOG << "Failed to initialize the engine."; CHECK(res) << "Failed to initialize the engine.";
throw internal_error;
}
// Use fixed time steps. // Use fixed time steps.
float time_step = engine_->time_step(); float time_step = engine_->time_step();

View File

@ -1,7 +1,6 @@
#ifndef ENGINE_PLATFORM_PLATFORM_H #ifndef ENGINE_PLATFORM_PLATFORM_H
#define ENGINE_PLATFORM_PLATFORM_H #define ENGINE_PLATFORM_PLATFORM_H
#include <exception>
#include <memory> #include <memory>
#include <string> #include <string>
@ -14,6 +13,7 @@
struct android_app; struct android_app;
struct AInputEvent; struct AInputEvent;
struct ANativeWindow;
#elif defined(__linux__) #elif defined(__linux__)
@ -65,9 +65,6 @@ class Platform {
bool mobile_device() const { return mobile_device_; } bool mobile_device() const { return mobile_device_; }
static class InternalError : public std::exception {
} internal_error;
protected: protected:
base::Timer timer_; base::Timer timer_;
@ -96,6 +93,21 @@ class Platform {
static int32_t HandleInput(android_app* app, AInputEvent* event); static int32_t HandleInput(android_app* app, AInputEvent* event);
static void HandleCmd(android_app* app, int32_t cmd); static void HandleCmd(android_app* app, int32_t cmd);
using PFN_ANativeWindow_setFrameRate = int32_t (*)(ANativeWindow* window,
float frameRate,
int8_t compatibility);
using PFN_ANativeWindow_setFrameRateWithChangeStrategy =
int32_t (*)(ANativeWindow* window,
float frameRate,
int8_t compatibility,
int8_t changeFrameRateStrategy);
PFN_ANativeWindow_setFrameRate ANativeWindow_setFrameRate = nullptr;
PFN_ANativeWindow_setFrameRateWithChangeStrategy
ANativeWindow_setFrameRateWithChangeStrategy = nullptr;
void SetFrameRate(float frame_rate);
#elif defined(__linux__) #elif defined(__linux__)
Display* display_ = nullptr; Display* display_ = nullptr;

View File

@ -1,6 +1,7 @@
#include "engine/platform/platform.h" #include "engine/platform/platform.h"
#include <android_native_app_glue.h> #include <android_native_app_glue.h>
#include <dlfcn.h>
#include <jni.h> #include <jni.h>
#include <unistd.h> #include <unistd.h>
@ -17,11 +18,6 @@ namespace {
bool g_showing_interstitial_ad = false; bool g_showing_interstitial_ad = false;
extern "C" { extern "C" {
JNIEXPORT void JNICALL
Java_com_kaliber_base_KaliberActivity_onShowAdResult(JNIEnv* env,
jobject obj,
jboolean succeeded);
};
JNIEXPORT void JNICALL JNIEXPORT void JNICALL
Java_com_kaliber_base_KaliberActivity_onShowAdResult(JNIEnv* env, Java_com_kaliber_base_KaliberActivity_onShowAdResult(JNIEnv* env,
@ -29,6 +25,7 @@ Java_com_kaliber_base_KaliberActivity_onShowAdResult(JNIEnv* env,
jboolean succeeded) { jboolean succeeded) {
g_showing_interstitial_ad = !!succeeded; g_showing_interstitial_ad = !!succeeded;
} }
}
std::string GetApkPath(ANativeActivity* activity) { std::string GetApkPath(ANativeActivity* activity) {
JNIEnv* env = nullptr; JNIEnv* env = nullptr;
@ -297,10 +294,10 @@ void Platform::HandleCmd(android_app* app, int32_t cmd) {
case APP_CMD_INIT_WINDOW: case APP_CMD_INIT_WINDOW:
DLOG << "APP_CMD_INIT_WINDOW"; DLOG << "APP_CMD_INIT_WINDOW";
if (app->window != NULL) { if (app->window != NULL) {
if (!platform->renderer_->Initialize(app->window)) { platform->SetFrameRate(60);
LOG << "Failed to initialize the renderer."; bool res = platform->renderer_->Initialize(app->window);
throw internal_error; CHECK(res) << "Failed to initialize "
} << platform->renderer_->GetDebugName() << " renderer.";
} }
break; break;
@ -317,10 +314,9 @@ void Platform::HandleCmd(android_app* app, int32_t cmd) {
if (width != ANativeWindow_getWidth(app->window) || if (width != ANativeWindow_getWidth(app->window) ||
height != ANativeWindow_getHeight(app->window)) { height != ANativeWindow_getHeight(app->window)) {
platform->renderer_->Shutdown(); platform->renderer_->Shutdown();
if (!platform->renderer_->Initialize(platform->app_->window)) { bool res = platform->renderer_->Initialize(platform->app_->window);
LOG << "Failed to initialize the renderer."; CHECK(res) << "Failed to initialize "
throw internal_error; << platform->renderer_->GetDebugName() << " renderer.";
}
} }
} }
break; break;
@ -374,6 +370,18 @@ void Platform::Initialize(android_app* app) {
app->onAppCmd = Platform::HandleCmd; app->onAppCmd = Platform::HandleCmd;
app->onInputEvent = Platform::HandleInput; app->onInputEvent = Platform::HandleInput;
// Get pointers for functions that are supported from API > minSdk if
// available.
void* mLibAndroid = dlopen("libandroid.so", RTLD_NOW | RTLD_LOCAL);
if (mLibAndroid) {
ANativeWindow_setFrameRate =
reinterpret_cast<PFN_ANativeWindow_setFrameRate>(
dlsym(mLibAndroid, "ANativeWindow_setFrameRate"));
ANativeWindow_setFrameRateWithChangeStrategy =
reinterpret_cast<PFN_ANativeWindow_setFrameRateWithChangeStrategy>(
dlsym(mLibAndroid, "ANativeWindow_setFrameRateWithChangeStrategy"));
}
Update(); Update();
} }
@ -420,15 +428,23 @@ void Platform::SetKeepScreenOn(bool keep_screen_on) {
::SetKeepScreenOn(app_->activity, keep_screen_on); ::SetKeepScreenOn(app_->activity, keep_screen_on);
} }
void Platform::SetFrameRate(float frame_rate) {
if (ANativeWindow_setFrameRateWithChangeStrategy) {
ANativeWindow_setFrameRateWithChangeStrategy(
app_->window, frame_rate,
ANATIVEWINDOW_FRAME_RATE_COMPATIBILITY_DEFAULT, 1);
} else if (ANativeWindow_setFrameRate) {
ANativeWindow_setFrameRate(app_->window, frame_rate,
ANATIVEWINDOW_FRAME_RATE_COMPATIBILITY_DEFAULT);
}
}
} // namespace eng } // namespace eng
void android_main(android_app* app) { void android_main(android_app* app) {
eng::Platform platform; eng::Platform platform;
try {
platform.Initialize(app); platform.Initialize(app);
platform.RunMainLoop(); platform.RunMainLoop();
platform.Shutdown(); platform.Shutdown();
} catch (eng::Platform::InternalError& e) {
}
_exit(0); _exit(0);
} }

View File

@ -23,15 +23,12 @@ void Platform::Initialize() {
shared_data_path_ = "./"; shared_data_path_ = "./";
LOG << "Shared data path: " << shared_data_path_.c_str(); LOG << "Shared data path: " << shared_data_path_.c_str();
if (!CreateWindow(800, 1205)) { bool res = CreateWindow(800, 1205);
LOG << "Failed to create window."; CHECK(res) << "Failed to create window.";
throw internal_error;
}
if (!renderer_->Initialize(display_, window_)) { res = renderer_->Initialize(display_, window_);
LOG << "Failed to initialize renderer."; CHECK(res) << "Failed to initialize " << renderer_->GetDebugName()
throw internal_error; << " renderer.";
}
XSelectInput(display_, window_, XSelectInput(display_, window_,
KeyPressMask | Button1MotionMask | ButtonPressMask | KeyPressMask | Button1MotionMask | ButtonPressMask |
@ -164,12 +161,8 @@ void Platform::DestroyWindow() {
int main(int argc, char** argv) { int main(int argc, char** argv) {
eng::Platform platform; eng::Platform platform;
try {
platform.Initialize(); platform.Initialize();
platform.RunMainLoop(); platform.RunMainLoop();
platform.Shutdown(); platform.Shutdown();
} catch (eng::Platform::InternalError& e) {
return -1;
}
return 0; return 0;
} }

View File

@ -708,23 +708,33 @@ bool VulkanContext::InitializeQueues(VkSurfaceKHR surface) {
// If the format list includes just one entry of VK_FORMAT_UNDEFINED, the // If the format list includes just one entry of VK_FORMAT_UNDEFINED, the
// surface has no preferred format. Otherwise, at least one supported format // surface has no preferred format. Otherwise, at least one supported format
// will be returned. // will be returned.
if (true || if (format_count == 1 && surf_formats[0].format == VK_FORMAT_UNDEFINED) {
(format_count == 1 && surf_formats[0].format == VK_FORMAT_UNDEFINED)) {
format_ = desired_format; format_ = desired_format;
} else { color_space_ = surf_formats[0].colorSpace;
if (format_count < 1) { } else if (format_count < 1) {
DLOG << "Format count less than 1."; DLOG << "Format count less than 1.";
return false; return false;
} } else {
format_ = surf_formats[0].format; // Find the first format that we support.
for (unsigned i = 0; i < format_count; ++i) { format_ = VK_FORMAT_UNDEFINED;
if (surf_formats[i].format == desired_format) { const VkFormat allowed_formats[] = {VK_FORMAT_B8G8R8A8_UNORM,
format_ = desired_format; VK_FORMAT_R8G8B8A8_UNORM};
break; for (uint32_t afi = 0; afi < std::size(allowed_formats); afi++) {
for (uint32_t sfi = 0; sfi < format_count; sfi++) {
if (surf_formats[sfi].format == allowed_formats[afi]) {
format_ = surf_formats[sfi].format;
color_space_ = surf_formats[sfi].colorSpace;
goto end_of_find_format;
} }
} }
} }
color_space_ = surf_formats[0].colorSpace;
end_of_find_format:
if (format_ == VK_FORMAT_UNDEFINED) {
DLOG << "No usable surface format found.";
return false;
}
}
if (!CreateSemaphores()) if (!CreateSemaphores())
return false; return false;
@ -972,10 +982,10 @@ bool VulkanContext::UpdateSwapChain(Window* window) {
VkCompositeAlphaFlagBitsKHR composite_alpha = VkCompositeAlphaFlagBitsKHR composite_alpha =
VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR; VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
VkCompositeAlphaFlagBitsKHR composite_alpha_flags[4] = { VkCompositeAlphaFlagBitsKHR composite_alpha_flags[4] = {
VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR, VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR,
VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR, VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR,
VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR, VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR,
VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
}; };
for (uint32_t i = 0; i < std::size(composite_alpha_flags); i++) { for (uint32_t i = 0; i < std::size(composite_alpha_flags); i++) {
if (surf_capabilities.supportedCompositeAlpha & composite_alpha_flags[i]) { if (surf_capabilities.supportedCompositeAlpha & composite_alpha_flags[i]) {
@ -1019,7 +1029,7 @@ bool VulkanContext::UpdateSwapChain(Window* window) {
err = GetSwapchainImagesKHR(device_, window->swapchain, &sp_image_count, err = GetSwapchainImagesKHR(device_, window->swapchain, &sp_image_count,
nullptr); nullptr);
if (err) { if (err) {
DLOG << "CreateSwapchainKHR failed. Error: " << string_VkResult(err); DLOG << "GetSwapchainImagesKHR failed. Error: " << string_VkResult(err);
return false; return false;
} }

View File

@ -18,8 +18,11 @@ set (oboe_sources
src/common/FixedBlockReader.cpp src/common/FixedBlockReader.cpp
src/common/FixedBlockWriter.cpp src/common/FixedBlockWriter.cpp
src/common/LatencyTuner.cpp src/common/LatencyTuner.cpp
src/common/OboeExtensions.cpp
src/common/SourceFloatCaller.cpp src/common/SourceFloatCaller.cpp
src/common/SourceI16Caller.cpp src/common/SourceI16Caller.cpp
src/common/SourceI24Caller.cpp
src/common/SourceI32Caller.cpp
src/common/Utilities.cpp src/common/Utilities.cpp
src/common/QuirksManager.cpp src/common/QuirksManager.cpp
src/fifo/FifoBuffer.cpp src/fifo/FifoBuffer.cpp
@ -27,17 +30,23 @@ set (oboe_sources
src/fifo/FifoControllerBase.cpp src/fifo/FifoControllerBase.cpp
src/fifo/FifoControllerIndirect.cpp src/fifo/FifoControllerIndirect.cpp
src/flowgraph/FlowGraphNode.cpp src/flowgraph/FlowGraphNode.cpp
src/flowgraph/ChannelCountConverter.cpp
src/flowgraph/ClipToRange.cpp src/flowgraph/ClipToRange.cpp
src/flowgraph/ManyToMultiConverter.cpp src/flowgraph/ManyToMultiConverter.cpp
src/flowgraph/MonoBlend.cpp
src/flowgraph/MonoToMultiConverter.cpp src/flowgraph/MonoToMultiConverter.cpp
src/flowgraph/MultiToManyConverter.cpp
src/flowgraph/MultiToMonoConverter.cpp
src/flowgraph/RampLinear.cpp src/flowgraph/RampLinear.cpp
src/flowgraph/SampleRateConverter.cpp src/flowgraph/SampleRateConverter.cpp
src/flowgraph/SinkFloat.cpp src/flowgraph/SinkFloat.cpp
src/flowgraph/SinkI16.cpp src/flowgraph/SinkI16.cpp
src/flowgraph/SinkI24.cpp src/flowgraph/SinkI24.cpp
src/flowgraph/SinkI32.cpp
src/flowgraph/SourceFloat.cpp src/flowgraph/SourceFloat.cpp
src/flowgraph/SourceI16.cpp src/flowgraph/SourceI16.cpp
src/flowgraph/SourceI24.cpp src/flowgraph/SourceI24.cpp
src/flowgraph/SourceI32.cpp
src/flowgraph/resampler/IntegerRatio.cpp src/flowgraph/resampler/IntegerRatio.cpp
src/flowgraph/resampler/LinearResampler.cpp src/flowgraph/resampler/LinearResampler.cpp
src/flowgraph/resampler/MultiChannelResampler.cpp src/flowgraph/resampler/MultiChannelResampler.cpp
@ -70,7 +79,7 @@ target_include_directories(oboe
# Enable -Ofast # Enable -Ofast
target_compile_options(oboe target_compile_options(oboe
PRIVATE PRIVATE
-std=c++14 -std=c++17
-Wall -Wall
-Wextra-semi -Wextra-semi
-Wshadow -Wshadow

202
src/third_party/oboe/LICENSE vendored Normal file
View File

@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -130,7 +130,7 @@ public:
* *
* @return state or a negative error. * @return state or a negative error.
*/ */
virtual StreamState getState() const = 0; virtual StreamState getState() = 0;
/** /**
* Wait until the stream's current state no longer matches the input state. * Wait until the stream's current state no longer matches the input state.
@ -191,7 +191,7 @@ public:
* @return a result which is either Result::OK with the xRun count as the value, or a * @return a result which is either Result::OK with the xRun count as the value, or a
* Result::Error* code * Result::Error* code
*/ */
virtual ResultWithValue<int32_t> getXRunCount() const { virtual ResultWithValue<int32_t> getXRunCount() {
return ResultWithValue<int32_t>(Result::ErrorUnimplemented); return ResultWithValue<int32_t>(Result::ErrorUnimplemented);
} }
@ -205,7 +205,9 @@ public:
* *
* @return burst size * @return burst size
*/ */
virtual int32_t getFramesPerBurst() = 0; int32_t getFramesPerBurst() const {
return mFramesPerBurst;
}
/** /**
* Get the number of bytes in each audio frame. This is calculated using the channel count * Get the number of bytes in each audio frame. This is calculated using the channel count
@ -260,6 +262,10 @@ public:
* The latency of an OUTPUT stream is generally higher than the INPUT latency * The latency of an OUTPUT stream is generally higher than the INPUT latency
* because an app generally tries to keep the OUTPUT buffer full and the INPUT buffer empty. * because an app generally tries to keep the OUTPUT buffer full and the INPUT buffer empty.
* *
* Note that due to issues in Android before R, we recommend NOT calling
* this method from a data callback. See this tech note for more details.
* https://github.com/google/oboe/blob/main/docs/notes/rlsbuffer.md
*
* @return a ResultWithValue which has a result of Result::OK and a value containing the latency * @return a ResultWithValue which has a result of Result::OK and a value containing the latency
* in milliseconds, or a result of Result::Error*. * in milliseconds, or a result of Result::Error*.
*/ */
@ -278,6 +284,10 @@ public:
* The time is based on the implementation's best effort, using whatever knowledge is available * The time is based on the implementation's best effort, using whatever knowledge is available
* to the system, but cannot account for any delay unknown to the implementation. * to the system, but cannot account for any delay unknown to the implementation.
* *
* Note that due to issues in Android before R, we recommend NOT calling
* this method from a data callback. See this tech note for more details.
* https://github.com/google/oboe/blob/main/docs/notes/rlsbuffer.md
*
* @deprecated since 1.0, use AudioStream::getTimestamp(clockid_t clockId) instead, which * @deprecated since 1.0, use AudioStream::getTimestamp(clockid_t clockId) instead, which
* returns ResultWithValue * returns ResultWithValue
* @param clockId the type of clock to use e.g. CLOCK_MONOTONIC * @param clockId the type of clock to use e.g. CLOCK_MONOTONIC
@ -301,6 +311,11 @@ public:
* The time is based on the implementation's best effort, using whatever knowledge is available * The time is based on the implementation's best effort, using whatever knowledge is available
* to the system, but cannot account for any delay unknown to the implementation. * to the system, but cannot account for any delay unknown to the implementation.
* *
* Note that due to issues in Android before R, we recommend NOT calling
* this method from a data callback. See this tech note for more details.
* https://github.com/google/oboe/blob/main/docs/notes/rlsbuffer.md
*
* See
* @param clockId the type of clock to use e.g. CLOCK_MONOTONIC * @param clockId the type of clock to use e.g. CLOCK_MONOTONIC
* @return a FrameTimestamp containing the position and time at which a particular audio frame * @return a FrameTimestamp containing the position and time at which a particular audio frame
* entered or left the audio processing pipeline, or an error if the operation failed. * entered or left the audio processing pipeline, or an error if the operation failed.
@ -372,11 +387,6 @@ public:
return nullptr; return nullptr;
} }
/**
* Launch a thread that will stop the stream.
*/
void launchStopThread();
/** /**
* Update mFramesWritten. * Update mFramesWritten.
* For internal use only. * For internal use only.
@ -393,12 +403,25 @@ public:
* Swap old callback for new callback. * Swap old callback for new callback.
* This not atomic. * This not atomic.
* This should only be used internally. * This should only be used internally.
* @param streamCallback * @param dataCallback
* @return previous streamCallback * @return previous dataCallback
*/ */
AudioStreamCallback *swapCallback(AudioStreamCallback *streamCallback) { AudioStreamDataCallback *swapDataCallback(AudioStreamDataCallback *dataCallback) {
AudioStreamCallback *previousCallback = mStreamCallback; AudioStreamDataCallback *previousCallback = mDataCallback;
mStreamCallback = streamCallback; mDataCallback = dataCallback;
return previousCallback;
}
/*
* Swap old callback for new callback.
* This not atomic.
* This should only be used internally.
* @param errorCallback
* @return previous errorCallback
*/
AudioStreamErrorCallback *swapErrorCallback(AudioStreamErrorCallback *errorCallback) {
AudioStreamErrorCallback *previousCallback = mErrorCallback;
mErrorCallback = errorCallback;
return previousCallback; return previousCallback;
} }
@ -419,6 +442,36 @@ public:
ResultWithValue<int32_t> waitForAvailableFrames(int32_t numFrames, ResultWithValue<int32_t> waitForAvailableFrames(int32_t numFrames,
int64_t timeoutNanoseconds); int64_t timeoutNanoseconds);
/**
* @return last result passed from an error callback
*/
virtual oboe::Result getLastErrorCallbackResult() const {
return mErrorCallbackResult;
}
int32_t getDelayBeforeCloseMillis() const {
return mDelayBeforeCloseMillis;
}
/**
* Set the time to sleep before closing the internal stream.
*
* Sometimes a callback can occur shortly after a stream has been stopped and
* even after a close! If the stream has been closed then the callback
* might access memory that has been freed, which could cause a crash.
* This seems to be more likely in Android P or earlier.
* But it can also occur in later versions. By sleeping, we give time for
* the callback threads to finish.
*
* Note that this only has an effect when OboeGlobals::areWorkaroundsEnabled() is true.
*
* @param delayBeforeCloseMillis time to sleep before close.
*/
void setDelayBeforeCloseMillis(int32_t delayBeforeCloseMillis) {
mDelayBeforeCloseMillis = delayBeforeCloseMillis;
}
protected: protected:
/** /**
@ -480,6 +533,21 @@ protected:
mDataCallbackEnabled = enabled; mDataCallbackEnabled = enabled;
} }
/**
* This should only be called as a stream is being opened.
* Otherwise we might override setDelayBeforeCloseMillis().
*/
void calculateDefaultDelayBeforeCloseMillis();
/**
* Try to avoid a race condition when closing.
*/
void sleepBeforeClose() {
if (mDelayBeforeCloseMillis > 0) {
usleep(mDelayBeforeCloseMillis * 1000);
}
}
/* /*
* Set a weak_ptr to this stream from the shared_ptr so that we can * Set a weak_ptr to this stream from the shared_ptr so that we can
* later use a shared_ptr in the error callback. * later use a shared_ptr in the error callback.
@ -515,15 +583,27 @@ protected:
std::mutex mLock; // for synchronizing start/stop/close std::mutex mLock; // for synchronizing start/stop/close
oboe::Result mErrorCallbackResult = oboe::Result::OK;
/**
* Number of frames which will be copied to/from the audio device in a single read/write
* operation
*/
int32_t mFramesPerBurst = kUnspecified;
// Time to sleep in order to prevent a race condition with a callback after a close().
// Two milliseconds may be enough but 10 msec is even safer.
static constexpr int kMinDelayBeforeCloseMillis = 10;
int32_t mDelayBeforeCloseMillis = kMinDelayBeforeCloseMillis;
private: private:
// Log the scheduler if it changes. // Log the scheduler if it changes.
void checkScheduler(); void checkScheduler();
int mPreviousScheduler = -1; int mPreviousScheduler = -1;
std::atomic<bool> mDataCallbackEnabled{false}; std::atomic<bool> mDataCallbackEnabled{false};
std::atomic<bool> mErrorCallbackCalled{false}; std::atomic<bool> mErrorCallbackCalled{false};
}; };
/** /**

View File

@ -18,6 +18,7 @@
#define OBOE_STREAM_BASE_H_ #define OBOE_STREAM_BASE_H_
#include <memory> #include <memory>
#include <string>
#include "oboe/AudioStreamCallback.h" #include "oboe/AudioStreamCallback.h"
#include "oboe/Definitions.h" #include "oboe/Definitions.h"
@ -62,9 +63,14 @@ public:
int32_t getSampleRate() const { return mSampleRate; } int32_t getSampleRate() const { return mSampleRate; }
/** /**
* @return the number of frames in each callback or kUnspecified. * @deprecated use `getFramesPerDataCallback` instead.
*/ */
int32_t getFramesPerCallback() const { return mFramesPerCallback; } int32_t getFramesPerCallback() const { return getFramesPerDataCallback(); }
/**
* @return the number of frames in each data callback or kUnspecified.
*/
int32_t getFramesPerDataCallback() const { return mFramesPerCallback; }
/** /**
* @return the audio sample format (e.g. Float or I16) * @return the audio sample format (e.g. Float or I16)
@ -100,10 +106,35 @@ public:
int32_t getDeviceId() const { return mDeviceId; } int32_t getDeviceId() const { return mDeviceId; }
/** /**
* @return the callback object for this stream, if set. * For internal use only.
* @return the data callback object for this stream, if set.
*/ */
AudioStreamCallback* getCallback() const { AudioStreamDataCallback *getDataCallback() const {
return mStreamCallback; return mDataCallback;
}
/**
* For internal use only.
* @return the error callback object for this stream, if set.
*/
AudioStreamErrorCallback *getErrorCallback() const {
return mErrorCallback;
}
/**
* @return true if a data callback was set for this stream
*/
bool isDataCallbackSpecified() const {
return mDataCallback != nullptr;
}
/**
* Note that if the app does not set an error callback then a
* default one may be provided.
* @return true if an error callback was set for this stream
*/
bool isErrorCallbackSpecified() const {
return mErrorCallback != nullptr;
} }
/** /**
@ -147,10 +178,22 @@ public:
return mSampleRateConversionQuality; return mSampleRateConversionQuality;
} }
protected: /**
* @return the stream's channel mask.
*/
ChannelMask getChannelMask() const {
return mChannelMask;
}
protected:
/** The callback which will be fired when new data is ready to be read/written. **/
AudioStreamDataCallback *mDataCallback = nullptr;
std::shared_ptr<AudioStreamDataCallback> mSharedDataCallback;
/** The callback which will be fired when an error or a disconnect occurs. **/
AudioStreamErrorCallback *mErrorCallback = nullptr;
std::shared_ptr<AudioStreamErrorCallback> mSharedErrorCallback;
/** The callback which will be fired when new data is ready to be read/written **/
AudioStreamCallback *mStreamCallback = nullptr;
/** Number of audio frames which will be requested in each callback */ /** Number of audio frames which will be requested in each callback */
int32_t mFramesPerCallback = kUnspecified; int32_t mFramesPerCallback = kUnspecified;
/** Stream channel count */ /** Stream channel count */
@ -163,11 +206,8 @@ protected:
int32_t mBufferCapacityInFrames = kUnspecified; int32_t mBufferCapacityInFrames = kUnspecified;
/** Stream buffer size specified as a number of audio frames */ /** Stream buffer size specified as a number of audio frames */
int32_t mBufferSizeInFrames = kUnspecified; int32_t mBufferSizeInFrames = kUnspecified;
/** /** Stream channel mask. Only active on Android 32+ */
* Number of frames which will be copied to/from the audio device in a single read/write ChannelMask mChannelMask = ChannelMask::Unspecified;
* operation
*/
int32_t mFramesPerBurst = kUnspecified;
/** Stream sharing mode */ /** Stream sharing mode */
SharingMode mSharingMode = SharingMode::Shared; SharingMode mSharingMode = SharingMode::Shared;
@ -189,12 +229,44 @@ protected:
/** Stream session ID allocation strategy. Only active on Android 28+ */ /** Stream session ID allocation strategy. Only active on Android 28+ */
SessionId mSessionId = SessionId::None; SessionId mSessionId = SessionId::None;
/** Control the name of the package creating the stream. Only active on Android 31+ */
std::string mPackageName;
/** Control the attribution tag of the context creating the stream. Only active on Android 31+ */
std::string mAttributionTag;
// Control whether Oboe can convert channel counts to achieve optimal results. // Control whether Oboe can convert channel counts to achieve optimal results.
bool mChannelConversionAllowed = false; bool mChannelConversionAllowed = false;
// Control whether Oboe can convert data formats to achieve optimal results. // Control whether Oboe can convert data formats to achieve optimal results.
bool mFormatConversionAllowed = false; bool mFormatConversionAllowed = false;
// Control whether and how Oboe can convert sample rates to achieve optimal results. // Control whether and how Oboe can convert sample rates to achieve optimal results.
SampleRateConversionQuality mSampleRateConversionQuality = SampleRateConversionQuality::None; SampleRateConversionQuality mSampleRateConversionQuality = SampleRateConversionQuality::None;
/** Validate stream parameters that might not be checked in lower layers */
virtual Result isValidConfig() {
switch (mFormat) {
case AudioFormat::Unspecified:
case AudioFormat::I16:
case AudioFormat::Float:
case AudioFormat::I24:
case AudioFormat::I32:
break;
default:
return Result::ErrorInvalidFormat;
}
switch (mSampleRateConversionQuality) {
case SampleRateConversionQuality::None:
case SampleRateConversionQuality::Fastest:
case SampleRateConversionQuality::Low:
case SampleRateConversionQuality::Medium:
case SampleRateConversionQuality::High:
case SampleRateConversionQuality::Best:
return Result::OK;
default:
return Result::ErrorIllegalArgument;
}
}
}; };
} // namespace oboe } // namespace oboe

View File

@ -19,6 +19,7 @@
#include "oboe/Definitions.h" #include "oboe/Definitions.h"
#include "oboe/AudioStreamBase.h" #include "oboe/AudioStreamBase.h"
#include "oboe/Utilities.h"
#include "ResultWithValue.h" #include "ResultWithValue.h"
namespace oboe { namespace oboe {
@ -42,9 +43,33 @@ public:
* *
* Default is kUnspecified. If the value is unspecified then * Default is kUnspecified. If the value is unspecified then
* the application should query for the actual value after the stream is opened. * the application should query for the actual value after the stream is opened.
*
* As the channel count here may be different from the corresponding channel count of
* provided channel mask used in setChannelMask(). The last called will be respected
* if this function and setChannelMask() are called.
*/ */
AudioStreamBuilder *setChannelCount(int channelCount) { AudioStreamBuilder *setChannelCount(int channelCount) {
mChannelCount = channelCount; mChannelCount = channelCount;
mChannelMask = ChannelMask::Unspecified;
return this;
}
/**
* Request a specific channel mask.
*
* Default is kUnspecified. If the value is unspecified then the application
* should query for the actual value after the stream is opened.
*
* As the corresponding channel count of provided channel mask here may be different
* from the channel count used in setChannelCount(). The last called will be respected
* if this function and setChannelCount() are called.
*
* As the setChannelMask API is available on Android 32+, this call will only take effects
* on Android 32+.
*/
AudioStreamBuilder *setChannelMask(ChannelMask channelMask) {
mChannelMask = channelMask;
mChannelCount = getChannelCountFromChannelMask(channelMask);
return this; return this;
} }
@ -74,6 +99,13 @@ public:
return this; return this;
} }
/**
* @deprecated use `setFramesPerDataCallback` instead.
*/
AudioStreamBuilder *setFramesPerCallback(int framesPerCallback) {
return setFramesPerDataCallback(framesPerCallback);
}
/** /**
* Request a specific number of frames for the data callback. * Request a specific number of frames for the data callback.
* *
@ -85,10 +117,18 @@ public:
* the callbacks. But if your application is, for example, doing FFTs or other block * the callbacks. But if your application is, for example, doing FFTs or other block
* oriented operations, then call this function to get the sizes you need. * oriented operations, then call this function to get the sizes you need.
* *
* Calling setFramesPerDataCallback() does not guarantee anything about timing.
* This just collects the data into a the number of frames that your app requires.
* We encourage leaving this unspecified in most cases.
*
* If this number is larger than the burst size, some bursts will not receive a callback.
* If this number is smaller than the burst size, there may be multiple callbacks in a single
* burst.
*
* @param framesPerCallback * @param framesPerCallback
* @return pointer to the builder so calls can be chained * @return pointer to the builder so calls can be chained
*/ */
AudioStreamBuilder *setFramesPerCallback(int framesPerCallback) { AudioStreamBuilder *setFramesPerDataCallback(int framesPerCallback) {
mFramesPerCallback = framesPerCallback; mFramesPerCallback = framesPerCallback;
return this; return this;
} }
@ -198,10 +238,11 @@ public:
/** /**
* Set the intended use case for the stream. * Set the intended use case for an output stream.
* *
* The system will use this information to optimize the behavior of the stream. * The system will use this information to optimize the behavior of the stream.
* This could, for example, affect how volume and focus is handled for the stream. * This could, for example, affect how volume and focus is handled for the stream.
* The usage is ignored for input streams.
* *
* The default, if you do not call this function, is Usage::Media. * The default, if you do not call this function, is Usage::Media.
* *
@ -215,10 +256,11 @@ public:
} }
/** /**
* Set the type of audio data that the stream will carry. * Set the type of audio data that an output stream will carry.
* *
* The system will use this information to optimize the behavior of the stream. * The system will use this information to optimize the behavior of the stream.
* This could, for example, affect whether a stream is paused when a notification occurs. * This could, for example, affect whether a stream is paused when a notification occurs.
* The contentType is ignored for input streams.
* *
* The default, if you do not call this function, is ContentType::Music. * The default, if you do not call this function, is ContentType::Music.
* *
@ -286,11 +328,14 @@ public:
* In most cases, the primary device will be the appropriate device to use, and the * In most cases, the primary device will be the appropriate device to use, and the
* deviceId can be left kUnspecified. * deviceId can be left kUnspecified.
* *
* On Android, for example, the ID could be obtained from the Java AudioManager. * The ID could be obtained from the Java AudioManager.
* AudioManager.getDevices() returns an array of AudioDeviceInfo[], which contains * AudioManager.getDevices() returns an array of AudioDeviceInfo,
* a getId() method (as well as other type information), that should be passed * which contains a getId() method. That ID can be passed to this function.
* to this method.
* *
* It is possible that you may not get the device that you requested.
* So if it is important to you, you should call
* stream->getDeviceId() after the stream is opened to
* verify the actual ID.
* *
* Note that when using OpenSL ES, this will be ignored and the created * Note that when using OpenSL ES, this will be ignored and the created
* stream will have deviceId kUnspecified. * stream will have deviceId kUnspecified.
@ -303,9 +348,86 @@ public:
return this; return this;
} }
/**
* Specifies an object to handle data related callbacks from the underlying API.
*
* <strong>Important: See AudioStreamCallback for restrictions on what may be called
* from the callback methods.</strong>
*
* We pass a shared_ptr so that the sharedDataCallback object cannot be deleted
* before the stream is deleted.
*
* @param dataCallback
* @return pointer to the builder so calls can be chained
*/
AudioStreamBuilder *setDataCallback(std::shared_ptr<AudioStreamDataCallback> sharedDataCallback) {
// Use this raw pointer in the rest of the code to retain backwards compatibility.
mDataCallback = sharedDataCallback.get();
// Hold a shared_ptr to protect the raw pointer for the lifetime of the stream.
mSharedDataCallback = sharedDataCallback;
return this;
}
/**
* Pass a raw pointer to a data callback. This is not recommended because the dataCallback
* object might get deleted by the app while it is being used.
*
* @deprecated Call setDataCallback(std::shared_ptr<AudioStreamDataCallback>) instead.
* @param dataCallback
* @return pointer to the builder so calls can be chained
*/
AudioStreamBuilder *setDataCallback(AudioStreamDataCallback *dataCallback) {
mDataCallback = dataCallback;
mSharedDataCallback = nullptr;
return this;
}
/**
* Specifies an object to handle error related callbacks from the underlying API.
* This can occur when a stream is disconnected because a headset is plugged in or unplugged.
* It can also occur if the audio service fails or if an exclusive stream is stolen by
* another stream.
*
* <strong>Important: See AudioStreamCallback for restrictions on what may be called
* from the callback methods.</strong>
*
* <strong>When an error callback occurs, the associated stream must be stopped and closed
* in a separate thread.</strong>
*
* We pass a shared_ptr so that the errorCallback object cannot be deleted before the stream is deleted.
* If the stream was created using a shared_ptr then the stream cannot be deleted before the
* error callback has finished running.
*
* @param sharedErrorCallback
* @return pointer to the builder so calls can be chained
*/
AudioStreamBuilder *setErrorCallback(std::shared_ptr<AudioStreamErrorCallback> sharedErrorCallback) {
// Use this raw pointer in the rest of the code to retain backwards compatibility.
mErrorCallback = sharedErrorCallback.get();
// Hold a shared_ptr to protect the raw pointer for the lifetime of the stream.
mSharedErrorCallback = sharedErrorCallback;
return this;
}
/**
* Pass a raw pointer to an error callback. This is not recommended because the errorCallback
* object might get deleted by the app while it is being used.
*
* @deprecated Call setErrorCallback(std::shared_ptr<AudioStreamErrorCallback>) instead.
* @param errorCallback
* @return pointer to the builder so calls can be chained
*/
AudioStreamBuilder *setErrorCallback(AudioStreamErrorCallback *errorCallback) {
mErrorCallback = errorCallback;
mSharedErrorCallback = nullptr;
return this;
}
/** /**
* Specifies an object to handle data or error related callbacks from the underlying API. * Specifies an object to handle data or error related callbacks from the underlying API.
* *
* This is the equivalent of calling both setDataCallback() and setErrorCallback().
*
* <strong>Important: See AudioStreamCallback for restrictions on what may be called * <strong>Important: See AudioStreamCallback for restrictions on what may be called
* from the callback methods.</strong> * from the callback methods.</strong>
* *
@ -325,7 +447,9 @@ public:
* @return pointer to the builder so calls can be chained * @return pointer to the builder so calls can be chained
*/ */
AudioStreamBuilder *setCallback(AudioStreamCallback *streamCallback) { AudioStreamBuilder *setCallback(AudioStreamCallback *streamCallback) {
mStreamCallback = streamCallback; // Use the same callback object for both, dual inheritance.
mDataCallback = streamCallback;
mErrorCallback = streamCallback;
return this; return this;
} }
@ -336,7 +460,7 @@ public:
* On some devices, mono streams might be broken, so a stereo stream might be opened * On some devices, mono streams might be broken, so a stereo stream might be opened
* and converted to mono. * and converted to mono.
* *
* Default is true. * Default is false.
*/ */
AudioStreamBuilder *setChannelConversionAllowed(bool allowed) { AudioStreamBuilder *setChannelConversionAllowed(bool allowed) {
mChannelConversionAllowed = allowed; mChannelConversionAllowed = allowed;
@ -348,7 +472,7 @@ public:
* On some versions of Android, for example, a float stream could not get a * On some versions of Android, for example, a float stream could not get a
* low latency data path. So an I16 stream might be opened and converted to float. * low latency data path. So an I16 stream might be opened and converted to float.
* *
* Default is true. * Default is false.
*/ */
AudioStreamBuilder *setFormatConversionAllowed(bool allowed) { AudioStreamBuilder *setFormatConversionAllowed(bool allowed) {
mFormatConversionAllowed = allowed; mFormatConversionAllowed = allowed;
@ -371,6 +495,43 @@ public:
return this; return this;
} }
/**
* Declare the name of the package creating the stream.
*
* This is usually {@code Context#getPackageName()}.
*
* The default, if you do not call this function, is a random package in the calling uid.
* The vast majority of apps have only one package per calling UID.
* If an invalid package name is set, input streams may not be given permission to
* record when started.
*
* The package name is usually the applicationId in your app's build.gradle file.
*
* Available since API level 31.
*
* @param packageName packageName of the calling app.
*/
AudioStreamBuilder *setPackageName(std::string packageName) {
mPackageName = packageName;
return this;
}
/**
* Declare the attribution tag of the context creating the stream.
*
* This is usually {@code Context#getAttributionTag()}.
*
* The default, if you do not call this function, is null.
*
* Available since API level 31.
*
* @param attributionTag attributionTag of the calling context.
*/
AudioStreamBuilder *setAttributionTag(std::string attributionTag) {
mAttributionTag = attributionTag;
return this;
}
/** /**
* @return true if AAudio will be used based on the current settings. * @return true if AAudio will be used based on the current settings.
*/ */
@ -382,7 +543,8 @@ public:
/** /**
* Create and open a stream object based on the current settings. * Create and open a stream object based on the current settings.
* *
* The caller owns the pointer to the AudioStream object. * The caller owns the pointer to the AudioStream object
* and must delete it when finished.
* *
* @deprecated Use openStream(std::shared_ptr<oboe::AudioStream> &stream) instead. * @deprecated Use openStream(std::shared_ptr<oboe::AudioStream> &stream) instead.
* @param stream pointer to a variable to receive the stream address * @param stream pointer to a variable to receive the stream address
@ -408,6 +570,8 @@ public:
* The caller must create a unique ptr, and pass by reference so it can be * The caller must create a unique ptr, and pass by reference so it can be
* modified to point to an opened stream. The caller owns the unique ptr, * modified to point to an opened stream. The caller owns the unique ptr,
* and it will be automatically closed and deleted when going out of scope. * and it will be automatically closed and deleted when going out of scope.
*
* @deprecated Use openStream(std::shared_ptr<oboe::AudioStream> &stream) instead.
* @param stream Reference to the ManagedStream (uniqueptr) used to keep track of stream * @param stream Reference to the ManagedStream (uniqueptr) used to keep track of stream
* @return OBOE_OK if successful or a negative error code. * @return OBOE_OK if successful or a negative error code.
*/ */

View File

@ -24,15 +24,16 @@ namespace oboe {
class AudioStream; class AudioStream;
/** /**
* AudioStreamCallback defines a callback interface for: * AudioStreamDataCallback defines a callback interface for
* * moving data to/from an audio stream using `onAudioReady`
* 1) moving data to/from an audio stream using `onAudioReady`
* 2) being alerted when a stream has an error using `onError*` methods * 2) being alerted when a stream has an error using `onError*` methods
* *
* It is used with AudioStreamBuilder::setDataCallback().
*/ */
class AudioStreamCallback {
class AudioStreamDataCallback {
public: public:
virtual ~AudioStreamCallback() = default; virtual ~AudioStreamDataCallback() = default;
/** /**
* A buffer is ready for processing. * A buffer is ready for processing.
@ -75,21 +76,68 @@ public:
* If you need to move data, eg. MIDI commands, in or out of the callback function then * If you need to move data, eg. MIDI commands, in or out of the callback function then
* we recommend the use of non-blocking techniques such as an atomic FIFO. * we recommend the use of non-blocking techniques such as an atomic FIFO.
* *
* @param oboeStream pointer to the associated stream * @param audioStream pointer to the associated stream
* @param audioData buffer containing input data or a place to put output data * @param audioData buffer containing input data or a place to put output data
* @param numFrames number of frames to be processed * @param numFrames number of frames to be processed
* @return DataCallbackResult::Continue or DataCallbackResult::Stop * @return DataCallbackResult::Continue or DataCallbackResult::Stop
*/ */
virtual DataCallbackResult onAudioReady( virtual DataCallbackResult onAudioReady(
AudioStream *oboeStream, AudioStream *audioStream,
void *audioData, void *audioData,
int32_t numFrames) = 0; int32_t numFrames) = 0;
};
/** /**
* This will be called when an error occurs on a stream or when the stream is disconnected. * AudioStreamErrorCallback defines a callback interface for
* being alerted when a stream has an error or is disconnected
* using `onError*` methods.
* *
* Note that this will be called on a different thread than the onAudioReady() thread. * Note: This callback is only fired when an AudioStreamCallback is set.
* This thread will be created by Oboe. * If you use AudioStream::write() you have to evaluate the return codes of
* AudioStream::write() to notice errors in the stream.
*
* It is used with AudioStreamBuilder::setErrorCallback().
*/
class AudioStreamErrorCallback {
public:
virtual ~AudioStreamErrorCallback() = default;
/**
* This will be called before other `onError` methods when an error occurs on a stream,
* such as when the stream is disconnected.
*
* It can be used to override and customize the normal error processing.
* Use of this method is considered an advanced technique.
* It might, for example, be used if an app want to use a high level lock when
* closing and reopening a stream.
* Or it might be used when an app want to signal a management thread that handles
* all of the stream state.
*
* If this method returns false it indicates that the stream has *not been stopped and closed
* by the application. In this case it will be stopped by Oboe in the following way:
* onErrorBeforeClose() will be called, then the stream will be closed and onErrorAfterClose()
* will be closed.
*
* If this method returns true it indicates that the stream *has* been stopped and closed
* by the application and Oboe will not do this.
* In that case, the app MUST stop() and close() the stream.
*
* This method will be called on a thread created by Oboe.
*
* @param audioStream pointer to the associated stream
* @param error
* @return true if the stream has been stopped and closed, false if not
*/
virtual bool onError(AudioStream* /* audioStream */, Result /* error */) {
return false;
}
/**
* This will be called when an error occurs on a stream,
* such as when the stream is disconnected,
* and if onError() returns false (indicating that the error has not already been handled).
*
* Note that this will be called on a thread created by Oboe.
* *
* The underlying stream will already be stopped by Oboe but not yet closed. * The underlying stream will already be stopped by Oboe but not yet closed.
* So the stream can be queried. * So the stream can be queried.
@ -97,27 +145,49 @@ public:
* Do not close or delete the stream in this method because it will be * Do not close or delete the stream in this method because it will be
* closed after this method returns. * closed after this method returns.
* *
* @param oboeStream pointer to the associated stream * @param audioStream pointer to the associated stream
* @param error * @param error
*/ */
virtual void onErrorBeforeClose(AudioStream* /* oboeStream */, Result /* error */) {} virtual void onErrorBeforeClose(AudioStream* /* audioStream */, Result /* error */) {}
/** /**
* This will be called when an error occurs on a stream or when the stream is disconnected. * This will be called when an error occurs on a stream,
* such as when the stream is disconnected,
* and if onError() returns false (indicating that the error has not already been handled).
*
* The underlying AAudio or OpenSL ES stream will already be stopped AND closed by Oboe. * The underlying AAudio or OpenSL ES stream will already be stopped AND closed by Oboe.
* So the underlying stream cannot be referenced. * So the underlying stream cannot be referenced.
* But you can still query most parameters. * But you can still query most parameters.
* *
* This callback could be used to reopen a new stream on another device. * This callback could be used to reopen a new stream on another device.
* You can safely delete the old AudioStream in this method.
* *
* @param oboeStream pointer to the associated stream * @param audioStream pointer to the associated stream
* @param error * @param error
*/ */
virtual void onErrorAfterClose(AudioStream* /* oboeStream */, Result /* error */) {} virtual void onErrorAfterClose(AudioStream* /* audioStream */, Result /* error */) {}
}; };
/**
* AudioStreamCallback defines a callback interface for:
*
* 1) moving data to/from an audio stream using `onAudioReady`
* 2) being alerted when a stream has an error using `onError*` methods
*
* It is used with AudioStreamBuilder::setCallback().
*
* It combines the interfaces defined by AudioStreamDataCallback and AudioStreamErrorCallback.
* This was the original callback object. We now recommend using the individual interfaces
* and using setDataCallback() and setErrorCallback().
*
* @deprecated Use `AudioStreamDataCallback` and `AudioStreamErrorCallback` instead
*/
class AudioStreamCallback : public AudioStreamDataCallback,
public AudioStreamErrorCallback {
public:
// Virtual destructor so subclasses are destroyed correctly through a base pointer.
virtual ~AudioStreamCallback() = default;
};
} // namespace oboe } // namespace oboe
#endif //OBOE_STREAM_CALLBACK_H #endif //OBOE_STREAM_CALLBACK_H

View File

@ -17,7 +17,6 @@
#ifndef OBOE_DEFINITIONS_H #ifndef OBOE_DEFINITIONS_H
#define OBOE_DEFINITIONS_H #define OBOE_DEFINITIONS_H
#include <cstdint> #include <cstdint>
#include <type_traits> #include <type_traits>
@ -108,9 +107,36 @@ namespace oboe {
I16 = 1, // AAUDIO_FORMAT_PCM_I16, I16 = 1, // AAUDIO_FORMAT_PCM_I16,
/** /**
* Single precision floating points. * Single precision floating point.
*
* This is the recommended format for most applications.
* But note that the use of Float may prevent the opening of
* a low-latency input path on OpenSL ES or Legacy AAudio streams.
*/ */
Float = 2, // AAUDIO_FORMAT_PCM_FLOAT, Float = 2, // AAUDIO_FORMAT_PCM_FLOAT,
/**
* Signed 24-bit integers, packed into 3 bytes.
*
* Note that the use of this format does not guarantee that
* the full precision will be provided. The underlying device may
* be using I16 format.
*
* Added in API 31 (S).
*/
I24 = 3, // AAUDIO_FORMAT_PCM_I24_PACKED
/**
* Signed 32-bit integers.
*
* Note that the use of this format does not guarantee that
* the full precision will be provided. The underlying device may
* be using I16 format.
*
* Added in API 31 (S).
*/
I32 = 4, // AAUDIO_FORMAT_PCM_I32
}; };
/** /**
@ -158,7 +184,7 @@ namespace oboe {
Reserved8, Reserved8,
Reserved9, Reserved9,
Reserved10, Reserved10,
ErrorClosed, ErrorClosed = -869,
}; };
/** /**
@ -218,11 +244,14 @@ namespace oboe {
/** /**
* Use OpenSL ES. * Use OpenSL ES.
* Note that OpenSL ES is deprecated in Android 13, API 30 and above.
*/ */
OpenSLES, OpenSLES,
/** /**
* Try to use AAudio. Fail if unavailable. * Try to use AAudio. Fail if unavailable.
* AAudio was first supported in Android 8, API 26 and above.
* It is only recommended for API 27 and above.
*/ */
AAudio AAudio
}; };
@ -242,8 +271,17 @@ namespace oboe {
* This may be implemented using bilinear interpolation. * This may be implemented using bilinear interpolation.
*/ */
Fastest, Fastest,
/**
* Low quality conversion with 8 taps.
*/
Low, Low,
/**
* Medium quality conversion with 16 taps.
*/
Medium, Medium,
/**
* High quality conversion with 32 taps.
*/
High, High,
/** /**
* Highest quality conversion, which may be expensive in terms of CPU. * Highest quality conversion, which may be expensive in terms of CPU.
@ -456,6 +494,160 @@ namespace oboe {
Stereo = 2, Stereo = 2,
}; };
/**
* The channel mask of the audio stream. The underlying type is `uint32_t`.
* Use of this enum is convenient.
*
* ChannelMask::Unspecified means this is not specified.
* The rest of the enums are channel position masks.
* Use the combinations of the channel position masks defined below instead of
* using those values directly.
*/
enum class ChannelMask : uint32_t { // aaudio_channel_mask_t
Unspecified = kUnspecified,
// --- Individual channel position bits ---
// Each value below must stay numerically identical to the matching
// AAUDIO_CHANNEL_* constant (verified by static_asserts elsewhere).
FrontLeft = 1 << 0,
FrontRight = 1 << 1,
FrontCenter = 1 << 2,
LowFrequency = 1 << 3,
BackLeft = 1 << 4,
BackRight = 1 << 5,
FrontLeftOfCenter = 1 << 6,
FrontRightOfCenter = 1 << 7,
BackCenter = 1 << 8,
SideLeft = 1 << 9,
SideRight = 1 << 10,
TopCenter = 1 << 11,
TopFrontLeft = 1 << 12,
TopFrontCenter = 1 << 13,
TopFrontRight = 1 << 14,
TopBackLeft = 1 << 15,
TopBackCenter = 1 << 16,
TopBackRight = 1 << 17,
TopSideLeft = 1 << 18,
TopSideRight = 1 << 19,
BottomFrontLeft = 1 << 20,
BottomFrontCenter = 1 << 21,
BottomFrontRight = 1 << 22,
LowFrequency2 = 1 << 23,
FrontWideLeft = 1 << 24,
FrontWideRight = 1 << 25,
// --- Common speaker-layout combinations of the position bits above ---
Mono = FrontLeft,
Stereo = FrontLeft |
FrontRight,
CM2Point1 = FrontLeft |
FrontRight |
LowFrequency,
Tri = FrontLeft |
FrontRight |
FrontCenter,
TriBack = FrontLeft |
FrontRight |
BackCenter,
CM3Point1 = FrontLeft |
FrontRight |
FrontCenter |
LowFrequency,
CM2Point0Point2 = FrontLeft |
FrontRight |
TopSideLeft |
TopSideRight,
CM2Point1Point2 = CM2Point0Point2 |
LowFrequency,
CM3Point0Point2 = FrontLeft |
FrontRight |
FrontCenter |
TopSideLeft |
TopSideRight,
CM3Point1Point2 = CM3Point0Point2 |
LowFrequency,
Quad = FrontLeft |
FrontRight |
BackLeft |
BackRight,
QuadSide = FrontLeft |
FrontRight |
SideLeft |
SideRight,
Surround = FrontLeft |
FrontRight |
FrontCenter |
BackCenter,
Penta = Quad |
FrontCenter,
// aka 5Point1Back
CM5Point1 = FrontLeft |
FrontRight |
FrontCenter |
LowFrequency |
BackLeft |
BackRight,
CM5Point1Side = FrontLeft |
FrontRight |
FrontCenter |
LowFrequency |
SideLeft |
SideRight,
CM6Point1 = FrontLeft |
FrontRight |
FrontCenter |
LowFrequency |
BackLeft |
BackRight |
BackCenter,
CM7Point1 = CM5Point1 |
SideLeft |
SideRight,
CM5Point1Point2 = CM5Point1 |
TopSideLeft |
TopSideRight,
CM5Point1Point4 = CM5Point1 |
TopFrontLeft |
TopFrontRight |
TopBackLeft |
TopBackRight,
CM7Point1Point2 = CM7Point1 |
TopSideLeft |
TopSideRight,
CM7Point1Point4 = CM7Point1 |
TopFrontLeft |
TopFrontRight |
TopBackLeft |
TopBackRight,
CM9Point1Point4 = CM7Point1Point4 |
FrontWideLeft |
FrontWideRight,
CM9Point1Point6 = CM9Point1Point4 |
TopSideLeft |
TopSideRight,
FrontBack = FrontCenter |
BackCenter,
};
/** /**
* On API 16 to 26 OpenSL ES will be used. When using OpenSL ES the optimal values for sampleRate and * On API 16 to 26 OpenSL ES will be used. When using OpenSL ES the optimal values for sampleRate and
* framesPerBurst are not known by the native code. * framesPerBurst are not known by the native code.

View File

@ -17,19 +17,35 @@
#ifndef OBOE_FIFOPROCESSOR_H #ifndef OBOE_FIFOPROCESSOR_H
#define OBOE_FIFOPROCESSOR_H #define OBOE_FIFOPROCESSOR_H
#include <unistd.h> #include <memory>
#include <sys/types.h> #include <stdint.h>
#include "common/OboeDebug.h"
#include "FifoControllerBase.h"
#include "oboe/Definitions.h" #include "oboe/Definitions.h"
#include "oboe/FifoControllerBase.h"
namespace oboe { namespace oboe {
class FifoBuffer { class FifoBuffer {
public: public:
/**
* Construct a `FifoBuffer`.
*
* @param bytesPerFrame amount of bytes for one frame
* @param capacityInFrames the capacity of frames in fifo
*/
FifoBuffer(uint32_t bytesPerFrame, uint32_t capacityInFrames); FifoBuffer(uint32_t bytesPerFrame, uint32_t capacityInFrames);
/**
* Construct a `FifoBuffer`.
* To be used if the storage allocation is done outside of FifoBuffer.
*
* @param bytesPerFrame amount of bytes for one frame
* @param capacityInFrames capacity of frames in fifo
* @param readCounterAddress address of read counter
* @param writeCounterAddress address of write counter
* @param dataStorageAddress address of storage
*/
FifoBuffer(uint32_t bytesPerFrame, FifoBuffer(uint32_t bytesPerFrame,
uint32_t capacityInFrames, uint32_t capacityInFrames,
std::atomic<uint64_t> *readCounterAddress, std::atomic<uint64_t> *readCounterAddress,
@ -38,18 +54,36 @@ public:
~FifoBuffer(); ~FifoBuffer();
/**
* Convert a number of frames in bytes.
*
* @return number of bytes
*/
int32_t convertFramesToBytes(int32_t frames); int32_t convertFramesToBytes(int32_t frames);
/** /**
* Read framesToRead or, if not enough, then read as many as are available. * Read framesToRead or, if not enough, then read as many as are available.
*
* @param destination * @param destination
* @param framesToRead number of frames requested * @param framesToRead number of frames requested
* @return number of frames actually read * @return number of frames actually read
*/ */
int32_t read(void *destination, int32_t framesToRead); int32_t read(void *destination, int32_t framesToRead);
/**
* Write framesToWrite or, if there is not enough room, then write as many
* frames as the fifo can accept.
*
* @param source
* @param framesToWrite number of frames requested
* @return number of frames actually written
*/
int32_t write(const void *source, int32_t framesToWrite); int32_t write(const void *source, int32_t framesToWrite);
/**
* Get the buffer capacity in frames.
*
* @return number of frames
*/
uint32_t getBufferCapacityInFrames() const; uint32_t getBufferCapacityInFrames() const;
/** /**
@ -62,25 +96,56 @@ public:
*/ */
int32_t readNow(void *destination, int32_t numFrames); int32_t readNow(void *destination, int32_t numFrames);
/**
* Get the number of frames in the fifo.
*
* @return number of frames actually in the buffer
*/
uint32_t getFullFramesAvailable() { uint32_t getFullFramesAvailable() {
return mFifo->getFullFramesAvailable(); return mFifo->getFullFramesAvailable();
} }
/**
* Get the amount of bytes per frame.
*
* @return number of bytes per frame
*/
uint32_t getBytesPerFrame() const { uint32_t getBytesPerFrame() const {
return mBytesPerFrame; return mBytesPerFrame;
} }
/**
* Get the position of read counter.
*
* @return position of read counter
*/
uint64_t getReadCounter() const { uint64_t getReadCounter() const {
return mFifo->getReadCounter(); return mFifo->getReadCounter();
} }
/**
* Set the position of read counter.
*
* @param n position of read counter
*/
void setReadCounter(uint64_t n) { void setReadCounter(uint64_t n) {
mFifo->setReadCounter(n); mFifo->setReadCounter(n);
} }
/**
* Get the position of write counter.
*
* @return position of write counter
*/
uint64_t getWriteCounter() { uint64_t getWriteCounter() {
return mFifo->getWriteCounter(); return mFifo->getWriteCounter();
} }
/**
* Set the position of write counter.
*
* @param n position of write counter
*/
void setWriteCounter(uint64_t n) { void setWriteCounter(uint64_t n) {
mFifo->setWriteCounter(n); mFifo->setWriteCounter(n);
} }

View File

@ -18,7 +18,6 @@
#define NATIVEOBOE_FIFOCONTROLLERBASE_H #define NATIVEOBOE_FIFOCONTROLLERBASE_H
#include <stdint.h> #include <stdint.h>
#include <sys/types.h>
namespace oboe { namespace oboe {
@ -36,7 +35,9 @@ class FifoControllerBase {
public: public:
/** /**
* @param totalFrames capacity of the circular buffer in frames. * Construct a `FifoControllerBase`.
*
* @param totalFrames capacity of the circular buffer in frames
*/ */
FifoControllerBase(uint32_t totalFrames); FifoControllerBase(uint32_t totalFrames);
@ -46,35 +47,53 @@ public:
* The frames available to read will be calculated from the read and write counters. * The frames available to read will be calculated from the read and write counters.
* The result will be clipped to the capacity of the buffer. * The result will be clipped to the capacity of the buffer.
* If the buffer has underflowed then this will return zero. * If the buffer has underflowed then this will return zero.
*
* @return number of valid frames available to read. * @return number of valid frames available to read.
*/ */
uint32_t getFullFramesAvailable() const; uint32_t getFullFramesAvailable() const;
/** /**
* The index in a circular buffer of the next frame to read. * The index in a circular buffer of the next frame to read.
*
* @return read index position
*/ */
uint32_t getReadIndex() const; uint32_t getReadIndex() const;
/** /**
* Advance read index from a number of frames.
* Equivalent of incrementReadCounter(numFrames).
*
* @param numFrames number of frames to advance the read index * @param numFrames number of frames to advance the read index
*/ */
void advanceReadIndex(uint32_t numFrames); void advanceReadIndex(uint32_t numFrames);
/** /**
* @return maximum number of frames that can be written without exceeding the threshold. * Get the number of frame that are not written yet.
*
* @return maximum number of frames that can be written without exceeding the threshold
*/ */
uint32_t getEmptyFramesAvailable() const; uint32_t getEmptyFramesAvailable() const;
/** /**
* The index in a circular buffer of the next frame to write. * The index in a circular buffer of the next frame to write.
*
* @return index of the next frame to write
*/ */
uint32_t getWriteIndex() const; uint32_t getWriteIndex() const;
/** /**
* Advance write index from a number of frames.
* Equivalent of incrementWriteCounter(numFrames).
*
* @param numFrames number of frames to advance the write index * @param numFrames number of frames to advance the write index
*/ */
void advanceWriteIndex(uint32_t numFrames); void advanceWriteIndex(uint32_t numFrames);
/**
* Get the frame capacity of the fifo.
*
* @return frame capacity
*/
uint32_t getFrameCapacity() const { return mTotalFrames; } uint32_t getFrameCapacity() const { return mTotalFrames; }
virtual uint64_t getReadCounter() const = 0; virtual uint64_t getReadCounter() const = 0;

View File

@ -33,5 +33,7 @@
#include "oboe/Utilities.h" #include "oboe/Utilities.h"
#include "oboe/Version.h" #include "oboe/Version.h"
#include "oboe/StabilizedCallback.h" #include "oboe/StabilizedCallback.h"
#include "oboe/FifoBuffer.h"
#include "oboe/OboeExtensions.h"
#endif //OBOE_OBOE_H #endif //OBOE_OBOE_H

View File

@ -0,0 +1,64 @@
/*
* Copyright 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef OBOE_EXTENSIONS_
#define OBOE_EXTENSIONS_

#include <stdint.h>
#include "oboe/Definitions.h"
#include "oboe/AudioStream.h"

namespace oboe {

/**
 * The definitions below are only for testing.
 * They are not recommended for use in an application.
 * They may change or be removed at any time.
 */
class OboeExtensions {
public:

    /**
     * @returns true if the device supports AAudio MMAP
     */
    static bool isMMapSupported();

    /**
     * @returns true if the AAudio MMAP data path can be selected
     */
    static bool isMMapEnabled();

    /**
     * Controls whether the AAudio MMAP data path can be selected when opening a stream.
     * It has no effect after the stream has been opened.
     * It only affects the application that calls it. Other apps are not affected.
     *
     * @param enabled true to allow the MMAP path, false to force the legacy path
     * @return 0 or a negative error code
     */
    static int32_t setMMapEnabled(bool enabled);

    /**
     * @param oboeStream an open stream to query
     * @return true if the AAudio MMAP data path is used on the stream
     */
    static bool isMMapUsed(oboe::AudioStream *oboeStream);
};

} // namespace oboe

// Fixed: the closing guard comment previously said OBOE_LATENCY_TUNER_,
// a copy-paste leftover that did not match the actual guard above.
#endif // OBOE_EXTENSIONS_

View File

@ -60,7 +60,7 @@ private:
#if defined(__i386__) || defined(__x86_64__) #if defined(__i386__) || defined(__x86_64__)
#define cpu_relax() asm volatile("rep; nop" ::: "memory"); #define cpu_relax() asm volatile("rep; nop" ::: "memory");
#elif defined(__arm__) || defined(__mips__) #elif defined(__arm__) || defined(__mips__) || defined(__riscv)
#define cpu_relax() asm volatile("":::"memory") #define cpu_relax() asm volatile("":::"memory")
#elif defined(__aarch64__) #elif defined(__aarch64__)

View File

@ -82,6 +82,8 @@ int getPropertyInteger(const char * name, int defaultValue);
*/ */
int getSdkVersion(); int getSdkVersion();
int getChannelCountFromChannelMask(ChannelMask channelMask);
} // namespace oboe } // namespace oboe
#endif //OBOE_UTILITIES_H #endif //OBOE_UTILITIES_H

View File

@ -34,10 +34,10 @@
#define OBOE_VERSION_MAJOR 1 #define OBOE_VERSION_MAJOR 1
// Type: 8-bit unsigned int. Min value: 0 Max value: 255. See below for description. // Type: 8-bit unsigned int. Min value: 0 Max value: 255. See below for description.
#define OBOE_VERSION_MINOR 4 #define OBOE_VERSION_MINOR 7
// Type: 16-bit unsigned int. Min value: 0 Max value: 65535. See below for description. // Type: 16-bit unsigned int. Min value: 0 Max value: 65535. See below for description.
#define OBOE_VERSION_PATCH 2 #define OBOE_VERSION_PATCH 0
#define OBOE_STRINGIFY(x) #x #define OBOE_STRINGIFY(x) #x
#define OBOE_TOSTRING(x) OBOE_STRINGIFY(x) #define OBOE_TOSTRING(x) OBOE_STRINGIFY(x)

View File

@ -0,0 +1,179 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef OBOE_AAUDIO_EXTENSIONS_H
#define OBOE_AAUDIO_EXTENSIONS_H
#include <dlfcn.h>
#include <stdint.h>
#include <sys/system_properties.h>
#include "common/OboeDebug.h"
#include "oboe/Oboe.h"
#include "AAudioLoader.h"
namespace oboe {
#define LIB_AAUDIO_NAME "libaaudio.so"
#define FUNCTION_IS_MMAP "AAudioStream_isMMapUsed"
#define FUNCTION_SET_MMAP_POLICY "AAudio_setMMapPolicy"
#define FUNCTION_GET_MMAP_POLICY "AAudio_getMMapPolicy"
#define AAUDIO_ERROR_UNAVAILABLE static_cast<aaudio_result_t>(Result::ErrorUnavailable)
typedef struct AAudioStreamStruct AAudioStream;
/**
* Call some AAudio test routines that are not part of the normal API.
*/
class AAudioExtensions {
public:
    AAudioExtensions() {
        // Cache the device-wide MMAP policies from system properties once,
        // at construction of the singleton.
        int32_t policy = getIntegerProperty("aaudio.mmap_policy", 0);
        mMMapSupported = isPolicyEnabled(policy);

        policy = getIntegerProperty("aaudio.mmap_exclusive_policy", 0);
        mMMapExclusiveSupported = isPolicyEnabled(policy);
    }

    /**
     * @return true if the given policy allows the MMAP data path to be selected
     */
    static bool isPolicyEnabled(int32_t policy) {
        return (policy == AAUDIO_POLICY_AUTO || policy == AAUDIO_POLICY_ALWAYS);
    }

    // Meyers singleton: constructed lazily on first use, thread-safe in C++11+.
    static AAudioExtensions &getInstance() {
        static AAudioExtensions instance;
        return instance;
    }

    bool isMMapUsed(oboe::AudioStream *oboeStream) {
        AAudioStream *aaudioStream = (AAudioStream *) oboeStream->getUnderlyingStream();
        return isMMapUsed(aaudioStream);
    }

    bool isMMapUsed(AAudioStream *aaudioStream) {
        // Treat "symbols unavailable" as "MMAP not used".
        if (loadSymbols()) return false;
        if (mAAudioStream_isMMap == nullptr) return false;
        return mAAudioStream_isMMap(aaudioStream);
    }

    /**
     * Controls whether the MMAP data path can be selected when opening a stream.
     * It has no effect after the stream has been opened.
     * It only affects the application that calls it. Other apps are not affected.
     *
     * @param enabled true to allow the MMAP path, false to force the legacy path
     * @return 0 or a negative error code
     */
    int32_t setMMapEnabled(bool enabled) {
        if (loadSymbols()) return AAUDIO_ERROR_UNAVAILABLE;
        // BUGFIX: this previously returned `false`, which converts to 0
        // (== Result::OK) and silently reported success even though the
        // policy symbol could not be loaded.
        if (mAAudio_setMMapPolicy == nullptr) return AAUDIO_ERROR_UNAVAILABLE;
        return mAAudio_setMMapPolicy(enabled ? AAUDIO_POLICY_AUTO : AAUDIO_POLICY_NEVER);
    }

    bool isMMapEnabled() {
        if (loadSymbols()) return false;
        if (mAAudio_getMMapPolicy == nullptr) return false;
        int32_t policy = mAAudio_getMMapPolicy();
        return isPolicyEnabled(policy);
    }

    bool isMMapSupported() {
        return mMMapSupported;
    }

    bool isMMapExclusiveSupported() {
        return mMMapExclusiveSupported;
    }

private:
    // Mirrors the hidden aaudio_policy_t values used by the platform.
    enum {
        AAUDIO_POLICY_NEVER = 1,
        AAUDIO_POLICY_AUTO,
        AAUDIO_POLICY_ALWAYS
    };
    typedef int32_t aaudio_policy_t;

    /**
     * Read an integer system property.
     *
     * @param name property name
     * @param defaultValue returned when the property is unset or empty
     * @return the parsed value, or defaultValue
     */
    int getIntegerProperty(const char *name, int defaultValue) {
        int result = defaultValue;
        char valueText[PROP_VALUE_MAX] = {0};
        // __system_property_get() returns the value length; non-zero means
        // a value was present and can be parsed.
        if (__system_property_get(name, valueText) != 0) {
            result = atoi(valueText);
        }
        return result;
    }

    /**
     * Load the function pointers.
     * This can be called multiple times.
     * It should only be called from one thread.
     *
     * @return 0 if successful or negative error.
     */
    aaudio_result_t loadSymbols() {
        // mAAudio_getMMapPolicy is the last symbol assigned below, so a
        // non-null value means a previous call already loaded everything.
        if (mAAudio_getMMapPolicy != nullptr) {
            return 0;
        }

        AAudioLoader *libLoader = AAudioLoader::getInstance();
        int openResult = libLoader->open();
        if (openResult != 0) {
            LOGD("%s() could not open " LIB_AAUDIO_NAME, __func__);
            return AAUDIO_ERROR_UNAVAILABLE;
        }

        void *libHandle = AAudioLoader::getInstance()->getLibHandle();
        if (libHandle == nullptr) {
            LOGE("%s() could not find " LIB_AAUDIO_NAME, __func__);
            return AAUDIO_ERROR_UNAVAILABLE;
        }

        mAAudioStream_isMMap = (bool (*)(AAudioStream *stream))
                dlsym(libHandle, FUNCTION_IS_MMAP);
        if (mAAudioStream_isMMap == nullptr) {
            LOGI("%s() could not find " FUNCTION_IS_MMAP, __func__);
            return AAUDIO_ERROR_UNAVAILABLE;
        }

        mAAudio_setMMapPolicy = (int32_t (*)(aaudio_policy_t policy))
                dlsym(libHandle, FUNCTION_SET_MMAP_POLICY);
        if (mAAudio_setMMapPolicy == nullptr) {
            LOGI("%s() could not find " FUNCTION_SET_MMAP_POLICY, __func__);
            return AAUDIO_ERROR_UNAVAILABLE;
        }

        mAAudio_getMMapPolicy = (aaudio_policy_t (*)())
                dlsym(libHandle, FUNCTION_GET_MMAP_POLICY);
        if (mAAudio_getMMapPolicy == nullptr) {
            LOGI("%s() could not find " FUNCTION_GET_MMAP_POLICY, __func__);
            return AAUDIO_ERROR_UNAVAILABLE;
        }

        return 0;
    }

    bool      mMMapSupported = false;
    bool      mMMapExclusiveSupported = false;

    bool    (*mAAudioStream_isMMap)(AAudioStream *stream) = nullptr;
    int32_t (*mAAudio_setMMapPolicy)(aaudio_policy_t policy) = nullptr;
    aaudio_policy_t (*mAAudio_getMMapPolicy)() = nullptr;
};
} // namespace oboe
#endif //OBOE_AAUDIO_EXTENSIONS_H

View File

@ -24,10 +24,17 @@
namespace oboe { namespace oboe {
AAudioLoader::~AAudioLoader() { AAudioLoader::~AAudioLoader() {
if (mLibHandle != nullptr) { // Issue 360: thread_local variables with non-trivial destructors
dlclose(mLibHandle); // will cause segfaults if the containing library is dlclose()ed on
mLibHandle = nullptr; // devices running M or newer, or devices before M when using a static STL.
} // The simple workaround is to not call dlclose.
// https://github.com/android/ndk/wiki/Changelog-r22#known-issues
//
// The libaaudio and libaaudioclient do not use thread_local.
// But, to be safe, we should avoid dlclose() if possible.
// Because AAudioLoader is a static Singleton, we can safely skip
// calling dlclose() without causing a resource leak.
LOGI("%s() dlclose(%s) not called, OK", __func__, LIB_AAUDIO_NAME);
} }
AAudioLoader* AAudioLoader::getInstance() { AAudioLoader* AAudioLoader::getInstance() {
@ -76,6 +83,15 @@ int AAudioLoader::open() {
builder_setSessionId = load_V_PBI("AAudioStreamBuilder_setSessionId"); builder_setSessionId = load_V_PBI("AAudioStreamBuilder_setSessionId");
} }
if (getSdkVersion() >= __ANDROID_API_S__){
builder_setPackageName = load_V_PBCPH("AAudioStreamBuilder_setPackageName");
builder_setAttributionTag = load_V_PBCPH("AAudioStreamBuilder_setAttributionTag");
}
if (getSdkVersion() >= __ANDROID_API_S_V2__) {
builder_setChannelMask = load_V_PBU("AAudioStreamBuilder_setChannelMask");
}
builder_delete = load_I_PB("AAudioStreamBuilder_delete"); builder_delete = load_I_PB("AAudioStreamBuilder_delete");
@ -90,8 +106,6 @@ int AAudioLoader::open() {
stream_getTimestamp = load_I_PSKPLPL("AAudioStream_getTimestamp"); stream_getTimestamp = load_I_PSKPLPL("AAudioStream_getTimestamp");
stream_isMMapUsed = load_B_PS("AAudioStream_isMMapUsed");
stream_getChannelCount = load_I_PS("AAudioStream_getChannelCount"); stream_getChannelCount = load_I_PS("AAudioStream_getChannelCount");
if (stream_getChannelCount == nullptr) { if (stream_getChannelCount == nullptr) {
// Use old alias if needed. // Use old alias if needed.
@ -128,6 +142,10 @@ int AAudioLoader::open() {
stream_getInputPreset = load_I_PS("AAudioStream_getInputPreset"); stream_getInputPreset = load_I_PS("AAudioStream_getInputPreset");
stream_getSessionId = load_I_PS("AAudioStream_getSessionId"); stream_getSessionId = load_I_PS("AAudioStream_getSessionId");
} }
if (getSdkVersion() >= __ANDROID_API_S_V2__) {
stream_getChannelMask = load_U_PS("AAudioStream_getChannelMask");
}
return 0; return 0;
} }
@ -155,6 +173,12 @@ AAudioLoader::signature_V_PBI AAudioLoader::load_V_PBI(const char *functionName)
return reinterpret_cast<signature_V_PBI>(proc); return reinterpret_cast<signature_V_PBI>(proc);
} }
// Resolve an AAudio function of shape void(AAudioStreamBuilder*, const char*)
// by name from the already-opened libaaudio handle. Logs via
// AAudioLoader_check() if the symbol is missing; may return nullptr.
AAudioLoader::signature_V_PBCPH AAudioLoader::load_V_PBCPH(const char *functionName) {
void *proc = dlsym(mLibHandle, functionName);
AAudioLoader_check(proc, functionName);
return reinterpret_cast<signature_V_PBCPH>(proc);
}
AAudioLoader::signature_V_PBPDPV AAudioLoader::load_V_PBPDPV(const char *functionName) { AAudioLoader::signature_V_PBPDPV AAudioLoader::load_V_PBPDPV(const char *functionName) {
void *proc = dlsym(mLibHandle, functionName); void *proc = dlsym(mLibHandle, functionName);
AAudioLoader_check(proc, functionName); AAudioLoader_check(proc, functionName);
@ -233,10 +257,26 @@ AAudioLoader::signature_I_PSKPLPL AAudioLoader::load_I_PSKPLPL(const char *funct
return reinterpret_cast<signature_I_PSKPLPL>(proc); return reinterpret_cast<signature_I_PSKPLPL>(proc);
} }
// Resolve an AAudio function of shape void(AAudioStreamBuilder*, uint32_t)
// (e.g. AAudioStreamBuilder_setChannelMask) by name. Logs via
// AAudioLoader_check() if the symbol is missing; may return nullptr.
AAudioLoader::signature_V_PBU AAudioLoader::load_V_PBU(const char *functionName) {
void *proc = dlsym(mLibHandle, functionName);
AAudioLoader_check(proc, functionName);
return reinterpret_cast<signature_V_PBU>(proc);
}
// Resolve an AAudio function of shape uint32_t(AAudioStream*)
// (e.g. AAudioStream_getChannelMask) by name. Logs via
// AAudioLoader_check() if the symbol is missing; may return nullptr.
AAudioLoader::signature_U_PS AAudioLoader::load_U_PS(const char *functionName) {
void *proc = dlsym(mLibHandle, functionName);
AAudioLoader_check(proc, functionName);
return reinterpret_cast<signature_U_PS>(proc);
}
// Ensure that all AAudio primitive data types are int32_t // Ensure that all AAudio primitive data types are int32_t
#define ASSERT_INT32(type) static_assert(std::is_same<int32_t, type>::value, \ #define ASSERT_INT32(type) static_assert(std::is_same<int32_t, type>::value, \
#type" must be int32_t") #type" must be int32_t")
// Ensure that all AAudio primitive data types are uint32_t
#define ASSERT_UINT32(type) static_assert(std::is_same<uint32_t, type>::value, \
#type" must be uint32_t")
#define ERRMSG "Oboe constants must match AAudio constants." #define ERRMSG "Oboe constants must match AAudio constants."
// These asserts help verify that the Oboe definitions match the equivalent AAudio definitions. // These asserts help verify that the Oboe definitions match the equivalent AAudio definitions.
@ -304,7 +344,6 @@ AAudioLoader::signature_I_PSKPLPL AAudioLoader::load_I_PSKPLPL(const char *funct
== AAUDIO_PERFORMANCE_MODE_POWER_SAVING, ERRMSG); == AAUDIO_PERFORMANCE_MODE_POWER_SAVING, ERRMSG);
static_assert((int32_t)PerformanceMode::LowLatency static_assert((int32_t)PerformanceMode::LowLatency
== AAUDIO_PERFORMANCE_MODE_LOW_LATENCY, ERRMSG); == AAUDIO_PERFORMANCE_MODE_LOW_LATENCY, ERRMSG);
#endif
// The aaudio_ usage, content and input_preset types were added in NDK 17, // The aaudio_ usage, content and input_preset types were added in NDK 17,
// which is the first version to support Android Pie (API 28). // which is the first version to support Android Pie (API 28).
@ -343,6 +382,69 @@ AAudioLoader::signature_I_PSKPLPL AAudioLoader::load_I_PSKPLPL(const char *funct
static_assert((int32_t)SessionId::None == AAUDIO_SESSION_ID_NONE, ERRMSG); static_assert((int32_t)SessionId::None == AAUDIO_SESSION_ID_NONE, ERRMSG);
static_assert((int32_t)SessionId::Allocate == AAUDIO_SESSION_ID_ALLOCATE, ERRMSG); static_assert((int32_t)SessionId::Allocate == AAUDIO_SESSION_ID_ALLOCATE, ERRMSG);
#endif // __NDK_MAJOR__ >= 17
// The aaudio channel masks were added in NDK 24,
// which is the first version to support Android SC_V2 (API 32).
#if __NDK_MAJOR__ >= 24
ASSERT_UINT32(aaudio_channel_mask_t);
static_assert((uint32_t)ChannelMask::FrontLeft == AAUDIO_CHANNEL_FRONT_LEFT, ERRMSG);
static_assert((uint32_t)ChannelMask::FrontRight == AAUDIO_CHANNEL_FRONT_RIGHT, ERRMSG);
static_assert((uint32_t)ChannelMask::FrontCenter == AAUDIO_CHANNEL_FRONT_CENTER, ERRMSG);
static_assert((uint32_t)ChannelMask::LowFrequency == AAUDIO_CHANNEL_LOW_FREQUENCY, ERRMSG);
static_assert((uint32_t)ChannelMask::BackLeft == AAUDIO_CHANNEL_BACK_LEFT, ERRMSG);
static_assert((uint32_t)ChannelMask::BackRight == AAUDIO_CHANNEL_BACK_RIGHT, ERRMSG);
static_assert((uint32_t)ChannelMask::FrontLeftOfCenter == AAUDIO_CHANNEL_FRONT_LEFT_OF_CENTER, ERRMSG);
static_assert((uint32_t)ChannelMask::FrontRightOfCenter == AAUDIO_CHANNEL_FRONT_RIGHT_OF_CENTER, ERRMSG);
static_assert((uint32_t)ChannelMask::BackCenter == AAUDIO_CHANNEL_BACK_CENTER, ERRMSG);
static_assert((uint32_t)ChannelMask::SideLeft == AAUDIO_CHANNEL_SIDE_LEFT, ERRMSG);
static_assert((uint32_t)ChannelMask::SideRight == AAUDIO_CHANNEL_SIDE_RIGHT, ERRMSG);
static_assert((uint32_t)ChannelMask::TopCenter == AAUDIO_CHANNEL_TOP_CENTER, ERRMSG);
static_assert((uint32_t)ChannelMask::TopFrontLeft == AAUDIO_CHANNEL_TOP_FRONT_LEFT, ERRMSG);
static_assert((uint32_t)ChannelMask::TopFrontCenter == AAUDIO_CHANNEL_TOP_FRONT_CENTER, ERRMSG);
static_assert((uint32_t)ChannelMask::TopFrontRight == AAUDIO_CHANNEL_TOP_FRONT_RIGHT, ERRMSG);
static_assert((uint32_t)ChannelMask::TopBackLeft == AAUDIO_CHANNEL_TOP_BACK_LEFT, ERRMSG);
static_assert((uint32_t)ChannelMask::TopBackCenter == AAUDIO_CHANNEL_TOP_BACK_CENTER, ERRMSG);
static_assert((uint32_t)ChannelMask::TopBackRight == AAUDIO_CHANNEL_TOP_BACK_RIGHT, ERRMSG);
static_assert((uint32_t)ChannelMask::TopSideLeft == AAUDIO_CHANNEL_TOP_SIDE_LEFT, ERRMSG);
static_assert((uint32_t)ChannelMask::TopSideRight == AAUDIO_CHANNEL_TOP_SIDE_RIGHT, ERRMSG);
static_assert((uint32_t)ChannelMask::BottomFrontLeft == AAUDIO_CHANNEL_BOTTOM_FRONT_LEFT, ERRMSG);
static_assert((uint32_t)ChannelMask::BottomFrontCenter == AAUDIO_CHANNEL_BOTTOM_FRONT_CENTER, ERRMSG);
static_assert((uint32_t)ChannelMask::BottomFrontRight == AAUDIO_CHANNEL_BOTTOM_FRONT_RIGHT, ERRMSG);
static_assert((uint32_t)ChannelMask::LowFrequency2 == AAUDIO_CHANNEL_LOW_FREQUENCY_2, ERRMSG);
static_assert((uint32_t)ChannelMask::FrontWideLeft == AAUDIO_CHANNEL_FRONT_WIDE_LEFT, ERRMSG);
static_assert((uint32_t)ChannelMask::FrontWideRight == AAUDIO_CHANNEL_FRONT_WIDE_RIGHT, ERRMSG);
static_assert((uint32_t)ChannelMask::Mono == AAUDIO_CHANNEL_MONO, ERRMSG);
static_assert((uint32_t)ChannelMask::Stereo == AAUDIO_CHANNEL_STEREO, ERRMSG);
static_assert((uint32_t)ChannelMask::CM2Point1 == AAUDIO_CHANNEL_2POINT1, ERRMSG);
static_assert((uint32_t)ChannelMask::Tri == AAUDIO_CHANNEL_TRI, ERRMSG);
static_assert((uint32_t)ChannelMask::TriBack == AAUDIO_CHANNEL_TRI_BACK, ERRMSG);
static_assert((uint32_t)ChannelMask::CM3Point1 == AAUDIO_CHANNEL_3POINT1, ERRMSG);
static_assert((uint32_t)ChannelMask::CM2Point0Point2 == AAUDIO_CHANNEL_2POINT0POINT2, ERRMSG);
static_assert((uint32_t)ChannelMask::CM2Point1Point2 == AAUDIO_CHANNEL_2POINT1POINT2, ERRMSG);
static_assert((uint32_t)ChannelMask::CM3Point0Point2 == AAUDIO_CHANNEL_3POINT0POINT2, ERRMSG);
static_assert((uint32_t)ChannelMask::CM3Point1Point2 == AAUDIO_CHANNEL_3POINT1POINT2, ERRMSG);
static_assert((uint32_t)ChannelMask::Quad == AAUDIO_CHANNEL_QUAD, ERRMSG);
static_assert((uint32_t)ChannelMask::QuadSide == AAUDIO_CHANNEL_QUAD_SIDE, ERRMSG);
static_assert((uint32_t)ChannelMask::Surround == AAUDIO_CHANNEL_SURROUND, ERRMSG);
static_assert((uint32_t)ChannelMask::Penta == AAUDIO_CHANNEL_PENTA, ERRMSG);
static_assert((uint32_t)ChannelMask::CM5Point1 == AAUDIO_CHANNEL_5POINT1, ERRMSG);
static_assert((uint32_t)ChannelMask::CM5Point1Side == AAUDIO_CHANNEL_5POINT1_SIDE, ERRMSG);
static_assert((uint32_t)ChannelMask::CM6Point1 == AAUDIO_CHANNEL_6POINT1, ERRMSG);
static_assert((uint32_t)ChannelMask::CM7Point1 == AAUDIO_CHANNEL_7POINT1, ERRMSG);
static_assert((uint32_t)ChannelMask::CM5Point1Point2 == AAUDIO_CHANNEL_5POINT1POINT2, ERRMSG);
static_assert((uint32_t)ChannelMask::CM5Point1Point4 == AAUDIO_CHANNEL_5POINT1POINT4, ERRMSG);
static_assert((uint32_t)ChannelMask::CM7Point1Point2 == AAUDIO_CHANNEL_7POINT1POINT2, ERRMSG);
static_assert((uint32_t)ChannelMask::CM7Point1Point4 == AAUDIO_CHANNEL_7POINT1POINT4, ERRMSG);
static_assert((uint32_t)ChannelMask::CM9Point1Point4 == AAUDIO_CHANNEL_9POINT1POINT4, ERRMSG);
static_assert((uint32_t)ChannelMask::CM9Point1Point6 == AAUDIO_CHANNEL_9POINT1POINT6, ERRMSG);
static_assert((uint32_t)ChannelMask::FrontBack == AAUDIO_CHANNEL_FRONT_BACK, ERRMSG);
#endif #endif
#endif // AAUDIO_AAUDIO_H
} // namespace oboe } // namespace oboe

View File

@ -52,17 +52,34 @@ typedef int32_t aaudio_usage_t;
typedef int32_t aaudio_content_type_t; typedef int32_t aaudio_content_type_t;
typedef int32_t aaudio_input_preset_t; typedef int32_t aaudio_input_preset_t;
typedef int32_t aaudio_session_id_t; typedef int32_t aaudio_session_id_t;
// There are a few definitions used by Oboe.
#define AAUDIO_OK static_cast<aaudio_result_t>(Result::OK)
#define AAUDIO_ERROR_TIMEOUT static_cast<aaudio_result_t>(Result::ErrorTimeout)
#define AAUDIO_STREAM_STATE_STARTING static_cast<aaudio_stream_state_t>(StreamState::Starting)
#define AAUDIO_STREAM_STATE_STARTED static_cast<aaudio_stream_state_t>(StreamState::Started)
#else #else
#include <aaudio/AAudio.h> #include <aaudio/AAudio.h>
#include <android/ndk-version.h>
#endif #endif
#ifndef __NDK_MAJOR__ #ifndef __NDK_MAJOR__
#define __NDK_MAJOR__ 0 #define __NDK_MAJOR__ 0
#endif #endif
namespace oboe { #if __NDK_MAJOR__ < 24
// Defined in SC_V2
typedef uint32_t aaudio_channel_mask_t;
#endif
#ifndef __ANDROID_API_S__
#define __ANDROID_API_S__ 31
#endif
#ifndef __ANDROID_API_S_V2__
#define __ANDROID_API_S_V2__ 32
#endif
namespace oboe {
/** /**
* The AAudio API was not available in early versions of Android. * The AAudio API was not available in early versions of Android.
@ -82,6 +99,7 @@ class AAudioLoader {
// P = Pointer to following data type // P = Pointer to following data type
// C = Const prefix // C = Const prefix
// H = cHar // H = cHar
// U = uint32_t
typedef int32_t (*signature_I_PPB)(AAudioStreamBuilder **builder); typedef int32_t (*signature_I_PPB)(AAudioStreamBuilder **builder);
typedef const char * (*signature_CPH_I)(int32_t); typedef const char * (*signature_CPH_I)(int32_t);
@ -93,6 +111,11 @@ class AAudioLoader {
// AAudioStreamBuilder_setSampleRate() // AAudioStreamBuilder_setSampleRate()
typedef void (*signature_V_PBI)(AAudioStreamBuilder *, int32_t); typedef void (*signature_V_PBI)(AAudioStreamBuilder *, int32_t);
// AAudioStreamBuilder_setChannelMask()
typedef void (*signature_V_PBU)(AAudioStreamBuilder *, uint32_t);
typedef void (*signature_V_PBCPH)(AAudioStreamBuilder *, const char *);
typedef int32_t (*signature_I_PS)(AAudioStream *); // AAudioStream_getSampleRate() typedef int32_t (*signature_I_PS)(AAudioStream *); // AAudioStream_getSampleRate()
typedef int64_t (*signature_L_PS)(AAudioStream *); // AAudioStream_getFramesRead() typedef int64_t (*signature_L_PS)(AAudioStream *); // AAudioStream_getFramesRead()
// AAudioStream_setBufferSizeInFrames() // AAudioStream_setBufferSizeInFrames()
@ -120,6 +143,8 @@ class AAudioLoader {
typedef bool (*signature_B_PS)(AAudioStream *); typedef bool (*signature_B_PS)(AAudioStream *);
typedef uint32_t (*signature_U_PS)(AAudioStream *);
static AAudioLoader* getInstance(); // singleton static AAudioLoader* getInstance(); // singleton
/** /**
@ -133,6 +158,8 @@ class AAudioLoader {
*/ */
int open(); int open();
void *getLibHandle() const { return mLibHandle; }
// Function pointers into the AAudio shared library. // Function pointers into the AAudio shared library.
signature_I_PPB createStreamBuilder = nullptr; signature_I_PPB createStreamBuilder = nullptr;
@ -147,12 +174,16 @@ class AAudioLoader {
signature_V_PBI builder_setPerformanceMode = nullptr; signature_V_PBI builder_setPerformanceMode = nullptr;
signature_V_PBI builder_setSampleRate = nullptr; signature_V_PBI builder_setSampleRate = nullptr;
signature_V_PBI builder_setSharingMode = nullptr; signature_V_PBI builder_setSharingMode = nullptr;
signature_V_PBU builder_setChannelMask = nullptr;
signature_V_PBI builder_setUsage = nullptr; signature_V_PBI builder_setUsage = nullptr;
signature_V_PBI builder_setContentType = nullptr; signature_V_PBI builder_setContentType = nullptr;
signature_V_PBI builder_setInputPreset = nullptr; signature_V_PBI builder_setInputPreset = nullptr;
signature_V_PBI builder_setSessionId = nullptr; signature_V_PBI builder_setSessionId = nullptr;
signature_V_PBCPH builder_setPackageName = nullptr;
signature_V_PBCPH builder_setAttributionTag = nullptr;
signature_V_PBPDPV builder_setDataCallback = nullptr; signature_V_PBPDPV builder_setDataCallback = nullptr;
signature_V_PBPEPV builder_setErrorCallback = nullptr; signature_V_PBPEPV builder_setErrorCallback = nullptr;
@ -167,8 +198,6 @@ class AAudioLoader {
signature_I_PSKPLPL stream_getTimestamp = nullptr; signature_I_PSKPLPL stream_getTimestamp = nullptr;
signature_B_PS stream_isMMapUsed = nullptr;
signature_I_PS stream_close = nullptr; signature_I_PS stream_close = nullptr;
signature_I_PS stream_getChannelCount = nullptr; signature_I_PS stream_getChannelCount = nullptr;
@ -199,6 +228,8 @@ class AAudioLoader {
signature_I_PS stream_getInputPreset = nullptr; signature_I_PS stream_getInputPreset = nullptr;
signature_I_PS stream_getSessionId = nullptr; signature_I_PS stream_getSessionId = nullptr;
signature_U_PS stream_getChannelMask = nullptr;
private: private:
AAudioLoader() {} AAudioLoader() {}
~AAudioLoader(); ~AAudioLoader();
@ -207,6 +238,7 @@ class AAudioLoader {
signature_I_PPB load_I_PPB(const char *name); signature_I_PPB load_I_PPB(const char *name);
signature_CPH_I load_CPH_I(const char *name); signature_CPH_I load_CPH_I(const char *name);
signature_V_PBI load_V_PBI(const char *name); signature_V_PBI load_V_PBI(const char *name);
signature_V_PBCPH load_V_PBCPH(const char *name);
signature_V_PBPDPV load_V_PBPDPV(const char *name); signature_V_PBPDPV load_V_PBPDPV(const char *name);
signature_V_PBPEPV load_V_PBPEPV(const char *name); signature_V_PBPEPV load_V_PBPEPV(const char *name);
signature_I_PB load_I_PB(const char *name); signature_I_PB load_I_PB(const char *name);
@ -220,6 +252,8 @@ class AAudioLoader {
signature_I_PSCPVIL load_I_PSCPVIL(const char *name); signature_I_PSCPVIL load_I_PSCPVIL(const char *name);
signature_I_PSTPTL load_I_PSTPTL(const char *name); signature_I_PSTPTL load_I_PSTPTL(const char *name);
signature_I_PSKPLPL load_I_PSKPLPL(const char *name); signature_I_PSKPLPL load_I_PSKPLPL(const char *name);
signature_V_PBU load_V_PBU(const char *name);
signature_U_PS load_U_PS(const char *name);
void *mLibHandle = nullptr; void *mLibHandle = nullptr;
}; };

View File

@ -23,6 +23,7 @@
#include "common/AudioClock.h" #include "common/AudioClock.h"
#include "common/OboeDebug.h" #include "common/OboeDebug.h"
#include "oboe/Utilities.h" #include "oboe/Utilities.h"
#include "AAudioExtensions.h"
#ifdef __ANDROID__ #ifdef __ANDROID__
#include <sys/system_properties.h> #include <sys/system_properties.h>
@ -61,15 +62,17 @@ static aaudio_data_callback_result_t oboe_aaudio_data_callback_proc(
// It calls app error callbacks from a static function in case the stream gets deleted. // It calls app error callbacks from a static function in case the stream gets deleted.
static void oboe_aaudio_error_thread_proc(AudioStreamAAudio *oboeStream, static void oboe_aaudio_error_thread_proc(AudioStreamAAudio *oboeStream,
Result error) { Result error) {
LOGD("%s() - entering >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>", __func__); LOGD("%s(,%d) - entering >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>", __func__, error);
AudioStreamErrorCallback *errorCallback = oboeStream->getErrorCallback();
if (errorCallback == nullptr) return; // should be impossible
bool isErrorHandled = errorCallback->onError(oboeStream, error);
if (!isErrorHandled) {
oboeStream->requestStop(); oboeStream->requestStop();
if (oboeStream->getCallback() != nullptr) { errorCallback->onErrorBeforeClose(oboeStream, error);
oboeStream->getCallback()->onErrorBeforeClose(oboeStream, error);
}
oboeStream->close(); oboeStream->close();
if (oboeStream->getCallback() != nullptr) {
// Warning, oboeStream may get deleted by this callback. // Warning, oboeStream may get deleted by this callback.
oboeStream->getCallback()->onErrorAfterClose(oboeStream, error); errorCallback->onErrorAfterClose(oboeStream, error);
} }
LOGD("%s() - exiting <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<", __func__); LOGD("%s() - exiting <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<", __func__);
} }
@ -92,7 +95,7 @@ AudioStreamAAudio::AudioStreamAAudio(const AudioStreamBuilder &builder)
: AudioStream(builder) : AudioStream(builder)
, mAAudioStream(nullptr) { , mAAudioStream(nullptr) {
mCallbackThreadEnabled.store(false); mCallbackThreadEnabled.store(false);
isSupported(); mLibLoader = AAudioLoader::getInstance();
} }
bool AudioStreamAAudio::isSupported() { bool AudioStreamAAudio::isSupported() {
@ -101,16 +104,29 @@ bool AudioStreamAAudio::isSupported() {
return openResult == 0; return openResult == 0;
} }
// Static 'C' wrapper for the error callback method. // Static method for the error callback.
// We use a method so we can access protected methods on the stream.
// Launch a thread to handle the error. // Launch a thread to handle the error.
// That other thread can safely stop, close and delete the stream. // That other thread can safely stop, close and delete the stream.
void AudioStreamAAudio::internalErrorCallback( void AudioStreamAAudio::internalErrorCallback(
AAudioStream *stream, AAudioStream *stream,
void *userData, void *userData,
aaudio_result_t error) { aaudio_result_t error) {
oboe::Result oboeResult = static_cast<Result>(error);
AudioStreamAAudio *oboeStream = reinterpret_cast<AudioStreamAAudio*>(userData); AudioStreamAAudio *oboeStream = reinterpret_cast<AudioStreamAAudio*>(userData);
// Prevents deletion of the stream if the app is using AudioStreamBuilder::openSharedStream() // Coerce the error code if needed to workaround a regression in RQ1A that caused
// the wrong code to be passed when headsets plugged in. See b/173928197.
if (OboeGlobals::areWorkaroundsEnabled()
&& getSdkVersion() == __ANDROID_API_R__
&& oboeResult == oboe::Result::ErrorTimeout) {
oboeResult = oboe::Result::ErrorDisconnected;
LOGD("%s() ErrorTimeout changed to ErrorDisconnected to fix b/173928197", __func__);
}
oboeStream->mErrorCallbackResult = oboeResult;
// Prevents deletion of the stream if the app is using AudioStreamBuilder::openStream(shared_ptr)
std::shared_ptr<AudioStream> sharedStream = oboeStream->lockWeakThis(); std::shared_ptr<AudioStream> sharedStream = oboeStream->lockWeakThis();
// These checks should be enough because we assume that the stream close() // These checks should be enough because we assume that the stream close()
@ -118,16 +134,14 @@ void AudioStreamAAudio::internalErrorCallback(
if (oboeStream->wasErrorCallbackCalled()) { // block extra error callbacks if (oboeStream->wasErrorCallbackCalled()) { // block extra error callbacks
LOGE("%s() multiple error callbacks called!", __func__); LOGE("%s() multiple error callbacks called!", __func__);
} else if (stream != oboeStream->getUnderlyingStream()) { } else if (stream != oboeStream->getUnderlyingStream()) {
LOGW("%s() stream already closed", __func__); // can happen if there are bugs LOGW("%s() stream already closed or closing", __func__); // might happen if there are bugs
} else if (sharedStream) { } else if (sharedStream) {
// Handle error on a separate thread using shared pointer. // Handle error on a separate thread using shared pointer.
std::thread t(oboe_aaudio_error_thread_proc_shared, sharedStream, std::thread t(oboe_aaudio_error_thread_proc_shared, sharedStream, oboeResult);
static_cast<Result>(error));
t.detach(); t.detach();
} else { } else {
// Handle error on a separate thread. // Handle error on a separate thread.
std::thread t(oboe_aaudio_error_thread_proc, oboeStream, std::thread t(oboe_aaudio_error_thread_proc, oboeStream, oboeResult);
static_cast<Result>(error));
t.detach(); t.detach();
} }
} }
@ -191,7 +205,18 @@ Result AudioStreamAAudio::open() {
} }
mLibLoader->builder_setBufferCapacityInFrames(aaudioBuilder, capacity); mLibLoader->builder_setBufferCapacityInFrames(aaudioBuilder, capacity);
// Channel mask was added in SC_V2. Given the corresponding channel count of selected channel
// mask may be different from selected channel count, the last set value will be respected.
// If channel count is set after channel mask, the previously set channel mask will be cleared.
// If channel mask is set after channel count, the channel count will be automatically
// calculated from selected channel mask. In that case, only set channel mask when the API
// is available and the channel mask is specified.
if (mLibLoader->builder_setChannelMask != nullptr && mChannelMask != ChannelMask::Unspecified) {
mLibLoader->builder_setChannelMask(aaudioBuilder,
static_cast<aaudio_channel_mask_t>(mChannelMask));
} else {
mLibLoader->builder_setChannelCount(aaudioBuilder, mChannelCount); mLibLoader->builder_setChannelCount(aaudioBuilder, mChannelCount);
}
mLibLoader->builder_setDeviceId(aaudioBuilder, mDeviceId); mLibLoader->builder_setDeviceId(aaudioBuilder, mDeviceId);
mLibLoader->builder_setDirection(aaudioBuilder, static_cast<aaudio_direction_t>(mDirection)); mLibLoader->builder_setDirection(aaudioBuilder, static_cast<aaudio_direction_t>(mDirection));
mLibLoader->builder_setFormat(aaudioBuilder, static_cast<aaudio_format_t>(mFormat)); mLibLoader->builder_setFormat(aaudioBuilder, static_cast<aaudio_format_t>(mFormat));
@ -213,8 +238,13 @@ Result AudioStreamAAudio::open() {
} }
if (mLibLoader->builder_setInputPreset != nullptr) { if (mLibLoader->builder_setInputPreset != nullptr) {
aaudio_input_preset_t inputPreset = mInputPreset;
if (getSdkVersion() <= __ANDROID_API_P__ && inputPreset == InputPreset::VoicePerformance) {
LOGD("InputPreset::VoicePerformance not supported before Q. Using VoiceRecognition.");
inputPreset = InputPreset::VoiceRecognition; // most similar preset
}
mLibLoader->builder_setInputPreset(aaudioBuilder, mLibLoader->builder_setInputPreset(aaudioBuilder,
static_cast<aaudio_input_preset_t>(mInputPreset)); static_cast<aaudio_input_preset_t>(inputPreset));
} }
if (mLibLoader->builder_setSessionId != nullptr) { if (mLibLoader->builder_setSessionId != nullptr) {
@ -222,15 +252,30 @@ Result AudioStreamAAudio::open() {
static_cast<aaudio_session_id_t>(mSessionId)); static_cast<aaudio_session_id_t>(mSessionId));
} }
// TODO get more parameters from the builder? // These were added in S so we have to check for the function pointer.
if (mLibLoader->builder_setPackageName != nullptr && !mPackageName.empty()) {
mLibLoader->builder_setPackageName(aaudioBuilder,
mPackageName.c_str());
}
if (mStreamCallback != nullptr) { if (mLibLoader->builder_setAttributionTag != nullptr && !mAttributionTag.empty()) {
mLibLoader->builder_setAttributionTag(aaudioBuilder,
mAttributionTag.c_str());
}
if (isDataCallbackSpecified()) {
mLibLoader->builder_setDataCallback(aaudioBuilder, oboe_aaudio_data_callback_proc, this); mLibLoader->builder_setDataCallback(aaudioBuilder, oboe_aaudio_data_callback_proc, this);
mLibLoader->builder_setFramesPerDataCallback(aaudioBuilder, getFramesPerCallback()); mLibLoader->builder_setFramesPerDataCallback(aaudioBuilder, getFramesPerDataCallback());
// If the data callback is not being used then the write method will return an error
// and the app can stop and close the stream. if (!isErrorCallbackSpecified()) {
// The app did not specify a callback so we should specify
// our own so the stream gets closed and stopped.
mErrorCallback = &mDefaultErrorCallback;
}
mLibLoader->builder_setErrorCallback(aaudioBuilder, internalErrorCallback, this); mLibLoader->builder_setErrorCallback(aaudioBuilder, internalErrorCallback, this);
} }
// Else if the data callback is not being used then the write method will return an error
// and the app can stop and close the stream.
// ============= OPEN THE STREAM ================ // ============= OPEN THE STREAM ================
{ {
@ -239,6 +284,11 @@ Result AudioStreamAAudio::open() {
mAAudioStream.store(stream); mAAudioStream.store(stream);
} }
if (result != Result::OK) { if (result != Result::OK) {
// Warn developer because ErrorInternal is not very informative.
if (result == Result::ErrorInternal && mDirection == Direction::Input) {
LOGW("AudioStreamAAudio.open() may have failed due to lack of "
"audio recording permission.");
}
goto error2; goto error2;
} }
@ -252,7 +302,7 @@ Result AudioStreamAAudio::open() {
mLibLoader->stream_getPerformanceMode(mAAudioStream)); mLibLoader->stream_getPerformanceMode(mAAudioStream));
mBufferCapacityInFrames = mLibLoader->stream_getBufferCapacity(mAAudioStream); mBufferCapacityInFrames = mLibLoader->stream_getBufferCapacity(mAAudioStream);
mBufferSizeInFrames = mLibLoader->stream_getBufferSize(mAAudioStream); mBufferSizeInFrames = mLibLoader->stream_getBufferSize(mAAudioStream);
mFramesPerBurst = mLibLoader->stream_getFramesPerBurst(mAAudioStream);
// These were added in P so we have to check for the function pointer. // These were added in P so we have to check for the function pointer.
if (mLibLoader->stream_getUsage != nullptr) { if (mLibLoader->stream_getUsage != nullptr) {
@ -270,10 +320,16 @@ Result AudioStreamAAudio::open() {
mSessionId = SessionId::None; mSessionId = SessionId::None;
} }
if (mLibLoader->stream_getChannelMask != nullptr) {
mChannelMask = static_cast<ChannelMask>(mLibLoader->stream_getChannelMask(mAAudioStream));
}
LOGD("AudioStreamAAudio.open() format=%d, sampleRate=%d, capacity = %d", LOGD("AudioStreamAAudio.open() format=%d, sampleRate=%d, capacity = %d",
static_cast<int>(mFormat), static_cast<int>(mSampleRate), static_cast<int>(mFormat), static_cast<int>(mSampleRate),
static_cast<int>(mBufferCapacityInFrames)); static_cast<int>(mBufferCapacityInFrames));
calculateDefaultDelayBeforeCloseMillis();
error2: error2:
mLibLoader->builder_delete(aaudioBuilder); mLibLoader->builder_delete(aaudioBuilder);
LOGD("AudioStreamAAudio.open: AAudioStream_Open() returned %s", LOGD("AudioStreamAAudio.open: AAudioStream_Open() returned %s",
@ -282,24 +338,49 @@ error2:
} }
Result AudioStreamAAudio::close() { Result AudioStreamAAudio::close() {
// The main reason we have this mutex if to prevent a collision between a call // Prevent two threads from closing the stream at the same time and crashing.
// by the application to stop a stream at the same time that an onError callback // This could occur, for example, if an application called close() at the same
// is being executed because of a disconnect. The close will delete the stream, // time that an onError callback was being executed because of a disconnect.
// which could otherwise cause the requestStop() to crash.
std::lock_guard<std::mutex> lock(mLock); std::lock_guard<std::mutex> lock(mLock);
AudioStream::close(); AudioStream::close();
// This will delete the AAudio stream object so we need to null out the pointer. AAudioStream *stream = nullptr;
AAudioStream *stream = mAAudioStream.exchange(nullptr); {
// Wait for any methods using mAAudioStream to finish.
std::unique_lock<std::shared_mutex> lock2(mAAudioStreamLock);
// Closing will delete *mAAudioStream so we need to null out the pointer atomically.
stream = mAAudioStream.exchange(nullptr);
}
if (stream != nullptr) { if (stream != nullptr) {
if (OboeGlobals::areWorkaroundsEnabled()) {
// Make sure we are really stopped. Do it under mLock
// so another thread cannot call requestStart() right before the close.
requestStop_l(stream);
sleepBeforeClose();
}
return static_cast<Result>(mLibLoader->stream_close(stream)); return static_cast<Result>(mLibLoader->stream_close(stream));
} else { } else {
return Result::ErrorClosed; return Result::ErrorClosed;
} }
} }
DataCallbackResult AudioStreamAAudio::callOnAudioReady(AAudioStream *stream, static void oboe_stop_thread_proc(AudioStream *oboeStream) {
if (oboeStream != nullptr) {
oboeStream->requestStop();
}
}
void AudioStreamAAudio::launchStopThread() {
// Prevent multiple stop threads from being launched.
if (mStopThreadAllowed.exchange(false)) {
// Stop this stream on a separate thread
std::thread t(oboe_stop_thread_proc, this);
t.detach();
}
}
DataCallbackResult AudioStreamAAudio::callOnAudioReady(AAudioStream * /*stream*/,
void *audioData, void *audioData,
int32_t numFrames) { int32_t numFrames) {
DataCallbackResult result = fireDataCallback(audioData, numFrames); DataCallbackResult result = fireDataCallback(audioData, numFrames);
@ -312,16 +393,12 @@ DataCallbackResult AudioStreamAAudio::callOnAudioReady(AAudioStream *stream,
LOGE("Oboe callback returned unexpected value = %d", result); LOGE("Oboe callback returned unexpected value = %d", result);
} }
if (getSdkVersion() <= __ANDROID_API_P__) { // Returning Stop caused various problems before S. See #1230
if (OboeGlobals::areWorkaroundsEnabled() && getSdkVersion() <= __ANDROID_API_R__) {
launchStopThread(); launchStopThread();
if (isMMapUsed()) {
return DataCallbackResult::Stop;
} else {
// Legacy stream <= API_P cannot be restarted after returning Stop.
return DataCallbackResult::Continue; return DataCallbackResult::Continue;
}
} else { } else {
return DataCallbackResult::Stop; // OK >= API_Q return DataCallbackResult::Stop; // OK >= API_S
} }
} }
} }
@ -338,9 +415,10 @@ Result AudioStreamAAudio::requestStart() {
return Result::OK; return Result::OK;
} }
} }
if (mStreamCallback != nullptr) { // Was a callback requested? if (isDataCallbackSpecified()) {
setDataCallbackEnabled(true); setDataCallbackEnabled(true);
} }
mStopThreadAllowed = true;
return static_cast<Result>(mLibLoader->stream_requestStart(stream)); return static_cast<Result>(mLibLoader->stream_requestStart(stream));
} else { } else {
return Result::ErrorClosed; return Result::ErrorClosed;
@ -385,6 +463,14 @@ Result AudioStreamAAudio::requestStop() {
std::lock_guard<std::mutex> lock(mLock); std::lock_guard<std::mutex> lock(mLock);
AAudioStream *stream = mAAudioStream.load(); AAudioStream *stream = mAAudioStream.load();
if (stream != nullptr) { if (stream != nullptr) {
return requestStop_l(stream);
} else {
return Result::ErrorClosed;
}
}
// Call under mLock
Result AudioStreamAAudio::requestStop_l(AAudioStream *stream) {
// Avoid state machine errors in O_MR1. // Avoid state machine errors in O_MR1.
if (getSdkVersion() <= __ANDROID_API_O_MR1__) { if (getSdkVersion() <= __ANDROID_API_O_MR1__) {
StreamState state = static_cast<StreamState>(mLibLoader->stream_getState(stream)); StreamState state = static_cast<StreamState>(mLibLoader->stream_getState(stream));
@ -393,14 +479,12 @@ Result AudioStreamAAudio::requestStop() {
} }
} }
return static_cast<Result>(mLibLoader->stream_requestStop(stream)); return static_cast<Result>(mLibLoader->stream_requestStop(stream));
} else {
return Result::ErrorClosed;
}
} }
ResultWithValue<int32_t> AudioStreamAAudio::write(const void *buffer, ResultWithValue<int32_t> AudioStreamAAudio::write(const void *buffer,
int32_t numFrames, int32_t numFrames,
int64_t timeoutNanoseconds) { int64_t timeoutNanoseconds) {
std::shared_lock<std::shared_mutex> lock(mAAudioStreamLock);
AAudioStream *stream = mAAudioStream.load(); AAudioStream *stream = mAAudioStream.load();
if (stream != nullptr) { if (stream != nullptr) {
int32_t result = mLibLoader->stream_write(mAAudioStream, buffer, int32_t result = mLibLoader->stream_write(mAAudioStream, buffer,
@ -414,6 +498,7 @@ ResultWithValue<int32_t> AudioStreamAAudio::write(const void *buffer,
ResultWithValue<int32_t> AudioStreamAAudio::read(void *buffer, ResultWithValue<int32_t> AudioStreamAAudio::read(void *buffer,
int32_t numFrames, int32_t numFrames,
int64_t timeoutNanoseconds) { int64_t timeoutNanoseconds) {
std::shared_lock<std::shared_mutex> lock(mAAudioStreamLock);
AAudioStream *stream = mAAudioStream.load(); AAudioStream *stream = mAAudioStream.load();
if (stream != nullptr) { if (stream != nullptr) {
int32_t result = mLibLoader->stream_read(mAAudioStream, buffer, int32_t result = mLibLoader->stream_read(mAAudioStream, buffer,
@ -497,29 +582,27 @@ Result AudioStreamAAudio::waitForStateChange(StreamState currentState,
} }
ResultWithValue<int32_t> AudioStreamAAudio::setBufferSizeInFrames(int32_t requestedFrames) { ResultWithValue<int32_t> AudioStreamAAudio::setBufferSizeInFrames(int32_t requestedFrames) {
AAudioStream *stream = mAAudioStream.load();
if (stream != nullptr) {
int32_t adjustedFrames = requestedFrames; int32_t adjustedFrames = requestedFrames;
if (adjustedFrames > mBufferCapacityInFrames) { if (adjustedFrames > mBufferCapacityInFrames) {
adjustedFrames = mBufferCapacityInFrames; adjustedFrames = mBufferCapacityInFrames;
} }
// This calls getBufferSize() so avoid recursive lock.
adjustedFrames = QuirksManager::getInstance().clipBufferSize(*this, adjustedFrames); adjustedFrames = QuirksManager::getInstance().clipBufferSize(*this, adjustedFrames);
std::shared_lock<std::shared_mutex> lock(mAAudioStreamLock);
AAudioStream *stream = mAAudioStream.load();
if (stream != nullptr) {
int32_t newBufferSize = mLibLoader->stream_setBufferSize(mAAudioStream, adjustedFrames); int32_t newBufferSize = mLibLoader->stream_setBufferSize(mAAudioStream, adjustedFrames);
// Cache the result if it's valid // Cache the result if it's valid
if (newBufferSize > 0) mBufferSizeInFrames = newBufferSize; if (newBufferSize > 0) mBufferSizeInFrames = newBufferSize;
return ResultWithValue<int32_t>::createBasedOnSign(newBufferSize); return ResultWithValue<int32_t>::createBasedOnSign(newBufferSize);
} else { } else {
return ResultWithValue<int32_t>(Result::ErrorClosed); return ResultWithValue<int32_t>(Result::ErrorClosed);
} }
} }
StreamState AudioStreamAAudio::getState() const { StreamState AudioStreamAAudio::getState() {
std::shared_lock<std::shared_mutex> lock(mAAudioStreamLock);
AAudioStream *stream = mAAudioStream.load(); AAudioStream *stream = mAAudioStream.load();
if (stream != nullptr) { if (stream != nullptr) {
aaudio_stream_state_t aaudioState = mLibLoader->stream_getState(stream); aaudio_stream_state_t aaudioState = mLibLoader->stream_getState(stream);
@ -536,6 +619,7 @@ StreamState AudioStreamAAudio::getState() const {
} }
int32_t AudioStreamAAudio::getBufferSizeInFrames() { int32_t AudioStreamAAudio::getBufferSizeInFrames() {
std::shared_lock<std::shared_mutex> lock(mAAudioStreamLock);
AAudioStream *stream = mAAudioStream.load(); AAudioStream *stream = mAAudioStream.load();
if (stream != nullptr) { if (stream != nullptr) {
mBufferSizeInFrames = mLibLoader->stream_getBufferSize(stream); mBufferSizeInFrames = mLibLoader->stream_getBufferSize(stream);
@ -543,29 +627,34 @@ int32_t AudioStreamAAudio::getBufferSizeInFrames() {
return mBufferSizeInFrames; return mBufferSizeInFrames;
} }
int32_t AudioStreamAAudio::getFramesPerBurst() {
AAudioStream *stream = mAAudioStream.load();
if (stream != nullptr) {
mFramesPerBurst = mLibLoader->stream_getFramesPerBurst(stream);
}
return mFramesPerBurst;
}
void AudioStreamAAudio::updateFramesRead() { void AudioStreamAAudio::updateFramesRead() {
std::shared_lock<std::shared_mutex> lock(mAAudioStreamLock);
AAudioStream *stream = mAAudioStream.load(); AAudioStream *stream = mAAudioStream.load();
// Set to 1 for debugging race condition #1180 with mAAudioStream.
// See also DEBUG_CLOSE_RACE in OboeTester.
// This was left in the code so that we could test the fix again easily in the future.
// We could not trigger the race condition without adding these get calls and the sleeps.
#define DEBUG_CLOSE_RACE 0
#if DEBUG_CLOSE_RACE
// This is used when testing race conditions with close().
// See DEBUG_CLOSE_RACE in OboeTester
AudioClock::sleepForNanos(400 * kNanosPerMillisecond);
#endif // DEBUG_CLOSE_RACE
if (stream != nullptr) { if (stream != nullptr) {
mFramesRead = mLibLoader->stream_getFramesRead(stream); mFramesRead = mLibLoader->stream_getFramesRead(stream);
} }
} }
void AudioStreamAAudio::updateFramesWritten() { void AudioStreamAAudio::updateFramesWritten() {
std::shared_lock<std::shared_mutex> lock(mAAudioStreamLock);
AAudioStream *stream = mAAudioStream.load(); AAudioStream *stream = mAAudioStream.load();
if (stream != nullptr) { if (stream != nullptr) {
mFramesWritten = mLibLoader->stream_getFramesWritten(stream); mFramesWritten = mLibLoader->stream_getFramesWritten(stream);
} }
} }
ResultWithValue<int32_t> AudioStreamAAudio::getXRunCount() const { ResultWithValue<int32_t> AudioStreamAAudio::getXRunCount() {
std::shared_lock<std::shared_mutex> lock(mAAudioStreamLock);
AAudioStream *stream = mAAudioStream.load(); AAudioStream *stream = mAAudioStream.load();
if (stream != nullptr) { if (stream != nullptr) {
return ResultWithValue<int32_t>::createBasedOnSign(mLibLoader->stream_getXRunCount(stream)); return ResultWithValue<int32_t>::createBasedOnSign(mLibLoader->stream_getXRunCount(stream));
@ -577,11 +666,12 @@ ResultWithValue<int32_t> AudioStreamAAudio::getXRunCount() const {
Result AudioStreamAAudio::getTimestamp(clockid_t clockId, Result AudioStreamAAudio::getTimestamp(clockid_t clockId,
int64_t *framePosition, int64_t *framePosition,
int64_t *timeNanoseconds) { int64_t *timeNanoseconds) {
AAudioStream *stream = mAAudioStream.load();
if (stream != nullptr) {
if (getState() != StreamState::Started) { if (getState() != StreamState::Started) {
return Result::ErrorInvalidState; return Result::ErrorInvalidState;
} }
std::shared_lock<std::shared_mutex> lock(mAAudioStreamLock);
AAudioStream *stream = mAAudioStream.load();
if (stream != nullptr) {
return static_cast<Result>(mLibLoader->stream_getTimestamp(stream, clockId, return static_cast<Result>(mLibLoader->stream_getTimestamp(stream, clockId,
framePosition, timeNanoseconds)); framePosition, timeNanoseconds));
} else { } else {
@ -590,11 +680,6 @@ Result AudioStreamAAudio::getTimestamp(clockid_t clockId,
} }
ResultWithValue<double> AudioStreamAAudio::calculateLatencyMillis() { ResultWithValue<double> AudioStreamAAudio::calculateLatencyMillis() {
AAudioStream *stream = mAAudioStream.load();
if (stream == nullptr) {
return ResultWithValue<double>(Result::ErrorClosed);
}
// Get the time that a known audio frame was presented. // Get the time that a known audio frame was presented.
int64_t hardwareFrameIndex; int64_t hardwareFrameIndex;
int64_t hardwareFrameHardwareTime; int64_t hardwareFrameHardwareTime;
@ -632,9 +717,10 @@ ResultWithValue<double> AudioStreamAAudio::calculateLatencyMillis() {
} }
bool AudioStreamAAudio::isMMapUsed() { bool AudioStreamAAudio::isMMapUsed() {
std::shared_lock<std::shared_mutex> lock(mAAudioStreamLock);
AAudioStream *stream = mAAudioStream.load(); AAudioStream *stream = mAAudioStream.load();
if (stream != nullptr) { if (stream != nullptr) {
return mLibLoader->stream_isMMapUsed(stream); return AAudioExtensions::getInstance().isMMapUsed(stream);
} else { } else {
return false; return false;
} }

View File

@ -18,6 +18,7 @@
#define OBOE_STREAM_AAUDIO_H_ #define OBOE_STREAM_AAUDIO_H_
#include <atomic> #include <atomic>
#include <shared_mutex>
#include <mutex> #include <mutex>
#include <thread> #include <thread>
@ -67,8 +68,7 @@ public:
ResultWithValue<int32_t> setBufferSizeInFrames(int32_t requestedFrames) override; ResultWithValue<int32_t> setBufferSizeInFrames(int32_t requestedFrames) override;
int32_t getBufferSizeInFrames() override; int32_t getBufferSizeInFrames() override;
int32_t getFramesPerBurst() override; ResultWithValue<int32_t> getXRunCount() override;
ResultWithValue<int32_t> getXRunCount() const override;
bool isXRunCountSupported() const override { return true; } bool isXRunCountSupported() const override { return true; }
ResultWithValue<double> calculateLatencyMillis() override; ResultWithValue<double> calculateLatencyMillis() override;
@ -81,7 +81,7 @@ public:
int64_t *framePosition, int64_t *framePosition,
int64_t *timeNanoseconds) override; int64_t *timeNanoseconds) override;
StreamState getState() const override; StreamState getState() override;
AudioApi getAudioApi() const override { AudioApi getAudioApi() const override {
return AudioApi::AAudio; return AudioApi::AAudio;
@ -108,14 +108,33 @@ protected:
void logUnsupportedAttributes(); void logUnsupportedAttributes();
private:
// Must call under mLock. And stream must NOT be nullptr.
Result requestStop_l(AAudioStream *stream);
/**
* Launch a thread that will stop the stream.
*/
void launchStopThread();
public:
int32_t getMDelayBeforeCloseMillis() const;
void setDelayBeforeCloseMillis(int32_t mDelayBeforeCloseMillis);
private: private:
std::atomic<bool> mCallbackThreadEnabled; std::atomic<bool> mCallbackThreadEnabled;
std::atomic<bool> mStopThreadAllowed{false};
// pointer to the underlying AAudio stream, valid if open, null if closed // pointer to the underlying 'C' AAudio stream, valid if open, null if closed
std::atomic<AAudioStream *> mAAudioStream{nullptr}; std::atomic<AAudioStream *> mAAudioStream{nullptr};
std::shared_mutex mAAudioStreamLock; // to protect mAAudioStream while closing
static AAudioLoader *mLibLoader; static AAudioLoader *mLibLoader;
// We may not use this but it is so small that it is not worth allocating dynamically.
AudioStreamErrorCallback mDefaultErrorCallback;
}; };
} // namespace oboe } // namespace oboe

View File

@ -20,7 +20,7 @@ using namespace oboe;
using namespace flowgraph; using namespace flowgraph;
int32_t AudioSourceCaller::onProcessFixedBlock(uint8_t *buffer, int32_t numBytes) { int32_t AudioSourceCaller::onProcessFixedBlock(uint8_t *buffer, int32_t numBytes) {
oboe::AudioStreamCallback *callback = mStream->getCallback(); AudioStreamDataCallback *callback = mStream->getDataCallback();
int32_t result = 0; int32_t result = 0;
int32_t numFrames = numBytes / mStream->getBytesPerFrame(); int32_t numFrames = numBytes / mStream->getBytesPerFrame();
if (callback != nullptr) { if (callback != nullptr) {

View File

@ -55,14 +55,14 @@ void AudioStream::checkScheduler() {
DataCallbackResult AudioStream::fireDataCallback(void *audioData, int32_t numFrames) { DataCallbackResult AudioStream::fireDataCallback(void *audioData, int32_t numFrames) {
if (!isDataCallbackEnabled()) { if (!isDataCallbackEnabled()) {
LOGW("AudioStream::%s() called with data callback disabled!", __func__); LOGW("AudioStream::%s() called with data callback disabled!", __func__);
return DataCallbackResult::Stop; // We should not be getting called any more. return DataCallbackResult::Stop; // Should not be getting called
} }
DataCallbackResult result; DataCallbackResult result;
if (mStreamCallback == nullptr) { if (mDataCallback) {
result = onDefaultCallback(audioData, numFrames); result = mDataCallback->onAudioReady(this, audioData, numFrames);
} else { } else {
result = mStreamCallback->onAudioReady(this, audioData, numFrames); result = onDefaultCallback(audioData, numFrames);
} }
// On Oreo, we might get called after returning stop. // On Oreo, we might get called after returning stop.
// So block that here. // So block that here.
@ -196,16 +196,13 @@ ResultWithValue<FrameTimestamp> AudioStream::getTimestamp(clockid_t clockId) {
} }
} }
static void oboe_stop_thread_proc(AudioStream *oboeStream) { void AudioStream::calculateDefaultDelayBeforeCloseMillis() {
if (oboeStream != nullptr) { // Calculate delay time before close based on burst duration.
oboeStream->requestStop(); // Start with a burst duration then add 1 msec as a safety margin.
} mDelayBeforeCloseMillis = std::max(kMinDelayBeforeCloseMillis,
} 1 + ((mFramesPerBurst * 1000) / getSampleRate()));
LOGD("calculateDefaultDelayBeforeCloseMillis() default = %d",
void AudioStream::launchStopThread() { static_cast<int>(mDelayBeforeCloseMillis));
// Stop this stream on a separate thread
std::thread t(oboe_stop_thread_proc, this);
t.detach();
} }
} // namespace oboe } // namespace oboe

View File

@ -16,6 +16,8 @@
#include <sys/types.h> #include <sys/types.h>
#include "aaudio/AAudioExtensions.h"
#include "aaudio/AudioStreamAAudio.h" #include "aaudio/AudioStreamAAudio.h"
#include "FilterAudioStream.h" #include "FilterAudioStream.h"
#include "OboeDebug.h" #include "OboeDebug.h"
@ -80,13 +82,19 @@ AudioStream *AudioStreamBuilder::build() {
} }
bool AudioStreamBuilder::isCompatible(AudioStreamBase &other) { bool AudioStreamBuilder::isCompatible(AudioStreamBase &other) {
return getSampleRate() == other.getSampleRate() return (getSampleRate() == oboe::Unspecified || getSampleRate() == other.getSampleRate())
&& getFormat() == other.getFormat() && (getFormat() == (AudioFormat)oboe::Unspecified || getFormat() == other.getFormat())
&& getChannelCount() == other.getChannelCount(); && (getFramesPerDataCallback() == oboe::Unspecified || getFramesPerDataCallback() == other.getFramesPerDataCallback())
&& (getChannelCount() == oboe::Unspecified || getChannelCount() == other.getChannelCount());
} }
Result AudioStreamBuilder::openStream(AudioStream **streamPP) { Result AudioStreamBuilder::openStream(AudioStream **streamPP) {
Result result = Result::OK; auto result = isValidConfig();
if (result != Result::OK) {
LOGW("%s() invalid config %d", __func__, result);
return result;
}
LOGI("%s() %s -------- %s --------", LOGI("%s() %s -------- %s --------",
__func__, getDirection() == Direction::Input ? "INPUT" : "OUTPUT", getVersionText()); __func__, getDirection() == Direction::Input ? "INPUT" : "OUTPUT", getVersionText());
@ -104,14 +112,13 @@ Result AudioStreamBuilder::openStream(AudioStream **streamPP) {
// Do we need to make a child stream and convert. // Do we need to make a child stream and convert.
if (conversionNeeded) { if (conversionNeeded) {
AudioStream *tempStream; AudioStream *tempStream;
result = childBuilder.openStream(&tempStream); result = childBuilder.openStream(&tempStream);
if (result != Result::OK) { if (result != Result::OK) {
return result; return result;
} }
if (isCompatible(*tempStream)) { if (isCompatible(*tempStream)) {
// Everything matches so we can just use the child stream directly. // The child stream would work as the requested stream so we can just use it directly.
*streamPP = tempStream; *streamPP = tempStream;
return result; return result;
} else { } else {
@ -126,6 +133,9 @@ Result AudioStreamBuilder::openStream(AudioStream **streamPP) {
if (getSampleRate() == oboe::Unspecified) { if (getSampleRate() == oboe::Unspecified) {
parentBuilder.setSampleRate(tempStream->getSampleRate()); parentBuilder.setSampleRate(tempStream->getSampleRate());
} }
if (getFramesPerDataCallback() == oboe::Unspecified) {
parentBuilder.setFramesPerCallback(tempStream->getFramesPerDataCallback());
}
// Use childStream in a FilterAudioStream. // Use childStream in a FilterAudioStream.
LOGI("%s() create a FilterAudioStream for data conversion.", __func__); LOGI("%s() create a FilterAudioStream for data conversion.", __func__);
@ -148,7 +158,20 @@ Result AudioStreamBuilder::openStream(AudioStream **streamPP) {
} }
} }
result = streamP->open(); // TODO review API // If MMAP has a problem in this case then disable it temporarily.
bool wasMMapOriginallyEnabled = AAudioExtensions::getInstance().isMMapEnabled();
bool wasMMapTemporarilyDisabled = false;
if (wasMMapOriginallyEnabled) {
bool isMMapSafe = QuirksManager::getInstance().isMMapSafe(childBuilder);
if (!isMMapSafe) {
AAudioExtensions::getInstance().setMMapEnabled(false);
wasMMapTemporarilyDisabled = true;
}
}
result = streamP->open();
if (wasMMapTemporarilyDisabled) {
AAudioExtensions::getInstance().setMMapEnabled(wasMMapOriginallyEnabled); // restore original
}
if (result == Result::OK) { if (result == Result::OK) {
int32_t optimalBufferSize = -1; int32_t optimalBufferSize = -1;

View File

@ -20,16 +20,21 @@
#include "DataConversionFlowGraph.h" #include "DataConversionFlowGraph.h"
#include "SourceFloatCaller.h" #include "SourceFloatCaller.h"
#include "SourceI16Caller.h" #include "SourceI16Caller.h"
#include "SourceI24Caller.h"
#include "SourceI32Caller.h"
#include <flowgraph/ClipToRange.h> #include <flowgraph/ClipToRange.h>
#include <flowgraph/MonoToMultiConverter.h> #include <flowgraph/MonoToMultiConverter.h>
#include <flowgraph/MultiToMonoConverter.h>
#include <flowgraph/RampLinear.h> #include <flowgraph/RampLinear.h>
#include <flowgraph/SinkFloat.h> #include <flowgraph/SinkFloat.h>
#include <flowgraph/SinkI16.h> #include <flowgraph/SinkI16.h>
#include <flowgraph/SinkI24.h> #include <flowgraph/SinkI24.h>
#include <flowgraph/SinkI32.h>
#include <flowgraph/SourceFloat.h> #include <flowgraph/SourceFloat.h>
#include <flowgraph/SourceI16.h> #include <flowgraph/SourceI16.h>
#include <flowgraph/SourceI24.h> #include <flowgraph/SourceI24.h>
#include <flowgraph/SourceI32.h>
#include <flowgraph/SampleRateConverter.h> #include <flowgraph/SampleRateConverter.h>
using namespace oboe; using namespace oboe;
@ -81,34 +86,47 @@ Result DataConversionFlowGraph::configure(AudioStream *sourceStream, AudioStream
AudioFormat sourceFormat = sourceStream->getFormat(); AudioFormat sourceFormat = sourceStream->getFormat();
int32_t sourceChannelCount = sourceStream->getChannelCount(); int32_t sourceChannelCount = sourceStream->getChannelCount();
int32_t sourceSampleRate = sourceStream->getSampleRate(); int32_t sourceSampleRate = sourceStream->getSampleRate();
int32_t sourceFramesPerCallback = sourceStream->getFramesPerDataCallback();
AudioFormat sinkFormat = sinkStream->getFormat(); AudioFormat sinkFormat = sinkStream->getFormat();
int32_t sinkChannelCount = sinkStream->getChannelCount(); int32_t sinkChannelCount = sinkStream->getChannelCount();
int32_t sinkSampleRate = sinkStream->getSampleRate(); int32_t sinkSampleRate = sinkStream->getSampleRate();
int32_t sinkFramesPerCallback = sinkStream->getFramesPerDataCallback();
LOGI("%s() flowgraph converts channels: %d to %d, format: %d to %d, rate: %d to %d, qual = %d", LOGI("%s() flowgraph converts channels: %d to %d, format: %d to %d"
", rate: %d to %d, cbsize: %d to %d, qual = %d",
__func__, __func__,
sourceChannelCount, sinkChannelCount, sourceChannelCount, sinkChannelCount,
sourceFormat, sinkFormat, sourceFormat, sinkFormat,
sourceSampleRate, sinkSampleRate, sourceSampleRate, sinkSampleRate,
sourceFramesPerCallback, sinkFramesPerCallback,
sourceStream->getSampleRateConversionQuality()); sourceStream->getSampleRateConversionQuality());
int32_t framesPerCallback = (sourceStream->getFramesPerCallback() == kUnspecified)
? sourceStream->getFramesPerBurst()
: sourceStream->getFramesPerCallback();
// Source // Source
// If OUTPUT and using a callback then call back to the app using a SourceCaller. // IF OUTPUT and using a callback then call back to the app using a SourceCaller.
// If INPUT and NOT using a callback then read from the child stream using a SourceCaller. // OR IF INPUT and NOT using a callback then read from the child stream using a SourceCaller.
if ((sourceStream->getCallback() != nullptr && isOutput) bool isDataCallbackSpecified = sourceStream->isDataCallbackSpecified();
|| (sourceStream->getCallback() == nullptr && isInput)) { if ((isDataCallbackSpecified && isOutput)
|| (!isDataCallbackSpecified && isInput)) {
int32_t actualSourceFramesPerCallback = (sourceFramesPerCallback == kUnspecified)
? sourceStream->getFramesPerBurst()
: sourceFramesPerCallback;
switch (sourceFormat) { switch (sourceFormat) {
case AudioFormat::Float: case AudioFormat::Float:
mSourceCaller = std::make_unique<SourceFloatCaller>(sourceChannelCount, mSourceCaller = std::make_unique<SourceFloatCaller>(sourceChannelCount,
framesPerCallback); actualSourceFramesPerCallback);
break; break;
case AudioFormat::I16: case AudioFormat::I16:
mSourceCaller = std::make_unique<SourceI16Caller>(sourceChannelCount, mSourceCaller = std::make_unique<SourceI16Caller>(sourceChannelCount,
framesPerCallback); actualSourceFramesPerCallback);
break;
case AudioFormat::I24:
mSourceCaller = std::make_unique<SourceI24Caller>(sourceChannelCount,
actualSourceFramesPerCallback);
break;
case AudioFormat::I32:
mSourceCaller = std::make_unique<SourceI32Caller>(sourceChannelCount,
actualSourceFramesPerCallback);
break; break;
default: default:
LOGE("%s() Unsupported source caller format = %d", __func__, sourceFormat); LOGE("%s() Unsupported source caller format = %d", __func__, sourceFormat);
@ -117,8 +135,8 @@ Result DataConversionFlowGraph::configure(AudioStream *sourceStream, AudioStream
mSourceCaller->setStream(sourceStream); mSourceCaller->setStream(sourceStream);
lastOutput = &mSourceCaller->output; lastOutput = &mSourceCaller->output;
} else { } else {
// If OUTPUT and NOT using a callback then write to the child stream using a BlockWriter. // IF OUTPUT and NOT using a callback then write to the child stream using a BlockWriter.
// If INPUT and using a callback then write to the app using a BlockWriter. // OR IF INPUT and using a callback then write to the app using a BlockWriter.
switch (sourceFormat) { switch (sourceFormat) {
case AudioFormat::Float: case AudioFormat::Float:
mSource = std::make_unique<SourceFloat>(sourceChannelCount); mSource = std::make_unique<SourceFloat>(sourceChannelCount);
@ -126,40 +144,72 @@ Result DataConversionFlowGraph::configure(AudioStream *sourceStream, AudioStream
case AudioFormat::I16: case AudioFormat::I16:
mSource = std::make_unique<SourceI16>(sourceChannelCount); mSource = std::make_unique<SourceI16>(sourceChannelCount);
break; break;
case AudioFormat::I24:
mSource = std::make_unique<SourceI24>(sourceChannelCount);
break;
case AudioFormat::I32:
mSource = std::make_unique<SourceI32>(sourceChannelCount);
break;
default: default:
LOGE("%s() Unsupported source format = %d", __func__, sourceFormat); LOGE("%s() Unsupported source format = %d", __func__, sourceFormat);
return Result::ErrorIllegalArgument; return Result::ErrorIllegalArgument;
} }
if (isInput) { if (isInput) {
int32_t actualSinkFramesPerCallback = (sinkFramesPerCallback == kUnspecified)
? sinkStream->getFramesPerBurst()
: sinkFramesPerCallback;
// The BlockWriter is after the Sink so use the SinkStream size. // The BlockWriter is after the Sink so use the SinkStream size.
mBlockWriter.open(framesPerCallback * sinkStream->getBytesPerFrame()); mBlockWriter.open(actualSinkFramesPerCallback * sinkStream->getBytesPerFrame());
mAppBuffer = std::make_unique<uint8_t[]>( mAppBuffer = std::make_unique<uint8_t[]>(
kDefaultBufferSize * sinkStream->getBytesPerFrame()); kDefaultBufferSize * sinkStream->getBytesPerFrame());
} }
lastOutput = &mSource->output; lastOutput = &mSource->output;
} }
// If we are going to reduce the number of channels then do it before the
// sample rate converter.
if (sourceChannelCount > sinkChannelCount) {
if (sinkChannelCount == 1) {
mMultiToMonoConverter = std::make_unique<MultiToMonoConverter>(sourceChannelCount);
lastOutput->connect(&mMultiToMonoConverter->input);
lastOutput = &mMultiToMonoConverter->output;
} else {
mChannelCountConverter = std::make_unique<ChannelCountConverter>(
sourceChannelCount,
sinkChannelCount);
lastOutput->connect(&mChannelCountConverter->input);
lastOutput = &mChannelCountConverter->output;
}
}
// Sample Rate conversion // Sample Rate conversion
if (sourceSampleRate != sinkSampleRate) { if (sourceSampleRate != sinkSampleRate) {
mResampler.reset(MultiChannelResampler::make(sourceChannelCount, // Create a resampler to do the math.
mResampler.reset(MultiChannelResampler::make(lastOutput->getSamplesPerFrame(),
sourceSampleRate, sourceSampleRate,
sinkSampleRate, sinkSampleRate,
convertOboeSRQualityToMCR( convertOboeSRQualityToMCR(
sourceStream->getSampleRateConversionQuality()))); sourceStream->getSampleRateConversionQuality())));
mRateConverter = std::make_unique<SampleRateConverter>(sourceChannelCount, // Make a flowgraph node that uses the resampler.
mRateConverter = std::make_unique<SampleRateConverter>(lastOutput->getSamplesPerFrame(),
*mResampler.get()); *mResampler.get());
lastOutput->connect(&mRateConverter->input); lastOutput->connect(&mRateConverter->input);
lastOutput = &mRateConverter->output; lastOutput = &mRateConverter->output;
} }
// Expand the number of channels if required. // Expand the number of channels if required.
if (sourceChannelCount == 1 && sinkChannelCount > 1) { if (sourceChannelCount < sinkChannelCount) {
mChannelConverter = std::make_unique<MonoToMultiConverter>(sinkChannelCount); if (sourceChannelCount == 1) {
lastOutput->connect(&mChannelConverter->input); mMonoToMultiConverter = std::make_unique<MonoToMultiConverter>(sinkChannelCount);
lastOutput = &mChannelConverter->output; lastOutput->connect(&mMonoToMultiConverter->input);
} else if (sourceChannelCount != sinkChannelCount) { lastOutput = &mMonoToMultiConverter->output;
LOGW("%s() Channel reduction not supported.", __func__); } else {
return Result::ErrorUnimplemented; // TODO mChannelCountConverter = std::make_unique<ChannelCountConverter>(
sourceChannelCount,
sinkChannelCount);
lastOutput->connect(&mChannelCountConverter->input);
lastOutput = &mChannelCountConverter->output;
}
} }
// Sink // Sink
@ -170,14 +220,18 @@ Result DataConversionFlowGraph::configure(AudioStream *sourceStream, AudioStream
case AudioFormat::I16: case AudioFormat::I16:
mSink = std::make_unique<SinkI16>(sinkChannelCount); mSink = std::make_unique<SinkI16>(sinkChannelCount);
break; break;
case AudioFormat::I24:
mSink = std::make_unique<SinkI24>(sinkChannelCount);
break;
case AudioFormat::I32:
mSink = std::make_unique<SinkI32>(sinkChannelCount);
break;
default: default:
LOGE("%s() Unsupported sink format = %d", __func__, sinkFormat); LOGE("%s() Unsupported sink format = %d", __func__, sinkFormat);
return Result::ErrorIllegalArgument;; return Result::ErrorIllegalArgument;;
} }
lastOutput->connect(&mSink->input); lastOutput->connect(&mSink->input);
mFramePosition = 0;
return Result::OK; return Result::OK;
} }
@ -185,8 +239,7 @@ int32_t DataConversionFlowGraph::read(void *buffer, int32_t numFrames, int64_t t
if (mSourceCaller) { if (mSourceCaller) {
mSourceCaller->setTimeoutNanos(timeoutNanos); mSourceCaller->setTimeoutNanos(timeoutNanos);
} }
int32_t numRead = mSink->read(mFramePosition, buffer, numFrames); int32_t numRead = mSink->read(buffer, numFrames);
mFramePosition += numRead;
return numRead; return numRead;
} }
@ -196,8 +249,7 @@ int32_t DataConversionFlowGraph::write(void *inputBuffer, int32_t numFrames) {
mSource->setData(inputBuffer, numFrames); mSource->setData(inputBuffer, numFrames);
while (true) { while (true) {
// Pull and read some data in app format into a small buffer. // Pull and read some data in app format into a small buffer.
int32_t framesRead = mSink->read(mFramePosition, mAppBuffer.get(), flowgraph::kDefaultBufferSize); int32_t framesRead = mSink->read(mAppBuffer.get(), flowgraph::kDefaultBufferSize);
mFramePosition += framesRead;
if (framesRead <= 0) break; if (framesRead <= 0) break;
// Write to a block adapter, which will call the destination whenever it has enough data. // Write to a block adapter, which will call the destination whenever it has enough data.
int32_t bytesRead = mBlockWriter.write(mAppBuffer.get(), int32_t bytesRead = mBlockWriter.write(mAppBuffer.get(),
@ -209,7 +261,7 @@ int32_t DataConversionFlowGraph::write(void *inputBuffer, int32_t numFrames) {
int32_t DataConversionFlowGraph::onProcessFixedBlock(uint8_t *buffer, int32_t numBytes) { int32_t DataConversionFlowGraph::onProcessFixedBlock(uint8_t *buffer, int32_t numBytes) {
int32_t numFrames = numBytes / mFilterStream->getBytesPerFrame(); int32_t numFrames = numBytes / mFilterStream->getBytesPerFrame();
mCallbackResult = mFilterStream->getCallback()->onAudioReady(mFilterStream, buffer, numFrames); mCallbackResult = mFilterStream->getDataCallback()->onAudioReady(mFilterStream, buffer, numFrames);
// TODO handle STOP from callback, process data remaining in the block adapter // TODO handle STOP from callback, process data remaining in the block adapter
return numBytes; return numBytes;
} }

View File

@ -21,7 +21,9 @@
#include <stdint.h> #include <stdint.h>
#include <sys/types.h> #include <sys/types.h>
#include <flowgraph/ChannelCountConverter.h>
#include <flowgraph/MonoToMultiConverter.h> #include <flowgraph/MonoToMultiConverter.h>
#include <flowgraph/MultiToMonoConverter.h>
#include <flowgraph/SampleRateConverter.h> #include <flowgraph/SampleRateConverter.h>
#include <oboe/Definitions.h> #include <oboe/Definitions.h>
#include "AudioSourceCaller.h" #include "AudioSourceCaller.h"
@ -67,7 +69,9 @@ public:
private: private:
std::unique_ptr<flowgraph::FlowGraphSourceBuffered> mSource; std::unique_ptr<flowgraph::FlowGraphSourceBuffered> mSource;
std::unique_ptr<AudioSourceCaller> mSourceCaller; std::unique_ptr<AudioSourceCaller> mSourceCaller;
std::unique_ptr<flowgraph::MonoToMultiConverter> mChannelConverter; std::unique_ptr<flowgraph::MonoToMultiConverter> mMonoToMultiConverter;
std::unique_ptr<flowgraph::MultiToMonoConverter> mMultiToMonoConverter;
std::unique_ptr<flowgraph::ChannelCountConverter> mChannelCountConverter;
std::unique_ptr<resampler::MultiChannelResampler> mResampler; std::unique_ptr<resampler::MultiChannelResampler> mResampler;
std::unique_ptr<flowgraph::SampleRateConverter> mRateConverter; std::unique_ptr<flowgraph::SampleRateConverter> mRateConverter;
std::unique_ptr<flowgraph::FlowGraphSink> mSink; std::unique_ptr<flowgraph::FlowGraphSink> mSink;
@ -76,8 +80,6 @@ private:
DataCallbackResult mCallbackResult = DataCallbackResult::Continue; DataCallbackResult mCallbackResult = DataCallbackResult::Continue;
AudioStream *mFilterStream = nullptr; AudioStream *mFilterStream = nullptr;
std::unique_ptr<uint8_t[]> mAppBuffer; std::unique_ptr<uint8_t[]> mAppBuffer;
int64_t mFramePosition = 0;
}; };
} }

View File

@ -16,6 +16,7 @@
#include <memory> #include <memory>
#include "OboeDebug.h"
#include "FilterAudioStream.h" #include "FilterAudioStream.h"
using namespace oboe; using namespace oboe;
@ -47,7 +48,7 @@ Result FilterAudioStream::configureFlowGraph() {
AudioStream *sourceStream = isOutput ? this : mChildStream.get(); AudioStream *sourceStream = isOutput ? this : mChildStream.get();
AudioStream *sinkStream = isOutput ? mChildStream.get() : this; AudioStream *sinkStream = isOutput ? mChildStream.get() : this;
mRateScaler = ((double) sourceStream->getSampleRate()) / sinkStream->getSampleRate(); mRateScaler = ((double) getSampleRate()) / mChildStream->getSampleRate();
return mFlowGraph->configure(sourceStream, sinkStream); return mFlowGraph->configure(sourceStream, sinkStream);
} }
@ -90,3 +91,16 @@ ResultWithValue<int32_t> FilterAudioStream::read(void *buffer,
return ResultWithValue<int32_t>::createBasedOnSign(framesRead); return ResultWithValue<int32_t>::createBasedOnSign(framesRead);
} }
DataCallbackResult FilterAudioStream::onAudioReady(AudioStream *oboeStream,
void *audioData,
int32_t numFrames) {
int32_t framesProcessed;
if (oboeStream->getDirection() == Direction::Output) {
framesProcessed = mFlowGraph->read(audioData, numFrames, 0 /* timeout */);
} else {
framesProcessed = mFlowGraph->write(audioData, numFrames);
}
return (framesProcessed < numFrames)
? DataCallbackResult::Stop
: mFlowGraph->getDataCallbackResult();
}

View File

@ -42,8 +42,11 @@ public:
: AudioStream(builder) : AudioStream(builder)
, mChildStream(childStream) { , mChildStream(childStream) {
// Intercept the callback if used. // Intercept the callback if used.
if (builder.getCallback() != nullptr) { if (builder.isErrorCallbackSpecified()) {
mStreamCallback = mChildStream->swapCallback(this); mErrorCallback = mChildStream->swapErrorCallback(this);
}
if (builder.isDataCallbackSpecified()) {
mDataCallback = mChildStream->swapDataCallback(this);
} else { } else {
const int size = childStream->getFramesPerBurst() * childStream->getBytesPerFrame(); const int size = childStream->getFramesPerBurst() * childStream->getBytesPerFrame();
mBlockingBuffer = std::make_unique<uint8_t[]>(size); mBlockingBuffer = std::make_unique<uint8_t[]>(size);
@ -52,6 +55,9 @@ public:
// Copy parameters that may not match builder. // Copy parameters that may not match builder.
mBufferCapacityInFrames = mChildStream->getBufferCapacityInFrames(); mBufferCapacityInFrames = mChildStream->getBufferCapacityInFrames();
mPerformanceMode = mChildStream->getPerformanceMode(); mPerformanceMode = mChildStream->getPerformanceMode();
mInputPreset = mChildStream->getInputPreset();
mFramesPerBurst = mChildStream->getFramesPerBurst();
mDeviceId = mChildStream->getDeviceId();
} }
virtual ~FilterAudioStream() = default; virtual ~FilterAudioStream() = default;
@ -109,7 +115,7 @@ public:
int32_t numFrames, int32_t numFrames,
int64_t timeoutNanoseconds) override; int64_t timeoutNanoseconds) override;
StreamState getState() const override { StreamState getState() override {
return mChildStream->getState(); return mChildStream->getState();
} }
@ -124,10 +130,6 @@ public:
return mChildStream->isXRunCountSupported(); return mChildStream->isXRunCountSupported();
} }
int32_t getFramesPerBurst() override {
return mChildStream->getFramesPerBurst();
}
AudioApi getAudioApi() const override { AudioApi getAudioApi() const override {
return mChildStream->getAudioApi(); return mChildStream->getAudioApi();
} }
@ -155,7 +157,7 @@ public:
return mBufferSizeInFrames; return mBufferSizeInFrames;
} }
ResultWithValue<int32_t> getXRunCount() const override { ResultWithValue<int32_t> getXRunCount() override {
return mChildStream->getXRunCount(); return mChildStream->getXRunCount();
} }
@ -169,38 +171,45 @@ public:
int64_t *timeNanoseconds) override { int64_t *timeNanoseconds) override {
int64_t childPosition = 0; int64_t childPosition = 0;
Result result = mChildStream->getTimestamp(clockId, &childPosition, timeNanoseconds); Result result = mChildStream->getTimestamp(clockId, &childPosition, timeNanoseconds);
// It is OK if framePosition is null.
if (framePosition) {
*framePosition = childPosition * mRateScaler; *framePosition = childPosition * mRateScaler;
}
return result; return result;
} }
DataCallbackResult onAudioReady(AudioStream *oboeStream, DataCallbackResult onAudioReady(AudioStream *oboeStream,
void *audioData, void *audioData,
int32_t numFrames) override { int32_t numFrames) override;
int32_t framesProcessed;
if (oboeStream->getDirection() == Direction::Output) { bool onError(AudioStream * /*audioStream*/, Result error) override {
framesProcessed = mFlowGraph->read(audioData, numFrames, 0 /* timeout */); if (mErrorCallback != nullptr) {
} else { return mErrorCallback->onError(this, error);
framesProcessed = mFlowGraph->write(audioData, numFrames);
} }
return (framesProcessed < numFrames) return false;
? DataCallbackResult::Stop
: mFlowGraph->getDataCallbackResult();
} }
void onErrorBeforeClose(AudioStream *oboeStream, Result error) override { void onErrorBeforeClose(AudioStream * /*oboeStream*/, Result error) override {
if (mStreamCallback != nullptr) { if (mErrorCallback != nullptr) {
mStreamCallback->onErrorBeforeClose(this, error); mErrorCallback->onErrorBeforeClose(this, error);
} }
} }
void onErrorAfterClose(AudioStream *oboeStream, Result error) override { void onErrorAfterClose(AudioStream * /*oboeStream*/, Result error) override {
// Close this parent stream because the callback will only close the child. // Close this parent stream because the callback will only close the child.
AudioStream::close(); AudioStream::close();
if (mStreamCallback != nullptr) { if (mErrorCallback != nullptr) {
mStreamCallback->onErrorAfterClose(this, error); mErrorCallback->onErrorAfterClose(this, error);
} }
} }
/**
* @return last result passed from an error callback
*/
oboe::Result getLastErrorCallbackResult() const override {
return mChildStream->getLastErrorCallbackResult();
}
private: private:
std::unique_ptr<AudioStream> mChildStream; // this stream wraps the child stream std::unique_ptr<AudioStream> mChildStream; // this stream wraps the child stream

View File

@ -64,6 +64,9 @@ Result LatencyTuner::tune() {
// or was from stream->getBufferCapacityInFrames()) // or was from stream->getBufferCapacityInFrames())
if (requestedBufferSize > mMaxBufferSize) requestedBufferSize = mMaxBufferSize; if (requestedBufferSize > mMaxBufferSize) requestedBufferSize = mMaxBufferSize;
// Note that this will not allocate more memory. It simply determines
// how much of the existing buffer capacity will be used. The size will be
// clipped to the bufferCapacity by AAudio.
auto setBufferResult = mStream.setBufferSizeInFrames(requestedBufferSize); auto setBufferResult = mStream.setBufferSizeInFrames(requestedBufferSize);
if (setBufferResult != Result::OK) { if (setBufferResult != Result::OK) {
result = setBufferResult; result = setBufferResult;

View File

@ -0,0 +1,36 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "oboe/OboeExtensions.h"
#include "aaudio/AAudioExtensions.h"
using namespace oboe;
bool OboeExtensions::isMMapSupported(){
return AAudioExtensions::getInstance().isMMapSupported();
}
bool OboeExtensions::isMMapEnabled(){
return AAudioExtensions::getInstance().isMMapEnabled();
}
int32_t OboeExtensions::setMMapEnabled(bool enabled){
return AAudioExtensions::getInstance().setMMapEnabled(enabled);
}
bool OboeExtensions::isMMapUsed(oboe::AudioStream *oboeStream){
return AAudioExtensions::getInstance().isMMapUsed(oboeStream);
}

View File

@ -17,6 +17,7 @@
#include <oboe/AudioStreamBuilder.h> #include <oboe/AudioStreamBuilder.h>
#include <oboe/Oboe.h> #include <oboe/Oboe.h>
#include "OboeDebug.h"
#include "QuirksManager.h" #include "QuirksManager.h"
using namespace oboe; using namespace oboe;
@ -51,40 +52,146 @@ int32_t QuirksManager::DeviceQuirks::clipBufferSize(AudioStream &stream,
return adjustedSize; return adjustedSize;
} }
class SamsungDeviceQuirks : public QuirksManager::DeviceQuirks { bool QuirksManager::DeviceQuirks::isAAudioMMapPossible(const AudioStreamBuilder &builder) const {
public: bool isSampleRateCompatible =
SamsungDeviceQuirks() { builder.getSampleRate() == oboe::Unspecified
std::string arch = getPropertyString("ro.arch"); || builder.getSampleRate() == kCommonNativeRate
isExynos = (arch.rfind("exynos", 0) == 0); // starts with? || builder.getSampleRateConversionQuality() != SampleRateConversionQuality::None;
return builder.getPerformanceMode() == PerformanceMode::LowLatency
&& isSampleRateCompatible
&& builder.getChannelCount() <= kChannelCountStereo;
} }
virtual ~SamsungDeviceQuirks() = default; bool QuirksManager::DeviceQuirks::shouldConvertFloatToI16ForOutputStreams() {
std::string productManufacturer = getPropertyString("ro.product.manufacturer");
if (getSdkVersion() < __ANDROID_API_L__) {
return true;
} else if ((productManufacturer == "vivo") && (getSdkVersion() < __ANDROID_API_M__)) {
return true;
}
return false;
}
/**
* This is for Samsung Exynos quirks. Samsung Mobile uses Qualcomm chips so
* the QualcommDeviceQuirks would apply.
*/
class SamsungExynosDeviceQuirks : public QuirksManager::DeviceQuirks {
public:
SamsungExynosDeviceQuirks() {
std::string chipname = getPropertyString("ro.hardware.chipname");
isExynos9810 = (chipname == "exynos9810");
isExynos990 = (chipname == "exynos990");
isExynos850 = (chipname == "exynos850");
mBuildChangelist = getPropertyInteger("ro.build.changelist", 0);
}
virtual ~SamsungExynosDeviceQuirks() = default;
int32_t getExclusiveBottomMarginInBursts() const override { int32_t getExclusiveBottomMarginInBursts() const override {
// TODO Make this conditional on build version when MMAP timing improves. return kBottomMargin;
return isExynos ? kBottomMarginExynos : kBottomMarginOther;
} }
int32_t getExclusiveTopMarginInBursts() const override { int32_t getExclusiveTopMarginInBursts() const override {
return kTopMargin; return kTopMargin;
} }
// See Oboe issues #824 and #1247 for more information.
bool isMonoMMapActuallyStereo() const override {
return isExynos9810 || isExynos850; // TODO We can make this version specific if it gets fixed.
}
bool isAAudioMMapPossible(const AudioStreamBuilder &builder) const override {
return DeviceQuirks::isAAudioMMapPossible(builder)
// Samsung says they use Legacy for Camcorder
&& builder.getInputPreset() != oboe::InputPreset::Camcorder;
}
bool isMMapSafe(const AudioStreamBuilder &builder) override {
const bool isInput = builder.getDirection() == Direction::Input;
// This detects b/159066712 , S20 LSI has corrupt low latency audio recording
// and turns off MMAP.
// See also https://github.com/google/oboe/issues/892
bool isRecordingCorrupted = isInput
&& isExynos990
&& mBuildChangelist < 19350896;
// Certain S9+ builds record silence when using MMAP and not using the VoiceCommunication
// preset.
// See https://github.com/google/oboe/issues/1110
bool wouldRecordSilence = isInput
&& isExynos9810
&& mBuildChangelist <= 18847185
&& (builder.getInputPreset() != InputPreset::VoiceCommunication);
if (wouldRecordSilence){
LOGI("QuirksManager::%s() Requested stream configuration would result in silence on "
"this device. Switching off MMAP.", __func__);
}
return !isRecordingCorrupted && !wouldRecordSilence;
}
private: private:
// Stay farther away from DSP position on Exynos devices. // Stay farther away from DSP position on Exynos devices.
static constexpr int32_t kBottomMarginExynos = 2; static constexpr int32_t kBottomMargin = 2;
static constexpr int32_t kBottomMarginOther = 1;
static constexpr int32_t kTopMargin = 1; static constexpr int32_t kTopMargin = 1;
bool isExynos = false; bool isExynos9810 = false;
bool isExynos990 = false;
bool isExynos850 = false;
int mBuildChangelist = 0;
};
class QualcommDeviceQuirks : public QuirksManager::DeviceQuirks {
public:
QualcommDeviceQuirks() {
std::string modelName = getPropertyString("ro.soc.model");
isSM8150 = (modelName == "SDM8150");
}
virtual ~QualcommDeviceQuirks() = default;
int32_t getExclusiveBottomMarginInBursts() const override {
return kBottomMargin;
}
bool isMMapSafe(const AudioStreamBuilder &builder) override {
// See https://github.com/google/oboe/issues/1121#issuecomment-897957749
bool isMMapBroken = false;
if (isSM8150 && (getSdkVersion() <= __ANDROID_API_P__)) {
LOGI("QuirksManager::%s() MMAP not actually supported on this chip."
" Switching off MMAP.", __func__);
isMMapBroken = true;
}
return !isMMapBroken;
}
private:
bool isSM8150 = false;
static constexpr int32_t kBottomMargin = 1;
}; };
QuirksManager::QuirksManager() { QuirksManager::QuirksManager() {
std::string manufacturer = getPropertyString("ro.product.manufacturer"); std::string productManufacturer = getPropertyString("ro.product.manufacturer");
if (manufacturer == "samsung") { if (productManufacturer == "samsung") {
mDeviceQuirks = std::make_unique<SamsungDeviceQuirks>(); std::string arch = getPropertyString("ro.arch");
bool isExynos = (arch.rfind("exynos", 0) == 0); // starts with?
if (isExynos) {
mDeviceQuirks = std::make_unique<SamsungExynosDeviceQuirks>();
}
}
if (!mDeviceQuirks) {
std::string socManufacturer = getPropertyString("ro.soc.manufacturer");
if (socManufacturer == "Qualcomm") {
// This may include Samsung Mobile devices.
mDeviceQuirks = std::make_unique<QualcommDeviceQuirks>();
} else { } else {
mDeviceQuirks = std::make_unique<DeviceQuirks>(); mDeviceQuirks = std::make_unique<DeviceQuirks>();
} }
} }
}
bool QuirksManager::isConversionNeeded( bool QuirksManager::isConversionNeeded(
const AudioStreamBuilder &builder, const AudioStreamBuilder &builder,
@ -94,7 +201,30 @@ bool QuirksManager::isConversionNeeded(
const bool isInput = builder.getDirection() == Direction::Input; const bool isInput = builder.getDirection() == Direction::Input;
const bool isFloat = builder.getFormat() == AudioFormat::Float; const bool isFloat = builder.getFormat() == AudioFormat::Float;
// If a SAMPLE RATE is specified for low latency then let the native code choose an optimal rate. // There are multiple bugs involving using callback with a specified callback size.
// Issue #778: O to Q had a problem with Legacy INPUT streams for FLOAT streams
// and a specified callback size. It would assert because of a bad buffer size.
//
// Issue #973: O to R had a problem with Legacy output streams using callback and a specified callback size.
// An AudioTrack stream could still be running when the AAudio FixedBlockReader was closed.
// Internally b/161914201#comment25
//
// Issue #983: O to R would glitch if the framesPerCallback was too small.
//
// Most of these problems were related to Legacy stream. MMAP was OK. But we don't
// know if we will get an MMAP stream. So, to be safe, just do the conversion in Oboe.
if (OboeGlobals::areWorkaroundsEnabled()
&& builder.willUseAAudio()
&& builder.isDataCallbackSpecified()
&& builder.getFramesPerDataCallback() != 0
&& getSdkVersion() <= __ANDROID_API_R__) {
LOGI("QuirksManager::%s() avoid setFramesPerCallback(n>0)", __func__);
childBuilder.setFramesPerCallback(oboe::Unspecified);
conversionNeeded = true;
}
// If a SAMPLE RATE is specified for low latency, let the native code choose an optimal rate.
// This isn't really a workaround. It is an Oboe feature that is convenient to place here.
// TODO There may be a problem if the devices supports low latency // TODO There may be a problem if the devices supports low latency
// at a higher rate than the default. // at a higher rate than the default.
if (builder.getSampleRate() != oboe::Unspecified if (builder.getSampleRate() != oboe::Unspecified
@ -107,7 +237,8 @@ bool QuirksManager::isConversionNeeded(
// Data Format // Data Format
// OpenSL ES and AAudio before P do not support FAST path for FLOAT capture. // OpenSL ES and AAudio before P do not support FAST path for FLOAT capture.
if (isFloat if (OboeGlobals::areWorkaroundsEnabled()
&& isFloat
&& isInput && isInput
&& builder.isFormatConversionAllowed() && builder.isFormatConversionAllowed()
&& isLowLatency && isLowLatency
@ -115,24 +246,59 @@ bool QuirksManager::isConversionNeeded(
) { ) {
childBuilder.setFormat(AudioFormat::I16); // needed for FAST track childBuilder.setFormat(AudioFormat::I16); // needed for FAST track
conversionNeeded = true; conversionNeeded = true;
LOGI("QuirksManager::%s() forcing internal format to I16 for low latency", __func__);
} }
// Channel Count // Add quirk for float output when needed.
if (builder.getChannelCount() != oboe::Unspecified
&& builder.isChannelConversionAllowed()) {
if (OboeGlobals::areWorkaroundsEnabled() if (OboeGlobals::areWorkaroundsEnabled()
&& builder.getChannelCount() == 2 // stereo? && isFloat
&& !isInput
&& builder.isFormatConversionAllowed()
&& mDeviceQuirks->shouldConvertFloatToI16ForOutputStreams()
) {
childBuilder.setFormat(AudioFormat::I16);
conversionNeeded = true;
LOGI("QuirksManager::%s() float was requested but not supported on pre-L devices "
"and some devices like Vivo devices may have issues on L devices, "
"creating an underlying I16 stream and using format conversion to provide a float "
"stream", __func__);
}
// Channel Count conversions
if (OboeGlobals::areWorkaroundsEnabled()
&& builder.isChannelConversionAllowed()
&& builder.getChannelCount() == kChannelCountStereo
&& isInput && isInput
&& isLowLatency && isLowLatency
&& (!builder.willUseAAudio() && (getSdkVersion() == __ANDROID_API_O__))) { && (!builder.willUseAAudio() && (getSdkVersion() == __ANDROID_API_O__))
) {
// Workaround for heap size regression in O. // Workaround for heap size regression in O.
// b/66967812 AudioRecord does not allow FAST track for stereo capture in O // b/66967812 AudioRecord does not allow FAST track for stereo capture in O
childBuilder.setChannelCount(1); childBuilder.setChannelCount(kChannelCountMono);
conversionNeeded = true; conversionNeeded = true;
LOGI("QuirksManager::%s() using mono internally for low latency on O", __func__);
} else if (OboeGlobals::areWorkaroundsEnabled()
&& builder.getChannelCount() == kChannelCountMono
&& isInput
&& mDeviceQuirks->isMonoMMapActuallyStereo()
&& builder.willUseAAudio()
// Note: we might use this workaround on a device that supports
// MMAP but will use Legacy for this stream. But this will only happen
// on devices that have the broken mono.
&& mDeviceQuirks->isAAudioMMapPossible(builder)
) {
// Workaround for mono actually running in stereo mode.
childBuilder.setChannelCount(kChannelCountStereo); // Use stereo and extract first channel.
conversionNeeded = true;
LOGI("QuirksManager::%s() using stereo internally to avoid broken mono", __func__);
} }
// Note that MMAP does not support mono in 8.1. But that would only matter on Pixel 1 // Note that MMAP does not support mono in 8.1. But that would only matter on Pixel 1
// phones and they have almost all been updated to 9.0. // phones and they have almost all been updated to 9.0.
}
return conversionNeeded; return conversionNeeded;
} }
bool QuirksManager::isMMapSafe(AudioStreamBuilder &builder) {
if (!OboeGlobals::areWorkaroundsEnabled()) return true;
return mDeviceQuirks->isMMapSafe(builder);
}

View File

@ -21,6 +21,10 @@
#include <oboe/AudioStreamBuilder.h> #include <oboe/AudioStreamBuilder.h>
#include <aaudio/AudioStreamAAudio.h> #include <aaudio/AudioStreamAAudio.h>
#ifndef __ANDROID_API_R__
#define __ANDROID_API_R__ 30
#endif
namespace oboe { namespace oboe {
/** /**
@ -91,6 +95,20 @@ public:
return kDefaultTopMarginInBursts; return kDefaultTopMarginInBursts;
} }
// On some devices, you can open a mono stream but it is actually running in stereo!
virtual bool isMonoMMapActuallyStereo() const {
return false;
}
virtual bool isAAudioMMapPossible(const AudioStreamBuilder &builder) const;
virtual bool isMMapSafe(const AudioStreamBuilder & /* builder */ ) {
return true;
}
// On some devices, Float does not work so it should be converted to I16.
static bool shouldConvertFloatToI16ForOutputStreams();
static constexpr int32_t kDefaultBottomMarginInBursts = 0; static constexpr int32_t kDefaultBottomMarginInBursts = 0;
static constexpr int32_t kDefaultTopMarginInBursts = 0; static constexpr int32_t kDefaultTopMarginInBursts = 0;
@ -98,10 +116,16 @@ public:
// b/129545119 | AAudio Legacy allows setBufferSizeInFrames too low // b/129545119 | AAudio Legacy allows setBufferSizeInFrames too low
// Fixed in Q // Fixed in Q
static constexpr int32_t kLegacyBottomMarginInBursts = 1; static constexpr int32_t kLegacyBottomMarginInBursts = 1;
static constexpr int32_t kCommonNativeRate = 48000; // very typical native sample rate
}; };
bool isMMapSafe(AudioStreamBuilder &builder);
private: private:
static constexpr int32_t kChannelCountMono = 1;
static constexpr int32_t kChannelCountStereo = 2;
std::unique_ptr<DeviceQuirks> mDeviceQuirks{}; std::unique_ptr<DeviceQuirks> mDeviceQuirks{};
}; };

View File

@ -32,7 +32,8 @@ class SourceI16Caller : public AudioSourceCaller {
public: public:
SourceI16Caller(int32_t channelCount, int32_t framesPerCallback) SourceI16Caller(int32_t channelCount, int32_t framesPerCallback)
: AudioSourceCaller(channelCount, framesPerCallback, sizeof(int16_t)) { : AudioSourceCaller(channelCount, framesPerCallback, sizeof(int16_t)) {
mConversionBuffer = std::make_unique<int16_t[]>(channelCount * output.getFramesPerBuffer()); mConversionBuffer = std::make_unique<int16_t[]>(static_cast<size_t>(channelCount)
* static_cast<size_t>(output.getFramesPerBuffer()));
} }
int32_t onProcess(int32_t numFrames) override; int32_t onProcess(int32_t numFrames) override;

View File

@ -0,0 +1,56 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <algorithm>
#include <unistd.h>
#include "flowgraph/FlowGraphNode.h"
#include "SourceI24Caller.h"
#if FLOWGRAPH_ANDROID_INTERNAL
#include <audio_utils/primitives.h>
#endif
using namespace oboe;
using namespace flowgraph;
/**
 * Pull packed 24-bit PCM from the data callback through the block reader and
 * convert it to float samples in the output port buffer.
 *
 * @param numFrames maximum number of frames to produce
 * @return number of frames actually produced
 */
int32_t SourceI24Caller::onProcess(int32_t numFrames) {
    const int32_t bytesPerFrame = mStream->getBytesPerFrame();
    const int32_t bytesRequested = bytesPerFrame * numFrames;
    const int32_t bytesDelivered = mBlockReader.read(
            (uint8_t *) mConversionBuffer.get(), bytesRequested);
    const int32_t framesDelivered = bytesDelivered / bytesPerFrame;

    float *destination = output.getBuffer();
    const uint8_t *source = mConversionBuffer.get();
    const int32_t sampleCount = framesDelivered * output.getSamplesPerFrame();

#if FLOWGRAPH_ANDROID_INTERNAL
    memcpy_to_float_from_p24(destination, source, sampleCount);
#else
    // Map the full signed 32-bit range onto [-1.0, 1.0).
    static const float scale = 1. / (float)(1UL << 31);
    for (int i = 0; i < sampleCount; i++) {
        // Assemble each little-endian 3-byte sample into the top 24 bits of
        // an int32_t so the sign bit ends up in the correct position.
        int32_t assembled = source[2];
        assembled <<= 8;
        assembled |= source[1];
        assembled <<= 8;
        assembled |= source[0];
        assembled <<= 8; // Shift to 32 bit data so the sign is correct.
        source += kBytesPerI24Packed;
        *destination++ = assembled * scale; // scale to range -1.0 to 1.0
    }
#endif
    return framesDelivered;
}

View File

@ -0,0 +1,53 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef OBOE_SOURCE_I24_CALLER_H
#define OBOE_SOURCE_I24_CALLER_H
#include <unistd.h>
#include <sys/types.h>
#include "flowgraph/FlowGraphNode.h"
#include "AudioSourceCaller.h"
#include "FixedBlockReader.h"
namespace oboe {
/**
* AudioSource that uses callback to get more data.
*/
class SourceI24Caller : public AudioSourceCaller {
public:
    SourceI24Caller(int32_t channelCount, int32_t framesPerCallback)
            : AudioSourceCaller(channelCount, framesPerCallback, kBytesPerI24Packed) {
        // Buffer is sized in bytes: 3 bytes per sample * channels * frames per buffer.
        // The size_t casts keep the product from overflowing int32_t arithmetic.
        mConversionBuffer = std::make_unique<uint8_t[]>(static_cast<size_t>(kBytesPerI24Packed)
                * static_cast<size_t>(channelCount)
                * static_cast<size_t>(output.getFramesPerBuffer()));
    }

    // Reads packed-I24 bytes from the callback and converts them to float;
    // implemented in SourceI24Caller.cpp.
    int32_t onProcess(int32_t numFrames) override;

    const char *getName() override {
        return "SourceI24Caller";
    }

private:
    // Staging area for raw packed 24-bit bytes before float conversion.
    std::unique_ptr<uint8_t[]> mConversionBuffer;
    // Packed 24-bit samples occupy exactly 3 bytes each (no padding byte).
    static constexpr int kBytesPerI24Packed = 3;
};
}
#endif //OBOE_SOURCE_I24_CALLER_H

View File

@ -0,0 +1,47 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <algorithm>
#include <unistd.h>
#include "flowgraph/FlowGraphNode.h"
#include "SourceI32Caller.h"
#if FLOWGRAPH_ANDROID_INTERNAL
#include <audio_utils/primitives.h>
#endif
using namespace oboe;
using namespace flowgraph;
/**
 * Pull 32-bit integer PCM from the data callback through the block reader and
 * convert it to float samples in the output port buffer.
 *
 * @param numFrames maximum number of frames to produce
 * @return number of frames actually produced
 */
int32_t SourceI32Caller::onProcess(int32_t numFrames) {
    int32_t numBytes = mStream->getBytesPerFrame() * numFrames;
    int32_t bytesRead = mBlockReader.read((uint8_t *) mConversionBuffer.get(), numBytes);
    int32_t framesRead = bytesRead / mStream->getBytesPerFrame();

    float *floatData = output.getBuffer();
    const int32_t *intData = mConversionBuffer.get();
    int32_t numSamples = framesRead * output.getSamplesPerFrame();

#if FLOWGRAPH_ANDROID_INTERNAL
    // Fixed: was "shortData", an undeclared identifier copied from the I16
    // source; it broke compilation when FLOWGRAPH_ANDROID_INTERNAL is enabled.
    memcpy_to_float_from_i32(floatData, intData, numSamples);
#else
    for (int i = 0; i < numSamples; i++) {
        *floatData++ = *intData++ * kScale; // scale to range -1.0 to 1.0
    }
#endif
    return framesRead;
}

View File

@ -0,0 +1,53 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef OBOE_SOURCE_I32_CALLER_H
#define OBOE_SOURCE_I32_CALLER_H
#include <memory.h>
#include <unistd.h>
#include <sys/types.h>
#include "flowgraph/FlowGraphNode.h"
#include "AudioSourceCaller.h"
#include "FixedBlockReader.h"
namespace oboe {
/**
* AudioSource that uses callback to get more data.
*/
class SourceI32Caller : public AudioSourceCaller {
public:
    SourceI32Caller(int32_t channelCount, int32_t framesPerCallback)
            : AudioSourceCaller(channelCount, framesPerCallback, sizeof(int32_t)) {
        // One int32_t per sample: channels * frames per buffer.
        // The size_t casts keep the product from overflowing int32_t arithmetic.
        mConversionBuffer = std::make_unique<int32_t[]>(static_cast<size_t>(channelCount)
                * static_cast<size_t>(output.getFramesPerBuffer()));
    }

    // Reads I32 samples from the callback and converts them to float;
    // implemented in SourceI32Caller.cpp.
    int32_t onProcess(int32_t numFrames) override;

    const char *getName() override {
        return "SourceI32Caller";
    }

private:
    // Staging area for raw 32-bit samples before float conversion.
    std::unique_ptr<int32_t[]> mConversionBuffer;
    // Maps the full signed 32-bit range onto [-1.0, 1.0).
    static constexpr float kScale = 1.0 / (1UL << 31);
};
}
#endif //OBOE_SOURCE_I32_CALLER_H

View File

@ -60,6 +60,12 @@ int32_t convertFormatToSizeInBytes(AudioFormat format) {
case AudioFormat::Float: case AudioFormat::Float:
size = sizeof(float); size = sizeof(float);
break; break;
case AudioFormat::I24:
size = 3; // packed 24-bit data
break;
case AudioFormat::I32:
size = sizeof(int32_t);
break;
default: default:
break; break;
} }
@ -98,6 +104,8 @@ const char *convertToText<AudioFormat>(AudioFormat format) {
case AudioFormat::Unspecified: return "Unspecified"; case AudioFormat::Unspecified: return "Unspecified";
case AudioFormat::I16: return "I16"; case AudioFormat::I16: return "I16";
case AudioFormat::Float: return "Float"; case AudioFormat::Float: return "Float";
case AudioFormat::I24: return "I24";
case AudioFormat::I32: return "I32";
default: return "Unrecognized format"; default: return "Unrecognized format";
} }
} }
@ -183,7 +191,7 @@ const char *convertToText<AudioStream*>(AudioStream* stream) {
<<"BufferCapacity: "<<stream->getBufferCapacityInFrames()<<std::endl <<"BufferCapacity: "<<stream->getBufferCapacityInFrames()<<std::endl
<<"BufferSize: "<<stream->getBufferSizeInFrames()<<std::endl <<"BufferSize: "<<stream->getBufferSizeInFrames()<<std::endl
<<"FramesPerBurst: "<< stream->getFramesPerBurst()<<std::endl <<"FramesPerBurst: "<< stream->getFramesPerBurst()<<std::endl
<<"FramesPerCallback: "<<stream->getFramesPerCallback()<<std::endl <<"FramesPerDataCallback: "<<stream->getFramesPerDataCallback()<<std::endl
<<"SampleRate: "<<stream->getSampleRate()<<std::endl <<"SampleRate: "<<stream->getSampleRate()<<std::endl
<<"ChannelCount: "<<stream->getChannelCount()<<std::endl <<"ChannelCount: "<<stream->getChannelCount()<<std::endl
<<"Format: "<<oboe::convertToText(stream->getFormat())<<std::endl <<"Format: "<<oboe::convertToText(stream->getFormat())<<std::endl
@ -302,4 +310,8 @@ int getSdkVersion() {
return sCachedSdkVersion; return sCachedSdkVersion;
} }
int getChannelCountFromChannelMask(ChannelMask channelMask) {
return __builtin_popcount(static_cast<uint32_t>(channelMask));
}
}// namespace oboe }// namespace oboe

View File

@ -14,18 +14,14 @@
* limitations under the License. * limitations under the License.
*/ */
#include <stdint.h>
#include <time.h>
#include <memory.h>
#include <cassert>
#include <algorithm> #include <algorithm>
#include <memory.h>
#include <stdint.h>
#include "common/OboeDebug.h" #include "oboe/FifoControllerBase.h"
#include "fifo/FifoControllerBase.h"
#include "fifo/FifoController.h" #include "fifo/FifoController.h"
#include "fifo/FifoControllerIndirect.h" #include "fifo/FifoControllerIndirect.h"
#include "fifo/FifoBuffer.h" #include "oboe/FifoBuffer.h"
#include "common/AudioClock.h"
namespace oboe { namespace oboe {

View File

@ -14,9 +14,8 @@
* limitations under the License. * limitations under the License.
*/ */
#include <cassert> #include <stdint.h>
#include <sys/types.h>
#include "FifoControllerBase.h"
#include "FifoController.h" #include "FifoController.h"
namespace oboe { namespace oboe {

View File

@ -17,9 +17,10 @@
#ifndef NATIVEOBOE_FIFOCONTROLLER_H #ifndef NATIVEOBOE_FIFOCONTROLLER_H
#define NATIVEOBOE_FIFOCONTROLLER_H #define NATIVEOBOE_FIFOCONTROLLER_H
#include <sys/types.h>
#include "FifoControllerBase.h"
#include <atomic> #include <atomic>
#include <stdint.h>
#include "oboe/FifoControllerBase.h"
namespace oboe { namespace oboe {

View File

@ -14,14 +14,11 @@
* limitations under the License. * limitations under the License.
*/ */
#include "FifoControllerBase.h"
#include <cassert>
#include <sys/types.h>
#include <algorithm> #include <algorithm>
#include "FifoControllerBase.h" #include <cassert>
#include <stdint.h>
#include "common/OboeDebug.h" #include "oboe/FifoControllerBase.h"
namespace oboe { namespace oboe {

View File

@ -14,6 +14,7 @@
* limitations under the License. * limitations under the License.
*/ */
#include <stdint.h>
#include "FifoControllerIndirect.h" #include "FifoControllerIndirect.h"

View File

@ -17,8 +17,10 @@
#ifndef NATIVEOBOE_FIFOCONTROLLERINDIRECT_H #ifndef NATIVEOBOE_FIFOCONTROLLERINDIRECT_H
#define NATIVEOBOE_FIFOCONTROLLERINDIRECT_H #define NATIVEOBOE_FIFOCONTROLLERINDIRECT_H
#include "FifoControllerBase.h"
#include <atomic> #include <atomic>
#include <stdint.h>
#include "oboe/FifoControllerBase.h"
namespace oboe { namespace oboe {

View File

@ -0,0 +1,52 @@
/*
* Copyright 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <unistd.h>
#include "FlowGraphNode.h"
#include "ChannelCountConverter.h"
using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
// Construct a converter node with an input port carrying inputChannelCount
// channels and an output port carrying outputChannelCount channels.
ChannelCountConverter::ChannelCountConverter(
        int32_t inputChannelCount,
        int32_t outputChannelCount)
        : input(*this, inputChannelCount)
        , output(*this, outputChannelCount) {
}

ChannelCountConverter::~ChannelCountConverter() = default;
/**
 * Copy input channels to output channels, frame by frame.
 * When there are more output channels than input channels, the input channels
 * are repeated (wrap around). When there are fewer, the extra inputs are dropped.
 *
 * @param numFrames number of frames to process
 * @return numFrames
 */
int32_t ChannelCountConverter::onProcess(int32_t numFrames) {
    const float *inputBuffer = input.getBuffer();
    float *outputBuffer = output.getBuffer();
    int32_t inputChannelCount = input.getSamplesPerFrame();
    int32_t outputChannelCount = output.getSamplesPerFrame();
    for (int i = 0; i < numFrames; i++) {
        int inputChannel = 0;
        for (int outputChannel = 0; outputChannel < outputChannelCount; outputChannel++) {
            // Copy input channels to output channels.
            // Wrap if we run out of inputs.
            // Discard if we run out of outputs.
            outputBuffer[outputChannel] = inputBuffer[inputChannel];
            // Fixed off-by-one: wrap at the LAST valid index (count - 1).
            // The previous test (inputChannel == inputChannelCount) let the
            // index reach inputChannelCount and read one sample past the end
            // of the current frame (e.g. mono -> stereo read out of bounds).
            inputChannel = (inputChannel == inputChannelCount - 1)
                    ? 0 : inputChannel + 1;
        }
        inputBuffer += inputChannelCount;
        outputBuffer += outputChannelCount;
    }
    return numFrames;
}

View File

@ -0,0 +1,52 @@
/*
* Copyright 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef FLOWGRAPH_CHANNEL_COUNT_CONVERTER_H
#define FLOWGRAPH_CHANNEL_COUNT_CONVERTER_H
#include <unistd.h>
#include <sys/types.h>
#include "FlowGraphNode.h"
namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/**
* Change the number of number of channels without mixing.
* When increasing the channel count, duplicate input channels.
* When decreasing the channel count, drop input channels.
*/
class ChannelCountConverter : public FlowGraphNode {
public:
    // inputChannelCount / outputChannelCount: channels on each port; they may
    // differ, which is the whole point of this node.
    explicit ChannelCountConverter(
            int32_t inputChannelCount,
            int32_t outputChannelCount);

    virtual ~ChannelCountConverter();

    // Copies input channels to output channels per frame, duplicating or
    // dropping channels as needed; implemented in ChannelCountConverter.cpp.
    int32_t onProcess(int32_t numFrames) override;

    const char *getName() override {
        return "ChannelCountConverter";
    }

    FlowGraphPortFloatInput input;
    FlowGraphPortFloatOutput output;
};
} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_CHANNEL_COUNT_CONVERTER_H

View File

@ -19,7 +19,7 @@
#include "FlowGraphNode.h" #include "FlowGraphNode.h"
#include "ClipToRange.h" #include "ClipToRange.h"
using namespace flowgraph; using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
ClipToRange::ClipToRange(int32_t channelCount) ClipToRange::ClipToRange(int32_t channelCount)
: FlowGraphFilter(channelCount) { : FlowGraphFilter(channelCount) {

View File

@ -23,7 +23,7 @@
#include "FlowGraphNode.h" #include "FlowGraphNode.h"
namespace flowgraph { namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
// This is 3 dB, (10^(3/20)), to match the maximum headroom in AudioTrack for float data. // This is 3 dB, (10^(3/20)), to match the maximum headroom in AudioTrack for float data.
// It is designed to allow occasional transient peaks. // It is designed to allow occasional transient peaks.
@ -63,6 +63,6 @@ private:
float mMaximum = kDefaultMaxHeadroom; float mMaximum = kDefaultMaxHeadroom;
}; };
} /* namespace flowgraph */ } /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_CLIP_TO_RANGE_H #endif //FLOWGRAPH_CLIP_TO_RANGE_H

View File

@ -19,26 +19,24 @@
#include <sys/types.h> #include <sys/types.h>
#include "FlowGraphNode.h" #include "FlowGraphNode.h"
using namespace flowgraph; using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
/***************************************************************************/ /***************************************************************************/
int32_t FlowGraphNode::pullData(int64_t framePosition, int32_t numFrames) { int32_t FlowGraphNode::pullData(int32_t numFrames, int64_t callCount) {
int32_t frameCount = numFrames; int32_t frameCount = numFrames;
// Prevent recursion and multiple execution of nodes. // Prevent recursion and multiple execution of nodes.
if (framePosition <= mLastFramePosition && !mBlockRecursion) { if (callCount > mLastCallCount) {
mBlockRecursion = true; // for cyclic graphs mLastCallCount = callCount;
if (mDataPulledAutomatically) { if (mDataPulledAutomatically) {
// Pull from all the upstream nodes. // Pull from all the upstream nodes.
for (auto &port : mInputPorts) { for (auto &port : mInputPorts) {
// TODO fix bug of leaving unused data in some ports if using multiple AudioSource // TODO fix bug of leaving unused data in some ports if using multiple AudioSource
frameCount = port.get().pullData(framePosition, frameCount); frameCount = port.get().pullData(callCount, frameCount);
} }
} }
if (frameCount > 0) { if (frameCount > 0) {
frameCount = onProcess(frameCount); frameCount = onProcess(frameCount);
} }
mLastFramePosition += frameCount;
mBlockRecursion = false;
mLastFrameCount = frameCount; mLastFrameCount = frameCount;
} else { } else {
frameCount = mLastFrameCount; frameCount = mLastFrameCount;
@ -60,6 +58,7 @@ void FlowGraphNode::pullReset() {
void FlowGraphNode::reset() { void FlowGraphNode::reset() {
mLastFrameCount = 0; mLastFrameCount = 0;
mLastCallCount = kInitialCallCount;
} }
/***************************************************************************/ /***************************************************************************/
@ -69,14 +68,14 @@ FlowGraphPortFloat::FlowGraphPortFloat(FlowGraphNode &parent,
: FlowGraphPort(parent, samplesPerFrame) : FlowGraphPort(parent, samplesPerFrame)
, mFramesPerBuffer(framesPerBuffer) , mFramesPerBuffer(framesPerBuffer)
, mBuffer(nullptr) { , mBuffer(nullptr) {
size_t numFloats = static_cast<size_t>(framesPerBuffer * getSamplesPerFrame()); size_t numFloats = static_cast<size_t>(framesPerBuffer) * getSamplesPerFrame();
mBuffer = std::make_unique<float[]>(numFloats); mBuffer = std::make_unique<float[]>(numFloats);
} }
/***************************************************************************/ /***************************************************************************/
int32_t FlowGraphPortFloatOutput::pullData(int64_t framePosition, int32_t numFrames) { int32_t FlowGraphPortFloatOutput::pullData(int64_t callCount, int32_t numFrames) {
numFrames = std::min(getFramesPerBuffer(), numFrames); numFrames = std::min(getFramesPerBuffer(), numFrames);
return mContainingNode.pullData(framePosition, numFrames); return mContainingNode.pullData(numFrames, callCount);
} }
void FlowGraphPortFloatOutput::pullReset() { void FlowGraphPortFloatOutput::pullReset() {
@ -93,10 +92,10 @@ void FlowGraphPortFloatOutput::disconnect(FlowGraphPortFloatInput *port) {
} }
/***************************************************************************/ /***************************************************************************/
int32_t FlowGraphPortFloatInput::pullData(int64_t framePosition, int32_t numFrames) { int32_t FlowGraphPortFloatInput::pullData(int64_t callCount, int32_t numFrames) {
return (mConnected == nullptr) return (mConnected == nullptr)
? std::min(getFramesPerBuffer(), numFrames) ? std::min(getFramesPerBuffer(), numFrames)
: mConnected->pullData(framePosition, numFrames); : mConnected->pullData(callCount, numFrames);
} }
void FlowGraphPortFloatInput::pullReset() { void FlowGraphPortFloatInput::pullReset() {
if (mConnected != nullptr) mConnected->pullReset(); if (mConnected != nullptr) mConnected->pullReset();
@ -109,3 +108,7 @@ float *FlowGraphPortFloatInput::getBuffer() {
return mConnected->getBuffer(); return mConnected->getBuffer();
} }
} }
int32_t FlowGraphSink::pullData(int32_t numFrames) {
return FlowGraphNode::pullData(numFrames, getLastCallCount() + 1);
}

View File

@ -38,11 +38,26 @@
// TODO Review use of raw pointers for connect(). Maybe use smart pointers but need to avoid // TODO Review use of raw pointers for connect(). Maybe use smart pointers but need to avoid
// run-time deallocation in audio thread. // run-time deallocation in audio thread.
// Set this to 1 if using it inside the Android framework. // Set flags FLOWGRAPH_ANDROID_INTERNAL and FLOWGRAPH_OUTER_NAMESPACE based on whether compiler
// This code is kept here so that it can be moved easily between Oboe and AAudio. // flag __ANDROID_NDK__ is defined. __ANDROID_NDK__ should be defined in oboe and not aaudio.
#define FLOWGRAPH_ANDROID_INTERNAL 0
namespace flowgraph { #ifndef FLOWGRAPH_ANDROID_INTERNAL
#ifdef __ANDROID_NDK__
#define FLOWGRAPH_ANDROID_INTERNAL 0
#else
#define FLOWGRAPH_ANDROID_INTERNAL 1
#endif // __ANDROID_NDK__
#endif // FLOWGRAPH_ANDROID_INTERNAL
#ifndef FLOWGRAPH_OUTER_NAMESPACE
#ifdef __ANDROID_NDK__
#define FLOWGRAPH_OUTER_NAMESPACE oboe
#else
#define FLOWGRAPH_OUTER_NAMESPACE aaudio
#endif // __ANDROID_NDK__
#endif // FLOWGRAPH_OUTER_NAMESPACE
namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
// Default block size that can be overridden when the FlowGraphPortFloat is created. // Default block size that can be overridden when the FlowGraphPortFloat is created.
// If it is too small then we will have too much overhead from switching between nodes. // If it is too small then we will have too much overhead from switching between nodes.
@ -58,7 +73,7 @@ class FlowGraphPortFloatInput;
*/ */
class FlowGraphNode { class FlowGraphNode {
public: public:
FlowGraphNode() {} FlowGraphNode() = default;
virtual ~FlowGraphNode() = default; virtual ~FlowGraphNode() = default;
/** /**
@ -71,15 +86,17 @@ public:
virtual int32_t onProcess(int32_t numFrames) = 0; virtual int32_t onProcess(int32_t numFrames) = 0;
/** /**
* If the framePosition is at or after the last frame position then call onProcess(). * If the callCount is at or after the previous callCount then call
* pullData on all of the upstreamNodes.
* Then call onProcess().
* This prevents infinite recursion in case of cyclic graphs. * This prevents infinite recursion in case of cyclic graphs.
* It also prevents nodes upstream from a branch from being executed twice. * It also prevents nodes upstream from a branch from being executed twice.
* *
* @param framePosition * @param callCount
* @param numFrames * @param numFrames
* @return number of frames valid * @return number of frames valid
*/ */
int32_t pullData(int64_t framePosition, int32_t numFrames); int32_t pullData(int32_t numFrames, int64_t callCount);
/** /**
* Recursively reset all the nodes in the graph, starting from a Sink. * Recursively reset all the nodes in the graph, starting from a Sink.
@ -94,7 +111,7 @@ public:
virtual void reset(); virtual void reset();
void addInputPort(FlowGraphPort &port) { void addInputPort(FlowGraphPort &port) {
mInputPorts.push_back(port); mInputPorts.emplace_back(port);
} }
bool isDataPulledAutomatically() const { bool isDataPulledAutomatically() const {
@ -118,12 +135,14 @@ public:
return "FlowGraph"; return "FlowGraph";
} }
int64_t getLastFramePosition() { int64_t getLastCallCount() {
return mLastFramePosition; return mLastCallCount;
} }
protected: protected:
int64_t mLastFramePosition = 0;
static constexpr int64_t kInitialCallCount = -1;
int64_t mLastCallCount = kInitialCallCount;
std::vector<std::reference_wrapper<FlowGraphPort>> mInputPorts; std::vector<std::reference_wrapper<FlowGraphPort>> mInputPorts;
@ -149,6 +168,8 @@ public:
, mSamplesPerFrame(samplesPerFrame) { , mSamplesPerFrame(samplesPerFrame) {
} }
virtual ~FlowGraphPort() = default;
// Ports are often declared public. So let's make them non-copyable. // Ports are often declared public. So let's make them non-copyable.
FlowGraphPort(const FlowGraphPort&) = delete; FlowGraphPort(const FlowGraphPort&) = delete;
FlowGraphPort& operator=(const FlowGraphPort&) = delete; FlowGraphPort& operator=(const FlowGraphPort&) = delete;
@ -385,7 +406,7 @@ public:
FlowGraphPortFloatInput input; FlowGraphPortFloatInput input;
/** /**
* Dummy processor. The work happens in the read() method. * Do nothing. The work happens in the read() method.
* *
* @param numFrames * @param numFrames
* @return number of frames actually processed * @return number of frames actually processed
@ -394,8 +415,15 @@ public:
return numFrames; return numFrames;
} }
virtual int32_t read(int64_t framePosition, void *data, int32_t numFrames) = 0; virtual int32_t read(void *data, int32_t numFrames) = 0;
protected:
/**
* Pull data through the graph using this nodes last callCount.
* @param numFrames
* @return
*/
int32_t pullData(int32_t numFrames);
}; };
/***************************************************************************/ /***************************************************************************/
@ -417,6 +445,6 @@ public:
FlowGraphPortFloatOutput output; FlowGraphPortFloatOutput output;
}; };
} /* namespace flowgraph */ } /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif /* FLOWGRAPH_FLOW_GRAPH_NODE_H */ #endif /* FLOWGRAPH_FLOW_GRAPH_NODE_H */

View File

@ -0,0 +1,55 @@
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef FLOWGRAPH_UTILITIES_H
#define FLOWGRAPH_UTILITIES_H
#include <unistd.h>
using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
class FlowgraphUtilities {
public:
    // This was copied from audio_utils/primitives.h
    /**
     * Convert a single-precision floating point value to a Q0.31 integer value.
     * Rounds to nearest, ties away from 0.
     *
     * Values outside the range [-1.0, 1.0) are properly clamped to -2147483648 and 2147483647,
     * including -Inf and +Inf. NaN values are considered undefined, and behavior may change
     * depending on hardware and future implementation of this function.
     */
    static int32_t clamp32FromFloat(float f)
    {
        static const float kScale = (float)(1UL << 31);
        // Clamp out-of-range values (including +/-Inf) to the integer limits.
        if (f <= -1.0f) {
            return INT32_MIN;
        }
        if (f >= 1.0f) {
            return INT32_MAX;
        }
        const float scaled = f * kScale;
        /* integer conversion is through truncation (though int to float is not).
         * ensure that we round to nearest, ties away from 0.
         * The 0.5 literal is double on purpose: the addition must happen in
         * double precision so large float magnitudes are not re-rounded.
         */
        return (scaled > 0) ? (int32_t)(scaled + 0.5) : (int32_t)(scaled - 0.5);
    }
};
#endif // FLOWGRAPH_UTILITIES_H

View File

@ -18,7 +18,7 @@
#include "ManyToMultiConverter.h" #include "ManyToMultiConverter.h"
using namespace flowgraph; using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
ManyToMultiConverter::ManyToMultiConverter(int32_t channelCount) ManyToMultiConverter::ManyToMultiConverter(int32_t channelCount)
: inputs(channelCount) : inputs(channelCount)

View File

@ -23,6 +23,8 @@
#include "FlowGraphNode.h" #include "FlowGraphNode.h"
namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/** /**
* Combine multiple mono inputs into one interleaved multi-channel output. * Combine multiple mono inputs into one interleaved multi-channel output.
*/ */
@ -34,7 +36,7 @@ public:
int32_t onProcess(int numFrames) override; int32_t onProcess(int numFrames) override;
void setEnabled(bool enabled) {} void setEnabled(bool /*enabled*/) {}
std::vector<std::unique_ptr<flowgraph::FlowGraphPortFloatInput>> inputs; std::vector<std::unique_ptr<flowgraph::FlowGraphPortFloatInput>> inputs;
flowgraph::FlowGraphPortFloatOutput output; flowgraph::FlowGraphPortFloatOutput output;
@ -46,4 +48,6 @@ public:
private: private:
}; };
} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_MANY_TO_MULTI_CONVERTER_H #endif //FLOWGRAPH_MANY_TO_MULTI_CONVERTER_H

View File

@ -0,0 +1,46 @@
/*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <unistd.h>
#include "MonoBlend.h"
using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
// Build a blender over `channelCount` channels. The reciprocal of the
// channel count is precomputed once here so onProcess() can multiply
// per frame instead of dividing.
// NOTE(review): assumes channelCount > 0 — a zero count would make
// mInvChannelCount infinite; confirm callers guarantee this.
MonoBlend::MonoBlend(int32_t channelCount)
        : FlowGraphFilter(channelCount)
        , mInvChannelCount(1. / channelCount)
{
}
/**
 * Replace every sample of each frame with the mean of that frame's samples,
 * so all channels carry the same (averaged) signal.
 *
 * @param numFrames number of interleaved frames to process
 * @return numFrames (all requested frames are processed)
 */
int32_t MonoBlend::onProcess(int32_t numFrames) {
    const int32_t channelCount = output.getSamplesPerFrame();
    const float *inputBuffer = input.getBuffer();
    float *outputBuffer = output.getBuffer();

    // Use signed int32_t indices to match numFrames/channelCount; the
    // original size_t counters caused signed/unsigned comparisons and
    // would loop (nearly) forever if numFrames were ever negative.
    for (int32_t i = 0; i < numFrames; ++i) {
        float accum = 0;
        for (int32_t j = 0; j < channelCount; ++j) {
            accum += *inputBuffer++;
        }
        // Multiply by the precomputed reciprocal instead of dividing.
        accum *= mInvChannelCount;
        for (int32_t j = 0; j < channelCount; ++j) {
            *outputBuffer++ = accum;
        }
    }

    return numFrames;
}

View File

@ -0,0 +1,48 @@
/*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef FLOWGRAPH_MONO_BLEND_H
#define FLOWGRAPH_MONO_BLEND_H
#include <sys/types.h>
#include <unistd.h>
#include "FlowGraphNode.h"
namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/**
* Combine data between multiple channels so each channel is an average
* of all channels.
*/
class MonoBlend : public FlowGraphFilter {
public:
    // channelCount is both the input and the output channel count.
    explicit MonoBlend(int32_t channelCount);

    virtual ~MonoBlend() = default;

    // Replaces every sample of each frame with the mean of that frame's samples.
    int32_t onProcess(int32_t numFrames) override;

    const char *getName() override {
        return "MonoBlend";
    }

private:
    // Precomputed 1.0 / channelCount so onProcess multiplies instead of divides.
    const float mInvChannelCount;
};
} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_MONO_BLEND

View File

@ -18,15 +18,13 @@
#include "FlowGraphNode.h" #include "FlowGraphNode.h"
#include "MonoToMultiConverter.h" #include "MonoToMultiConverter.h"
using namespace flowgraph; using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
MonoToMultiConverter::MonoToMultiConverter(int32_t channelCount) MonoToMultiConverter::MonoToMultiConverter(int32_t outputChannelCount)
: input(*this, 1) : input(*this, 1)
, output(*this, channelCount) { , output(*this, outputChannelCount) {
} }
MonoToMultiConverter::~MonoToMultiConverter() { }
int32_t MonoToMultiConverter::onProcess(int32_t numFrames) { int32_t MonoToMultiConverter::onProcess(int32_t numFrames) {
const float *inputBuffer = input.getBuffer(); const float *inputBuffer = input.getBuffer();
float *outputBuffer = output.getBuffer(); float *outputBuffer = output.getBuffer();

View File

@ -22,17 +22,17 @@
#include "FlowGraphNode.h" #include "FlowGraphNode.h"
namespace flowgraph { namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/** /**
* Convert a monophonic stream to a multi-channel stream * Convert a monophonic stream to a multi-channel interleaved stream
* with the same signal on each channel. * with the same signal on each channel.
*/ */
class MonoToMultiConverter : public FlowGraphNode { class MonoToMultiConverter : public FlowGraphNode {
public: public:
explicit MonoToMultiConverter(int32_t channelCount); explicit MonoToMultiConverter(int32_t outputChannelCount);
virtual ~MonoToMultiConverter(); virtual ~MonoToMultiConverter() = default;
int32_t onProcess(int32_t numFrames) override; int32_t onProcess(int32_t numFrames) override;
@ -44,6 +44,6 @@ public:
FlowGraphPortFloatOutput output; FlowGraphPortFloatOutput output;
}; };
} /* namespace flowgraph */ } /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_MONO_TO_MULTI_CONVERTER_H #endif //FLOWGRAPH_MONO_TO_MULTI_CONVERTER_H

View File

@ -0,0 +1,47 @@
/*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <unistd.h>
#include "FlowGraphNode.h"
#include "MultiToManyConverter.h"
using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
// Create one mono output port per channel of the interleaved input stream.
MultiToManyConverter::MultiToManyConverter(int32_t channelCount)
        : outputs(channelCount)
        , input(*this, channelCount) {
    for (int i = 0; i < channelCount; i++) {
        // Each output port carries exactly one sample per frame (mono).
        outputs[i] = std::make_unique<FlowGraphPortFloatOutput>(*this, 1);
    }
}

MultiToManyConverter::~MultiToManyConverter() = default;
/**
 * De-interleave the multi-channel input: copy channel `ch` of every input
 * frame into the matching mono output port.
 *
 * @param numFrames number of frames to de-interleave
 * @return numFrames (all requested frames are processed)
 */
int32_t MultiToManyConverter::onProcess(int32_t numFrames) {
    const int32_t channelCount = input.getSamplesPerFrame();
    const float *interleaved = input.getBuffer();
    for (int ch = 0; ch < channelCount; ch++) {
        float *mono = outputs[ch]->getBuffer();
        for (int frame = 0; frame < numFrames; frame++) {
            mono[frame] = interleaved[frame * channelCount + ch];
        }
    }
    return numFrames;
}

View File

@ -0,0 +1,49 @@
/*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef FLOWGRAPH_MULTI_TO_MANY_CONVERTER_H
#define FLOWGRAPH_MULTI_TO_MANY_CONVERTER_H
#include <unistd.h>
#include <sys/types.h>
#include "FlowGraphNode.h"
namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/**
* Convert a multi-channel interleaved stream to multiple mono-channel
* outputs
*/
class MultiToManyConverter : public FlowGraphNode {
public:
    // channelCount: number of channels in the interleaved input and the
    // number of mono output ports created.
    explicit MultiToManyConverter(int32_t channelCount);

    virtual ~MultiToManyConverter();

    // Copies channel ch of each input frame to outputs[ch].
    int32_t onProcess(int32_t numFrames) override;

    const char *getName() override {
        return "MultiToManyConverter";
    }

    // One mono output port per input channel.
    std::vector<std::unique_ptr<flowgraph::FlowGraphPortFloatOutput>> outputs;
    // Interleaved multi-channel input.
    flowgraph::FlowGraphPortFloatInput input;
};
} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_MULTI_TO_MANY_CONVERTER_H

View File

@ -0,0 +1,41 @@
/*
* Copyright 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <unistd.h>
#include "FlowGraphNode.h"
#include "MultiToMonoConverter.h"
using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
// Accept an interleaved stream of inputChannelCount channels and expose a
// mono output that will carry only channel[0] (see onProcess).
MultiToMonoConverter::MultiToMonoConverter(int32_t inputChannelCount)
        : input(*this, inputChannelCount)
        , output(*this, 1) {
}

MultiToMonoConverter::~MultiToMonoConverter() = default;
/**
 * Keep only channel 0 of each interleaved input frame, writing it to the
 * mono output.
 *
 * @param numFrames number of frames to process
 * @return numFrames (all requested frames are processed)
 */
int32_t MultiToMonoConverter::onProcess(int32_t numFrames) {
    const int32_t stride = input.getSamplesPerFrame();
    const float *interleaved = input.getBuffer();
    float *mono = output.getBuffer();
    for (int frame = 0; frame < numFrames; frame++) {
        // First sample of each frame is channel[0]; skip the rest.
        mono[frame] = interleaved[frame * stride];
    }
    return numFrames;
}

View File

@ -0,0 +1,49 @@
/*
* Copyright 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef FLOWGRAPH_MULTI_TO_MONO_CONVERTER_H
#define FLOWGRAPH_MULTI_TO_MONO_CONVERTER_H
#include <unistd.h>
#include <sys/types.h>
#include "FlowGraphNode.h"
namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/**
* Convert a multi-channel interleaved stream to a monophonic stream
* by extracting channel[0].
*/
class MultiToMonoConverter : public FlowGraphNode {
public:
    // inputChannelCount: number of channels in the interleaved input;
    // the output is always mono (channel[0] only).
    explicit MultiToMonoConverter(int32_t inputChannelCount);

    virtual ~MultiToMonoConverter();

    // Extracts channel[0] of each input frame into the mono output.
    int32_t onProcess(int32_t numFrames) override;

    const char *getName() override {
        return "MultiToMonoConverter";
    }

    // Interleaved multi-channel input.
    FlowGraphPortFloatInput input;
    // Mono output carrying channel[0].
    FlowGraphPortFloatOutput output;
};
} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_MULTI_TO_MONO_CONVERTER_H

View File

@ -19,7 +19,7 @@
#include "FlowGraphNode.h" #include "FlowGraphNode.h"
#include "RampLinear.h" #include "RampLinear.h"
using namespace flowgraph; using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
RampLinear::RampLinear(int32_t channelCount) RampLinear::RampLinear(int32_t channelCount)
: FlowGraphFilter(channelCount) { : FlowGraphFilter(channelCount) {
@ -32,6 +32,10 @@ void RampLinear::setLengthInFrames(int32_t frames) {
void RampLinear::setTarget(float target) { void RampLinear::setTarget(float target) {
mTarget.store(target); mTarget.store(target);
// If the ramp has not been used then start immediately at this level.
if (mLastCallCount == kInitialCallCount) {
forceCurrent(target);
}
} }
float RampLinear::interpolateCurrent() { float RampLinear::interpolateCurrent() {

View File

@ -23,7 +23,7 @@
#include "FlowGraphNode.h" #include "FlowGraphNode.h"
namespace flowgraph { namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/** /**
* When the target is modified then the output will ramp smoothly * When the target is modified then the output will ramp smoothly
@ -91,6 +91,6 @@ private:
float mLevelTo = 0.0f; float mLevelTo = 0.0f;
}; };
} /* namespace flowgraph */ } /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_RAMP_LINEAR_H #endif //FLOWGRAPH_RAMP_LINEAR_H

View File

@ -16,20 +16,27 @@
#include "SampleRateConverter.h" #include "SampleRateConverter.h"
using namespace flowgraph; using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
using namespace resampler; using namespace RESAMPLER_OUTER_NAMESPACE::resampler;
SampleRateConverter::SampleRateConverter(int32_t channelCount, MultiChannelResampler &resampler) SampleRateConverter::SampleRateConverter(int32_t channelCount,
MultiChannelResampler &resampler)
: FlowGraphFilter(channelCount) : FlowGraphFilter(channelCount)
, mResampler(resampler) { , mResampler(resampler) {
setDataPulledAutomatically(false); setDataPulledAutomatically(false);
} }
void SampleRateConverter::reset() {
FlowGraphNode::reset();
mInputCursor = kInitialCallCount;
}
// Return true if there is a sample available. // Return true if there is a sample available.
bool SampleRateConverter::isInputAvailable() { bool SampleRateConverter::isInputAvailable() {
// If we have consumed all of the input data then go out and get some more.
if (mInputCursor >= mNumValidInputFrames) { if (mInputCursor >= mNumValidInputFrames) {
mNumValidInputFrames = input.pullData(mInputFramePosition, input.getFramesPerBuffer()); mInputCallCount++;
mInputFramePosition += mNumValidInputFrames; mNumValidInputFrames = input.pullData(mInputCallCount, input.getFramesPerBuffer());
mInputCursor = 0; mInputCursor = 0;
} }
return (mInputCursor < mNumValidInputFrames); return (mInputCursor < mNumValidInputFrames);

View File

@ -14,8 +14,8 @@
* limitations under the License. * limitations under the License.
*/ */
#ifndef OBOE_SAMPLE_RATE_CONVERTER_H #ifndef FLOWGRAPH_SAMPLE_RATE_CONVERTER_H
#define OBOE_SAMPLE_RATE_CONVERTER_H #define FLOWGRAPH_SAMPLE_RATE_CONVERTER_H
#include <unistd.h> #include <unistd.h>
#include <sys/types.h> #include <sys/types.h>
@ -23,11 +23,12 @@
#include "FlowGraphNode.h" #include "FlowGraphNode.h"
#include "resampler/MultiChannelResampler.h" #include "resampler/MultiChannelResampler.h"
namespace flowgraph { namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
class SampleRateConverter : public FlowGraphFilter { class SampleRateConverter : public FlowGraphFilter {
public: public:
explicit SampleRateConverter(int32_t channelCount, resampler::MultiChannelResampler &mResampler); explicit SampleRateConverter(int32_t channelCount,
resampler::MultiChannelResampler &mResampler);
virtual ~SampleRateConverter() = default; virtual ~SampleRateConverter() = default;
@ -37,6 +38,8 @@ public:
return "SampleRateConverter"; return "SampleRateConverter";
} }
void reset() override;
private: private:
// Return true if there is a sample available. // Return true if there is a sample available.
@ -47,10 +50,14 @@ private:
resampler::MultiChannelResampler &mResampler; resampler::MultiChannelResampler &mResampler;
int32_t mInputCursor = 0; int32_t mInputCursor = 0; // offset into the input port buffer
int32_t mNumValidInputFrames = 0; int32_t mNumValidInputFrames = 0; // number of valid frames currently in the input port buffer
int64_t mInputFramePosition = 0; // monotonic counter of input frames used for pullData // We need our own callCount for upstream calls because calls occur at a different rate.
// This means we cannot have cyclic graphs or merges that contain an SRC.
int64_t mInputCallCount = 0;
}; };
} /* namespace flowgraph */
#endif //OBOE_SAMPLE_RATE_CONVERTER_H } /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_SAMPLE_RATE_CONVERTER_H

View File

@ -19,22 +19,20 @@
#include "FlowGraphNode.h" #include "FlowGraphNode.h"
#include "SinkFloat.h" #include "SinkFloat.h"
using namespace flowgraph; using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
SinkFloat::SinkFloat(int32_t channelCount) SinkFloat::SinkFloat(int32_t channelCount)
: FlowGraphSink(channelCount) { : FlowGraphSink(channelCount) {
} }
int32_t SinkFloat::read(int64_t framePosition, void *data, int32_t numFrames) { int32_t SinkFloat::read(void *data, int32_t numFrames) {
// printf("SinkFloat::read(,,%d)\n", numFrames);
float *floatData = (float *) data; float *floatData = (float *) data;
int32_t channelCount = input.getSamplesPerFrame(); const int32_t channelCount = input.getSamplesPerFrame();
int32_t framesLeft = numFrames; int32_t framesLeft = numFrames;
while (framesLeft > 0) { while (framesLeft > 0) {
// Run the graph and pull data through the input port. // Run the graph and pull data through the input port.
int32_t framesPulled = pullData(framePosition, framesLeft); int32_t framesPulled = pullData(framesLeft);
// printf("SinkFloat::read: framesLeft = %d, framesPulled = %d\n", framesLeft, framesPulled);
if (framesPulled <= 0) { if (framesPulled <= 0) {
break; break;
} }
@ -43,8 +41,6 @@ int32_t SinkFloat::read(int64_t framePosition, void *data, int32_t numFrames) {
memcpy(floatData, signal, numSamples * sizeof(float)); memcpy(floatData, signal, numSamples * sizeof(float));
floatData += numSamples; floatData += numSamples;
framesLeft -= framesPulled; framesLeft -= framesPulled;
framePosition += framesPulled;
} }
// printf("SinkFloat returning %d\n", numFrames - framesLeft);
return numFrames - framesLeft; return numFrames - framesLeft;
} }

View File

@ -23,7 +23,7 @@
#include "FlowGraphNode.h" #include "FlowGraphNode.h"
namespace flowgraph { namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/** /**
* AudioSink that lets you read data as 32-bit floats. * AudioSink that lets you read data as 32-bit floats.
@ -31,14 +31,15 @@ namespace flowgraph {
class SinkFloat : public FlowGraphSink { class SinkFloat : public FlowGraphSink {
public: public:
explicit SinkFloat(int32_t channelCount); explicit SinkFloat(int32_t channelCount);
~SinkFloat() override = default;
int32_t read(int64_t framePosition, void *data, int32_t numFrames) override; int32_t read(void *data, int32_t numFrames) override;
const char *getName() override { const char *getName() override {
return "SinkFloat"; return "SinkFloat";
} }
}; };
} /* namespace flowgraph */ } /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_SINK_FLOAT_H #endif //FLOWGRAPH_SINK_FLOAT_H

View File

@ -23,19 +23,19 @@
#include <audio_utils/primitives.h> #include <audio_utils/primitives.h>
#endif #endif
using namespace flowgraph; using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
SinkI16::SinkI16(int32_t channelCount) SinkI16::SinkI16(int32_t channelCount)
: FlowGraphSink(channelCount) {} : FlowGraphSink(channelCount) {}
int32_t SinkI16::read(int64_t framePosition, void *data, int32_t numFrames) { int32_t SinkI16::read(void *data, int32_t numFrames) {
int16_t *shortData = (int16_t *) data; int16_t *shortData = (int16_t *) data;
const int32_t channelCount = input.getSamplesPerFrame(); const int32_t channelCount = input.getSamplesPerFrame();
int32_t framesLeft = numFrames; int32_t framesLeft = numFrames;
while (framesLeft > 0) { while (framesLeft > 0) {
// Run the graph and pull data through the input port. // Run the graph and pull data through the input port.
int32_t framesRead = pullData(framePosition, framesLeft); int32_t framesRead = pullData(framesLeft);
if (framesRead <= 0) { if (framesRead <= 0) {
break; break;
} }
@ -52,7 +52,6 @@ int32_t SinkI16::read(int64_t framePosition, void *data, int32_t numFrames) {
} }
#endif #endif
framesLeft -= framesRead; framesLeft -= framesRead;
framePosition += framesRead;
} }
return numFrames - framesLeft; return numFrames - framesLeft;
} }

View File

@ -22,7 +22,7 @@
#include "FlowGraphNode.h" #include "FlowGraphNode.h"
namespace flowgraph { namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/** /**
* AudioSink that lets you read data as 16-bit signed integers. * AudioSink that lets you read data as 16-bit signed integers.
@ -31,13 +31,13 @@ class SinkI16 : public FlowGraphSink {
public: public:
explicit SinkI16(int32_t channelCount); explicit SinkI16(int32_t channelCount);
int32_t read(int64_t framePosition, void *data, int32_t numFrames) override; int32_t read(void *data, int32_t numFrames) override;
const char *getName() override { const char *getName() override {
return "SinkI16"; return "SinkI16";
} }
}; };
} /* namespace flowgraph */ } /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_SINK_I16_H #endif //FLOWGRAPH_SINK_I16_H

View File

@ -25,19 +25,19 @@
#include <audio_utils/primitives.h> #include <audio_utils/primitives.h>
#endif #endif
using namespace flowgraph; using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
SinkI24::SinkI24(int32_t channelCount) SinkI24::SinkI24(int32_t channelCount)
: FlowGraphSink(channelCount) {} : FlowGraphSink(channelCount) {}
int32_t SinkI24::read(int64_t framePosition, void *data, int32_t numFrames) { int32_t SinkI24::read(void *data, int32_t numFrames) {
uint8_t *byteData = (uint8_t *) data; uint8_t *byteData = (uint8_t *) data;
const int32_t channelCount = input.getSamplesPerFrame(); const int32_t channelCount = input.getSamplesPerFrame();
int32_t framesLeft = numFrames; int32_t framesLeft = numFrames;
while (framesLeft > 0) { while (framesLeft > 0) {
// Run the graph and pull data through the input port. // Run the graph and pull data through the input port.
int32_t framesRead = pullData(framePosition, framesLeft); int32_t framesRead = pullData(framesLeft);
if (framesRead <= 0) { if (framesRead <= 0) {
break; break;
} }
@ -61,7 +61,6 @@ int32_t SinkI24::read(int64_t framePosition, void *data, int32_t numFrames) {
} }
#endif #endif
framesLeft -= framesRead; framesLeft -= framesRead;
framePosition += framesRead;
} }
return numFrames - framesLeft; return numFrames - framesLeft;
} }

View File

@ -22,7 +22,7 @@
#include "FlowGraphNode.h" #include "FlowGraphNode.h"
namespace flowgraph { namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/** /**
* AudioSink that lets you read data as packed 24-bit signed integers. * AudioSink that lets you read data as packed 24-bit signed integers.
@ -32,13 +32,13 @@ class SinkI24 : public FlowGraphSink {
public: public:
explicit SinkI24(int32_t channelCount); explicit SinkI24(int32_t channelCount);
int32_t read(int64_t framePosition, void *data, int32_t numFrames) override; int32_t read(void *data, int32_t numFrames) override;
const char *getName() override { const char *getName() override {
return "SinkI24"; return "SinkI24";
} }
}; };
} /* namespace flowgraph */ } /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_SINK_I24_H #endif //FLOWGRAPH_SINK_I24_H

View File

@ -0,0 +1,55 @@
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "FlowGraphNode.h"
#include "FlowgraphUtilities.h"
#include "SinkI32.h"
#if FLOWGRAPH_ANDROID_INTERNAL
#include <audio_utils/primitives.h>
#endif
using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
// A sink with `channelCount` samples per frame; callers drain the graph
// through read().
SinkI32::SinkI32(int32_t channelCount)
        : FlowGraphSink(channelCount) {}
/**
 * Pull float samples through the graph and deliver them to `data` as
 * 32-bit signed integers (Q0.31 via clamp32FromFloat on the portable path).
 *
 * @param data destination buffer; must hold at least
 *             numFrames * channelCount int32_t samples
 * @param numFrames maximum number of frames to read
 * @return number of frames actually read; may be short if upstream
 *         nodes stop producing data
 */
int32_t SinkI32::read(void *data, int32_t numFrames) {
    int32_t *intData = (int32_t *) data;
    const int32_t channelCount = input.getSamplesPerFrame();
    int32_t framesLeft = numFrames;
    while (framesLeft > 0) {
        // Run the graph and pull data through the input port.
        int32_t framesRead = pullData(framesLeft);
        if (framesRead <= 0) {
            break;  // upstream exhausted; return what was gathered so far
        }
        const float *signal = input.getBuffer();
        int32_t numSamples = framesRead * channelCount;
#if FLOWGRAPH_ANDROID_INTERNAL
        // Inside the Android framework, use the optimized conversion routine.
        memcpy_to_i32_from_float(intData, signal, numSamples);
        intData += numSamples;
        signal += numSamples;
#else
        // Portable path: clamp each float to [-1.0, 1.0) and scale to int32.
        for (int i = 0; i < numSamples; i++) {
            *intData++ = FlowgraphUtilities::clamp32FromFloat(*signal++);
        }
#endif
        framesLeft -= framesRead;
    }
    return numFrames - framesLeft;
}

View File

@ -0,0 +1,40 @@
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef FLOWGRAPH_SINK_I32_H
#define FLOWGRAPH_SINK_I32_H
#include <stdint.h>
#include "FlowGraphNode.h"
namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
// AudioSink that lets you read graph data as 32-bit signed integers.
class SinkI32 : public FlowGraphSink {
public:
    // channelCount: samples per frame delivered by read().
    explicit SinkI32(int32_t channelCount);
    ~SinkI32() override = default;

    // Pulls floats through the graph and converts them to int32 (Q0.31).
    int32_t read(void *data, int32_t numFrames) override;

    const char *getName() override {
        return "SinkI32";
    }
};
} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_SINK_I32_H

View File

@ -14,13 +14,12 @@
* limitations under the License. * limitations under the License.
*/ */
#include "common/OboeDebug.h"
#include <algorithm> #include <algorithm>
#include <unistd.h> #include <unistd.h>
#include "FlowGraphNode.h" #include "FlowGraphNode.h"
#include "SourceFloat.h" #include "SourceFloat.h"
using namespace flowgraph; using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
SourceFloat::SourceFloat(int32_t channelCount) SourceFloat::SourceFloat(int32_t channelCount)
: FlowGraphSourceBuffered(channelCount) { : FlowGraphSourceBuffered(channelCount) {
@ -28,11 +27,11 @@ SourceFloat::SourceFloat(int32_t channelCount)
int32_t SourceFloat::onProcess(int32_t numFrames) { int32_t SourceFloat::onProcess(int32_t numFrames) {
float *outputBuffer = output.getBuffer(); float *outputBuffer = output.getBuffer();
int32_t channelCount = output.getSamplesPerFrame(); const int32_t channelCount = output.getSamplesPerFrame();
int32_t framesLeft = mSizeInFrames - mFrameIndex; const int32_t framesLeft = mSizeInFrames - mFrameIndex;
int32_t framesToProcess = std::min(numFrames, framesLeft); const int32_t framesToProcess = std::min(numFrames, framesLeft);
int32_t numSamples = framesToProcess * channelCount; const int32_t numSamples = framesToProcess * channelCount;
const float *floatBase = (float *) mData; const float *floatBase = (float *) mData;
const float *floatData = &floatBase[mFrameIndex * channelCount]; const float *floatData = &floatBase[mFrameIndex * channelCount];

View File

@ -22,7 +22,7 @@
#include "FlowGraphNode.h" #include "FlowGraphNode.h"
namespace flowgraph { namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/** /**
* AudioSource that reads a block of pre-defined float data. * AudioSource that reads a block of pre-defined float data.
@ -30,6 +30,7 @@ namespace flowgraph {
class SourceFloat : public FlowGraphSourceBuffered { class SourceFloat : public FlowGraphSourceBuffered {
public: public:
explicit SourceFloat(int32_t channelCount); explicit SourceFloat(int32_t channelCount);
~SourceFloat() override = default;
int32_t onProcess(int32_t numFrames) override; int32_t onProcess(int32_t numFrames) override;
@ -38,6 +39,6 @@ public:
} }
}; };
} /* namespace flowgraph */ } /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_SOURCE_FLOAT_H #endif //FLOWGRAPH_SOURCE_FLOAT_H

View File

@ -24,7 +24,7 @@
#include <audio_utils/primitives.h> #include <audio_utils/primitives.h>
#endif #endif
using namespace flowgraph; using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
SourceI16::SourceI16(int32_t channelCount) SourceI16::SourceI16(int32_t channelCount)
: FlowGraphSourceBuffered(channelCount) { : FlowGraphSourceBuffered(channelCount) {

View File

@ -22,7 +22,7 @@
#include "FlowGraphNode.h" #include "FlowGraphNode.h"
namespace flowgraph { namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/** /**
* AudioSource that reads a block of pre-defined 16-bit integer data. * AudioSource that reads a block of pre-defined 16-bit integer data.
*/ */
@ -37,6 +37,6 @@ public:
} }
}; };
} /* namespace flowgraph */ } /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_SOURCE_I16_H #endif //FLOWGRAPH_SOURCE_I16_H

View File

@ -17,14 +17,14 @@
#include <algorithm> #include <algorithm>
#include <unistd.h> #include <unistd.h>
#include "FlowGraphNode.h"
#include "SourceI24.h"
#if FLOWGRAPH_ANDROID_INTERNAL #if FLOWGRAPH_ANDROID_INTERNAL
#include <audio_utils/primitives.h> #include <audio_utils/primitives.h>
#endif #endif
#include "FlowGraphNode.h" using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
#include "SourceI24.h"
using namespace flowgraph;
constexpr int kBytesPerI24Packed = 3; constexpr int kBytesPerI24Packed = 3;

View File

@ -22,7 +22,7 @@
#include "FlowGraphNode.h" #include "FlowGraphNode.h"
namespace flowgraph { namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/** /**
* AudioSource that reads a block of pre-defined 24-bit packed integer data. * AudioSource that reads a block of pre-defined 24-bit packed integer data.
@ -38,6 +38,6 @@ public:
} }
}; };
} /* namespace flowgraph */ } /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_SOURCE_I24_H #endif //FLOWGRAPH_SOURCE_I24_H

View File

@ -0,0 +1,54 @@
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <algorithm>
#include <unistd.h>
#include "FlowGraphNode.h"
#include "SourceI32.h"
#if FLOWGRAPH_ANDROID_INTERNAL
#include <audio_utils/primitives.h>
#endif
using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
// Construct a source for interleaved I32 data; all state lives in the base class.
SourceI32::SourceI32(int32_t channelCount) : FlowGraphSourceBuffered(channelCount) {}
// Convert buffered 32-bit integer samples to float and write them to the
// output port. Produces at most numFrames frames, limited by the data
// remaining in mData; returns the number of frames actually produced.
int32_t SourceI32::onProcess(int32_t numFrames) {
    float *out = output.getBuffer();
    const int32_t samplesPerFrame = output.getSamplesPerFrame();
    const int32_t framesAvailable = mSizeInFrames - mFrameIndex;
    const int32_t frameCount = std::min(numFrames, framesAvailable);
    const int32_t sampleCount = frameCount * samplesPerFrame;
    const int32_t *sourceBase = static_cast<const int32_t *>(mData);
    const int32_t *source = &sourceBase[mFrameIndex * samplesPerFrame];
#if FLOWGRAPH_ANDROID_INTERNAL
    // Platform builds use the optimized audio_utils conversion.
    memcpy_to_float_from_i32(out, source, sampleCount);
#else
    // Portable path: scale each I32 sample by kScale into float range.
    for (int i = 0; i < sampleCount; i++) {
        out[i] = source[i] * kScale;
    }
#endif
    mFrameIndex += frameCount;
    return frameCount;
}

View File

@ -0,0 +1,42 @@
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef FLOWGRAPH_SOURCE_I32_H
#define FLOWGRAPH_SOURCE_I32_H
#include <stdint.h>
#include "FlowGraphNode.h"
namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/**
 * AudioSource that reads a block of pre-defined 32-bit integer data and
 * converts it to float samples on its output port (see onProcess in
 * SourceI32.cpp).
 */
class SourceI32 : public FlowGraphSourceBuffered {
public:
    // channelCount: number of interleaved channels per frame.
    explicit SourceI32(int32_t channelCount);
    ~SourceI32() override = default;
    // Convert up to numFrames frames of I32 data to float output.
    // Returns the number of frames actually produced.
    int32_t onProcess(int32_t numFrames) override;
    // Node name used for debugging/diagnostics.
    const char *getName() override {
        return "SourceI32";
    }
private:
    // Maps full-scale int32 (±2^31) onto the normalized float range [-1.0, 1.0).
    static constexpr float kScale = 1.0 / (1UL << 31);
};
} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_SOURCE_I32_H

View File

@ -19,7 +19,9 @@
#include <math.h> #include <math.h>
namespace resampler { #include "ResamplerDefinitions.h"
namespace RESAMPLER_OUTER_NAMESPACE::resampler {
/** /**
* Calculate a HyperbolicCosineWindow window centered at 0. * Calculate a HyperbolicCosineWindow window centered at 0.
@ -64,5 +66,6 @@ private:
double mInverseCoshAlpha = 1.0; double mInverseCoshAlpha = 1.0;
}; };
} // namespace resampler } /* namespace RESAMPLER_OUTER_NAMESPACE::resampler */
#endif //RESAMPLER_HYPERBOLIC_COSINE_WINDOW_H #endif //RESAMPLER_HYPERBOLIC_COSINE_WINDOW_H

View File

@ -16,7 +16,7 @@
#include "IntegerRatio.h" #include "IntegerRatio.h"
using namespace resampler; using namespace RESAMPLER_OUTER_NAMESPACE::resampler;
// Enough primes to cover the common sample rates. // Enough primes to cover the common sample rates.
static const int kPrimes[] = { static const int kPrimes[] = {

View File

@ -14,12 +14,14 @@
* limitations under the License. * limitations under the License.
*/ */
#ifndef OBOE_INTEGER_RATIO_H #ifndef RESAMPLER_INTEGER_RATIO_H
#define OBOE_INTEGER_RATIO_H #define RESAMPLER_INTEGER_RATIO_H
#include <sys/types.h> #include <sys/types.h>
namespace resampler { #include "ResamplerDefinitions.h"
namespace RESAMPLER_OUTER_NAMESPACE::resampler {
/** /**
* Represent the ratio of two integers. * Represent the ratio of two integers.
@ -47,6 +49,6 @@ private:
int32_t mDenominator; int32_t mDenominator;
}; };
} } /* namespace RESAMPLER_OUTER_NAMESPACE::resampler */
#endif //OBOE_INTEGER_RATIO_H #endif //RESAMPLER_INTEGER_RATIO_H

View File

@ -19,7 +19,9 @@
#include <math.h> #include <math.h>
namespace resampler { #include "ResamplerDefinitions.h"
namespace RESAMPLER_OUTER_NAMESPACE::resampler {
/** /**
* Calculate a Kaiser window centered at 0. * Calculate a Kaiser window centered at 0.
@ -83,5 +85,6 @@ private:
double mInverseBesselBeta = 1.0; double mInverseBesselBeta = 1.0;
}; };
} // namespace resampler } /* namespace RESAMPLER_OUTER_NAMESPACE::resampler */
#endif //RESAMPLER_KAISER_WINDOW_H #endif //RESAMPLER_KAISER_WINDOW_H

View File

@ -16,7 +16,7 @@
#include "LinearResampler.h" #include "LinearResampler.h"
using namespace resampler; using namespace RESAMPLER_OUTER_NAMESPACE::resampler;
LinearResampler::LinearResampler(const MultiChannelResampler::Builder &builder) LinearResampler::LinearResampler(const MultiChannelResampler::Builder &builder)
: MultiChannelResampler(builder) { : MultiChannelResampler(builder) {

View File

@ -14,22 +14,24 @@
* limitations under the License. * limitations under the License.
*/ */
#ifndef OBOE_LINEAR_RESAMPLER_H #ifndef RESAMPLER_LINEAR_RESAMPLER_H
#define OBOE_LINEAR_RESAMPLER_H #define RESAMPLER_LINEAR_RESAMPLER_H
#include <memory> #include <memory>
#include <sys/types.h> #include <sys/types.h>
#include <unistd.h> #include <unistd.h>
#include "MultiChannelResampler.h"
namespace resampler { #include "MultiChannelResampler.h"
#include "ResamplerDefinitions.h"
namespace RESAMPLER_OUTER_NAMESPACE::resampler {
/** /**
* Simple resampler that uses bi-linear interpolation. * Simple resampler that uses bi-linear interpolation.
*/ */
class LinearResampler : public MultiChannelResampler { class LinearResampler : public MultiChannelResampler {
public: public:
LinearResampler(const MultiChannelResampler::Builder &builder); explicit LinearResampler(const MultiChannelResampler::Builder &builder);
void writeFrame(const float *frame) override; void writeFrame(const float *frame) override;
@ -40,5 +42,6 @@ private:
std::unique_ptr<float[]> mCurrentFrame; std::unique_ptr<float[]> mCurrentFrame;
}; };
} // namespace resampler } /* namespace RESAMPLER_OUTER_NAMESPACE::resampler */
#endif //OBOE_LINEAR_RESAMPLER_H
#endif //RESAMPLER_LINEAR_RESAMPLER_H

View File

@ -25,11 +25,12 @@
#include "SincResampler.h" #include "SincResampler.h"
#include "SincResamplerStereo.h" #include "SincResamplerStereo.h"
using namespace resampler; using namespace RESAMPLER_OUTER_NAMESPACE::resampler;
MultiChannelResampler::MultiChannelResampler(const MultiChannelResampler::Builder &builder) MultiChannelResampler::MultiChannelResampler(const MultiChannelResampler::Builder &builder)
: mNumTaps(builder.getNumTaps()) : mNumTaps(builder.getNumTaps())
, mX(builder.getChannelCount() * builder.getNumTaps() * 2) , mX(static_cast<size_t>(builder.getChannelCount())
* static_cast<size_t>(builder.getNumTaps()) * 2)
, mSingleFrame(builder.getChannelCount()) , mSingleFrame(builder.getChannelCount())
, mChannelCount(builder.getChannelCount()) , mChannelCount(builder.getChannelCount())
{ {
@ -39,7 +40,7 @@ MultiChannelResampler::MultiChannelResampler(const MultiChannelResampler::Builde
ratio.reduce(); ratio.reduce();
mNumerator = ratio.getNumerator(); mNumerator = ratio.getNumerator();
mDenominator = ratio.getDenominator(); mDenominator = ratio.getDenominator();
mIntegerPhase = mDenominator; mIntegerPhase = mDenominator; // so we start with a write needed
} }
// static factory method // static factory method
@ -110,7 +111,7 @@ void MultiChannelResampler::writeFrame(const float *frame) {
if (--mCursor < 0) { if (--mCursor < 0) {
mCursor = getNumTaps() - 1; mCursor = getNumTaps() - 1;
} }
float *dest = &mX[mCursor * getChannelCount()]; float *dest = &mX[static_cast<size_t>(mCursor) * static_cast<size_t>(getChannelCount())];
int offset = getNumTaps() * getChannelCount(); int offset = getNumTaps() * getChannelCount();
for (int channel = 0; channel < getChannelCount(); channel++) { for (int channel = 0; channel < getChannelCount(); channel++) {
// Write twice so we avoid having to wrap when reading. // Write twice so we avoid having to wrap when reading.
@ -130,7 +131,7 @@ void MultiChannelResampler::generateCoefficients(int32_t inputRate,
int32_t numRows, int32_t numRows,
double phaseIncrement, double phaseIncrement,
float normalizedCutoff) { float normalizedCutoff) {
mCoefficients.resize(getNumTaps() * numRows); mCoefficients.resize(static_cast<size_t>(getNumTaps()) * static_cast<size_t>(numRows));
int coefficientIndex = 0; int coefficientIndex = 0;
double phase = 0.0; // ranges from 0.0 to 1.0, fraction between samples double phase = 0.0; // ranges from 0.0 to 1.0, fraction between samples
// Stretch the sinc function for low pass filtering. // Stretch the sinc function for low pass filtering.
@ -150,7 +151,7 @@ void MultiChannelResampler::generateCoefficients(int32_t inputRate,
#if MCR_USE_KAISER #if MCR_USE_KAISER
float window = mKaiserWindow(tapPhase * numTapsHalfInverse); float window = mKaiserWindow(tapPhase * numTapsHalfInverse);
#else #else
float window = mCoshWindow(tapPhase * numTapsHalfInverse); float window = mCoshWindow(static_cast<double>(tapPhase) * numTapsHalfInverse);
#endif #endif
float coefficient = sinc(radians * cutoffScaler) * window; float coefficient = sinc(radians * cutoffScaler) * window;
mCoefficients.at(coefficientIndex++) = coefficient; mCoefficients.at(coefficientIndex++) = coefficient;

View File

@ -14,8 +14,8 @@
* limitations under the License. * limitations under the License.
*/ */
#ifndef OBOE_MULTICHANNEL_RESAMPLER_H #ifndef RESAMPLER_MULTICHANNEL_RESAMPLER_H
#define OBOE_MULTICHANNEL_RESAMPLER_H #define RESAMPLER_MULTICHANNEL_RESAMPLER_H
#include <memory> #include <memory>
#include <vector> #include <vector>
@ -34,7 +34,9 @@
#include "HyperbolicCosineWindow.h" #include "HyperbolicCosineWindow.h"
#endif #endif
namespace resampler { #include "ResamplerDefinitions.h"
namespace RESAMPLER_OUTER_NAMESPACE::resampler {
class MultiChannelResampler { class MultiChannelResampler {
@ -267,5 +269,6 @@ private:
const int mChannelCount; const int mChannelCount;
}; };
} } /* namespace RESAMPLER_OUTER_NAMESPACE::resampler */
#endif //OBOE_MULTICHANNEL_RESAMPLER_H
#endif //RESAMPLER_MULTICHANNEL_RESAMPLER_H

View File

@ -14,12 +14,12 @@
* limitations under the License. * limitations under the License.
*/ */
#include <assert.h> #include <cassert>
#include <math.h> #include <math.h>
#include "IntegerRatio.h" #include "IntegerRatio.h"
#include "PolyphaseResampler.h" #include "PolyphaseResampler.h"
using namespace resampler; using namespace RESAMPLER_OUTER_NAMESPACE::resampler;
PolyphaseResampler::PolyphaseResampler(const MultiChannelResampler::Builder &builder) PolyphaseResampler::PolyphaseResampler(const MultiChannelResampler::Builder &builder)
: MultiChannelResampler(builder) : MultiChannelResampler(builder)
@ -40,13 +40,11 @@ void PolyphaseResampler::readFrame(float *frame) {
// Clear accumulator for mixing. // Clear accumulator for mixing.
std::fill(mSingleFrame.begin(), mSingleFrame.end(), 0.0); std::fill(mSingleFrame.begin(), mSingleFrame.end(), 0.0);
// printf("PolyphaseResampler: mCoefficientCursor = %4d\n", mCoefficientCursor);
// Multiply input times windowed sinc function. // Multiply input times windowed sinc function.
float *coefficients = &mCoefficients[mCoefficientCursor]; float *coefficients = &mCoefficients[mCoefficientCursor];
float *xFrame = &mX[mCursor * getChannelCount()]; float *xFrame = &mX[static_cast<size_t>(mCursor) * static_cast<size_t>(getChannelCount())];
for (int i = 0; i < mNumTaps; i++) { for (int i = 0; i < mNumTaps; i++) {
float coefficient = *coefficients++; float coefficient = *coefficients++;
// printf("PolyphaseResampler: coeff = %10.6f, xFrame[0] = %10.6f\n", coefficient, xFrame[0]);
for (int channel = 0; channel < getChannelCount(); channel++) { for (int channel = 0; channel < getChannelCount(); channel++) {
mSingleFrame[channel] += *xFrame++ * coefficient; mSingleFrame[channel] += *xFrame++ * coefficient;
} }

View File

@ -14,19 +14,21 @@
* limitations under the License. * limitations under the License.
*/ */
#ifndef OBOE_POLYPHASE_RESAMPLER_H #ifndef RESAMPLER_POLYPHASE_RESAMPLER_H
#define OBOE_POLYPHASE_RESAMPLER_H #define RESAMPLER_POLYPHASE_RESAMPLER_H
#include <memory> #include <memory>
#include <vector> #include <vector>
#include <sys/types.h> #include <sys/types.h>
#include <unistd.h> #include <unistd.h>
#include "MultiChannelResampler.h"
namespace resampler { #include "MultiChannelResampler.h"
#include "ResamplerDefinitions.h"
namespace RESAMPLER_OUTER_NAMESPACE::resampler {
/** /**
* Resample that is optimized for a reduced ratio of sample rates. * Resampler that is optimized for a reduced ratio of sample rates.
* All of the coefficients for eacxh possible phase value are precalculated. * All of the coefficients for each possible phase value are pre-calculated.
*/ */
class PolyphaseResampler : public MultiChannelResampler { class PolyphaseResampler : public MultiChannelResampler {
public: public:
@ -46,6 +48,6 @@ protected:
}; };
} } /* namespace RESAMPLER_OUTER_NAMESPACE::resampler */
#endif //OBOE_POLYPHASE_RESAMPLER_H #endif //RESAMPLER_POLYPHASE_RESAMPLER_H

Some files were not shown because too many files have changed in this diff Show More