Update Oboe to 1.7.0

This commit is contained in:
Attila Uygun 2023-05-21 22:37:03 +02:00
parent 217420823c
commit be36d121b0
109 changed files with 3499 additions and 686 deletions

View File

@ -18,26 +18,35 @@ set (oboe_sources
src/common/FixedBlockReader.cpp
src/common/FixedBlockWriter.cpp
src/common/LatencyTuner.cpp
src/common/OboeExtensions.cpp
src/common/SourceFloatCaller.cpp
src/common/SourceI16Caller.cpp
src/common/SourceI24Caller.cpp
src/common/SourceI32Caller.cpp
src/common/Utilities.cpp
src/common/QuirksManager.cpp
src/fifo/FifoBuffer.cpp
src/fifo/FifoController.cpp
src/fifo/FifoControllerBase.cpp
src/fifo/FifoControllerIndirect.cpp
src/flowgraph/FlowGraphNode.cpp
src/flowgraph/ChannelCountConverter.cpp
src/flowgraph/ClipToRange.cpp
src/flowgraph/ManyToMultiConverter.cpp
src/flowgraph/MonoBlend.cpp
src/flowgraph/MonoToMultiConverter.cpp
src/flowgraph/MultiToManyConverter.cpp
src/flowgraph/MultiToMonoConverter.cpp
src/flowgraph/RampLinear.cpp
src/flowgraph/SampleRateConverter.cpp
src/flowgraph/SinkFloat.cpp
src/flowgraph/SinkI16.cpp
src/flowgraph/SinkI24.cpp
src/flowgraph/SinkI32.cpp
src/flowgraph/SourceFloat.cpp
src/flowgraph/SourceI16.cpp
src/flowgraph/SourceI24.cpp
src/flowgraph/SourceI32.cpp
src/flowgraph/resampler/IntegerRatio.cpp
src/flowgraph/resampler/LinearResampler.cpp
src/flowgraph/resampler/MultiChannelResampler.cpp
@ -70,7 +79,7 @@ target_include_directories(oboe
# Enable -Ofast
target_compile_options(oboe
PRIVATE
-std=c++14
-std=c++17
-Wall
-Wextra-semi
-Wshadow

202
src/third_party/oboe/LICENSE vendored Normal file
View File

@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -130,7 +130,7 @@ public:
*
* @return state or a negative error.
*/
virtual StreamState getState() const = 0;
virtual StreamState getState() = 0;
/**
* Wait until the stream's current state no longer matches the input state.
@ -191,7 +191,7 @@ public:
* @return a result which is either Result::OK with the xRun count as the value, or a
* Result::Error* code
*/
virtual ResultWithValue<int32_t> getXRunCount() const {
virtual ResultWithValue<int32_t> getXRunCount() {
return ResultWithValue<int32_t>(Result::ErrorUnimplemented);
}
@ -205,7 +205,9 @@ public:
*
* @return burst size
*/
virtual int32_t getFramesPerBurst() = 0;
int32_t getFramesPerBurst() const {
return mFramesPerBurst;
}
/**
* Get the number of bytes in each audio frame. This is calculated using the channel count
@ -260,6 +262,10 @@ public:
* The latency of an OUTPUT stream is generally higher than the INPUT latency
* because an app generally tries to keep the OUTPUT buffer full and the INPUT buffer empty.
*
* Note that due to issues in Android before R, we recommend NOT calling
* this method from a data callback. See this tech note for more details.
* https://github.com/google/oboe/blob/main/docs/notes/rlsbuffer.md
*
* @return a ResultWithValue which has a result of Result::OK and a value containing the latency
* in milliseconds, or a result of Result::Error*.
*/
@ -278,6 +284,10 @@ public:
* The time is based on the implementation's best effort, using whatever knowledge is available
* to the system, but cannot account for any delay unknown to the implementation.
*
* Note that due to issues in Android before R, we recommend NOT calling
* this method from a data callback. See this tech note for more details.
* https://github.com/google/oboe/blob/main/docs/notes/rlsbuffer.md
*
* @deprecated since 1.0, use AudioStream::getTimestamp(clockid_t clockId) instead, which
* returns ResultWithValue
* @param clockId the type of clock to use e.g. CLOCK_MONOTONIC
@ -301,6 +311,11 @@ public:
* The time is based on the implementation's best effort, using whatever knowledge is available
* to the system, but cannot account for any delay unknown to the implementation.
*
* Note that due to issues in Android before R, we recommend NOT calling
* this method from a data callback. See this tech note for more details.
* https://github.com/google/oboe/blob/main/docs/notes/rlsbuffer.md
*
* See
* @param clockId the type of clock to use e.g. CLOCK_MONOTONIC
* @return a FrameTimestamp containing the position and time at which a particular audio frame
* entered or left the audio processing pipeline, or an error if the operation failed.
@ -372,11 +387,6 @@ public:
return nullptr;
}
/**
* Launch a thread that will stop the stream.
*/
void launchStopThread();
/**
* Update mFramesWritten.
* For internal use only.
@ -393,12 +403,25 @@ public:
* Swap old callback for new callback.
* This is not atomic.
* This should only be used internally.
* @param streamCallback
* @return previous streamCallback
* @param dataCallback
* @return previous dataCallback
*/
AudioStreamCallback *swapCallback(AudioStreamCallback *streamCallback) {
AudioStreamCallback *previousCallback = mStreamCallback;
mStreamCallback = streamCallback;
AudioStreamDataCallback *swapDataCallback(AudioStreamDataCallback *dataCallback) {
AudioStreamDataCallback *previousCallback = mDataCallback;
mDataCallback = dataCallback;
return previousCallback;
}
/*
* Swap old callback for new callback.
* This is not atomic.
* This should only be used internally.
* @param errorCallback
* @return previous errorCallback
*/
AudioStreamErrorCallback *swapErrorCallback(AudioStreamErrorCallback *errorCallback) {
AudioStreamErrorCallback *previousCallback = mErrorCallback;
mErrorCallback = errorCallback;
return previousCallback;
}
@ -419,6 +442,36 @@ public:
ResultWithValue<int32_t> waitForAvailableFrames(int32_t numFrames,
int64_t timeoutNanoseconds);
/**
* @return last result passed from an error callback
*/
virtual oboe::Result getLastErrorCallbackResult() const {
return mErrorCallbackResult;
}
/**
 * @return the time in milliseconds to sleep before closing the internal stream.
 * See setDelayBeforeCloseMillis().
 */
int32_t getDelayBeforeCloseMillis() const {
return mDelayBeforeCloseMillis;
}
/**
* Set the time to sleep before closing the internal stream.
*
* Sometimes a callback can occur shortly after a stream has been stopped and
* even after a close! If the stream has been closed then the callback
* might access memory that has been freed, which could cause a crash.
* This seems to be more likely in Android P or earlier.
* But it can also occur in later versions. By sleeping, we give time for
* the callback threads to finish.
*
* Note that this only has an effect when OboeGlobals::areWorkaroundsEnabled() is true.
*
* @param delayBeforeCloseMillis time to sleep before close.
*/
void setDelayBeforeCloseMillis(int32_t delayBeforeCloseMillis) {
mDelayBeforeCloseMillis = delayBeforeCloseMillis;
}
protected:
/**
@ -480,6 +533,21 @@ protected:
mDataCallbackEnabled = enabled;
}
/**
* This should only be called as a stream is being opened.
* Otherwise we might override setDelayBeforeCloseMillis().
*/
void calculateDefaultDelayBeforeCloseMillis();
/**
 * Try to avoid a race condition when closing.
 *
 * Sleeps for mDelayBeforeCloseMillis so that any callback threads that are
 * still running have time to finish before the underlying stream is freed.
 */
void sleepBeforeClose() {
if (mDelayBeforeCloseMillis > 0) {
// usleep() takes microseconds, so convert from milliseconds.
usleep(mDelayBeforeCloseMillis * 1000);
}
}
/*
* Set a weak_ptr to this stream from the shared_ptr so that we can
* later use a shared_ptr in the error callback.
@ -515,15 +583,27 @@ protected:
std::mutex mLock; // for synchronizing start/stop/close
oboe::Result mErrorCallbackResult = oboe::Result::OK;
/**
* Number of frames which will be copied to/from the audio device in a single read/write
* operation
*/
int32_t mFramesPerBurst = kUnspecified;
// Time to sleep in order to prevent a race condition with a callback after a close().
// Two milliseconds may be enough but 10 msec is even safer.
static constexpr int kMinDelayBeforeCloseMillis = 10;
int32_t mDelayBeforeCloseMillis = kMinDelayBeforeCloseMillis;
private:
// Log the scheduler if it changes.
void checkScheduler();
int mPreviousScheduler = -1;
std::atomic<bool> mDataCallbackEnabled{false};
std::atomic<bool> mErrorCallbackCalled{false};
};
/**

View File

@ -18,6 +18,7 @@
#define OBOE_STREAM_BASE_H_
#include <memory>
#include <string>
#include "oboe/AudioStreamCallback.h"
#include "oboe/Definitions.h"
@ -62,9 +63,14 @@ public:
int32_t getSampleRate() const { return mSampleRate; }
/**
* @return the number of frames in each callback or kUnspecified.
* @deprecated use `getFramesPerDataCallback` instead.
*/
int32_t getFramesPerCallback() const { return mFramesPerCallback; }
int32_t getFramesPerCallback() const { return getFramesPerDataCallback(); }
/**
* @return the number of frames in each data callback or kUnspecified.
*/
int32_t getFramesPerDataCallback() const { return mFramesPerCallback; }
/**
* @return the audio sample format (e.g. Float or I16)
@ -100,10 +106,35 @@ public:
int32_t getDeviceId() const { return mDeviceId; }
/**
* @return the callback object for this stream, if set.
* For internal use only.
* @return the data callback object for this stream, if set.
*/
AudioStreamCallback* getCallback() const {
return mStreamCallback;
AudioStreamDataCallback *getDataCallback() const {
return mDataCallback;
}
/**
* For internal use only.
* @return the error callback object for this stream, if set.
*/
AudioStreamErrorCallback *getErrorCallback() const {
return mErrorCallback;
}
/**
* @return true if a data callback was set for this stream
*/
bool isDataCallbackSpecified() const {
return mDataCallback != nullptr;
}
/**
* Note that if the app does not set an error callback then a
* default one may be provided.
* @return true if an error callback was set for this stream
*/
bool isErrorCallbackSpecified() const {
return mErrorCallback != nullptr;
}
/**
@ -147,10 +178,22 @@ public:
return mSampleRateConversionQuality;
}
protected:
/**
* @return the stream's channel mask.
*/
ChannelMask getChannelMask() const {
return mChannelMask;
}
protected:
/** The callback which will be fired when new data is ready to be read/written. **/
AudioStreamDataCallback *mDataCallback = nullptr;
std::shared_ptr<AudioStreamDataCallback> mSharedDataCallback;
/** The callback which will be fired when an error or a disconnect occurs. **/
AudioStreamErrorCallback *mErrorCallback = nullptr;
std::shared_ptr<AudioStreamErrorCallback> mSharedErrorCallback;
/** The callback which will be fired when new data is ready to be read/written **/
AudioStreamCallback *mStreamCallback = nullptr;
/** Number of audio frames which will be requested in each callback */
int32_t mFramesPerCallback = kUnspecified;
/** Stream channel count */
@ -163,11 +206,8 @@ protected:
int32_t mBufferCapacityInFrames = kUnspecified;
/** Stream buffer size specified as a number of audio frames */
int32_t mBufferSizeInFrames = kUnspecified;
/**
* Number of frames which will be copied to/from the audio device in a single read/write
* operation
*/
int32_t mFramesPerBurst = kUnspecified;
/** Stream channel mask. Only active on Android 32+ */
ChannelMask mChannelMask = ChannelMask::Unspecified;
/** Stream sharing mode */
SharingMode mSharingMode = SharingMode::Shared;
@ -189,12 +229,44 @@ protected:
/** Stream session ID allocation strategy. Only active on Android 28+ */
SessionId mSessionId = SessionId::None;
/** Control the name of the package creating the stream. Only active on Android 31+ */
std::string mPackageName;
/** Control the attribution tag of the context creating the stream. Only active on Android 31+ */
std::string mAttributionTag;
// Control whether Oboe can convert channel counts to achieve optimal results.
bool mChannelConversionAllowed = false;
// Control whether Oboe can convert data formats to achieve optimal results.
bool mFormatConversionAllowed = false;
// Control whether and how Oboe can convert sample rates to achieve optimal results.
SampleRateConversionQuality mSampleRateConversionQuality = SampleRateConversionQuality::None;
/**
 * Validate stream parameters that might not be checked in lower layers.
 *
 * @return Result::OK if the configuration is usable, Result::ErrorInvalidFormat
 *         for an unknown sample format, or Result::ErrorIllegalArgument for an
 *         unknown sample-rate-conversion quality.
 */
virtual Result isValidConfig() {
// Only these sample formats are supported. Unspecified is allowed
// because the actual format is resolved when the stream is opened.
switch (mFormat) {
case AudioFormat::Unspecified:
case AudioFormat::I16:
case AudioFormat::Float:
case AudioFormat::I24:
case AudioFormat::I32:
break;
default:
return Result::ErrorInvalidFormat;
}
// Any recognized sample-rate-conversion quality is acceptable.
switch (mSampleRateConversionQuality) {
case SampleRateConversionQuality::None:
case SampleRateConversionQuality::Fastest:
case SampleRateConversionQuality::Low:
case SampleRateConversionQuality::Medium:
case SampleRateConversionQuality::High:
case SampleRateConversionQuality::Best:
return Result::OK;
default:
return Result::ErrorIllegalArgument;
}
}
};
} // namespace oboe

View File

@ -19,6 +19,7 @@
#include "oboe/Definitions.h"
#include "oboe/AudioStreamBase.h"
#include "oboe/Utilities.h"
#include "ResultWithValue.h"
namespace oboe {
@ -42,12 +43,36 @@ public:
*
* Default is kUnspecified. If the value is unspecified then
* the application should query for the actual value after the stream is opened.
*
* Note that the channel count set here may differ from the channel count of a
* channel mask provided via setChannelMask(). If both this function and
* setChannelMask() are called, the last call takes precedence.
*/
AudioStreamBuilder *setChannelCount(int channelCount) {
mChannelCount = channelCount;
// Setting an explicit count invalidates any previously requested mask.
mChannelMask = ChannelMask::Unspecified;
return this;
}
/**
* Request a specific channel mask.
*
* Default is kUnspecified. If the value is unspecified then the application
* should query for the actual value after the stream is opened.
*
* Note that the channel count corresponding to the channel mask provided here
* may differ from the channel count set via setChannelCount(). If both this
* function and setChannelCount() are called, the last call takes precedence.
*
* As the setChannelMask API is available on Android 32+, this call will only take effect
* on Android 32+.
*/
AudioStreamBuilder *setChannelMask(ChannelMask channelMask) {
mChannelMask = channelMask;
// Keep the requested channel count consistent with the requested mask.
mChannelCount = getChannelCountFromChannelMask(channelMask);
return this;
}
/**
* Request the direction for a stream. The default is Direction::Output.
*
@ -74,6 +99,13 @@ public:
return this;
}
/**
* @deprecated use `setFramesPerDataCallback` instead.
*/
AudioStreamBuilder *setFramesPerCallback(int framesPerCallback) {
return setFramesPerDataCallback(framesPerCallback);
}
/**
* Request a specific number of frames for the data callback.
*
@ -85,10 +117,18 @@ public:
* the callbacks. But if your application is, for example, doing FFTs or other block
* oriented operations, then call this function to get the sizes you need.
*
* Calling setFramesPerDataCallback() does not guarantee anything about timing.
* This just collects the data into the number of frames that your app requires.
* We encourage leaving this unspecified in most cases.
*
* If this number is larger than the burst size, some bursts will not receive a callback.
* If this number is smaller than the burst size, there may be multiple callbacks in a single
* burst.
*
* @param framesPerCallback
* @return pointer to the builder so calls can be chained
*/
AudioStreamBuilder *setFramesPerCallback(int framesPerCallback) {
AudioStreamBuilder *setFramesPerDataCallback(int framesPerCallback) {
mFramesPerCallback = framesPerCallback;
return this;
}
@ -198,10 +238,11 @@ public:
/**
* Set the intended use case for the stream.
* Set the intended use case for an output stream.
*
* The system will use this information to optimize the behavior of the stream.
* This could, for example, affect how volume and focus is handled for the stream.
* The usage is ignored for input streams.
*
* The default, if you do not call this function, is Usage::Media.
*
@ -215,10 +256,11 @@ public:
}
/**
* Set the type of audio data that the stream will carry.
* Set the type of audio data that an output stream will carry.
*
* The system will use this information to optimize the behavior of the stream.
* This could, for example, affect whether a stream is paused when a notification occurs.
* The contentType is ignored for input streams.
*
* The default, if you do not call this function, is ContentType::Music.
*
@ -286,11 +328,14 @@ public:
* In most cases, the primary device will be the appropriate device to use, and the
* deviceId can be left kUnspecified.
*
* On Android, for example, the ID could be obtained from the Java AudioManager.
* AudioManager.getDevices() returns an array of AudioDeviceInfo[], which contains
* a getId() method (as well as other type information), that should be passed
* to this method.
* The ID could be obtained from the Java AudioManager.
* AudioManager.getDevices() returns an array of AudioDeviceInfo,
* which contains a getId() method. That ID can be passed to this function.
*
* It is possible that you may not get the device that you requested.
* So if it is important to you, you should call
* stream->getDeviceId() after the stream is opened to
* verify the actual ID.
*
* Note that when using OpenSL ES, this will be ignored and the created
* stream will have deviceId kUnspecified.
@ -303,9 +348,86 @@ public:
return this;
}
/**
* Specifies an object to handle data related callbacks from the underlying API.
*
* <strong>Important: See AudioStreamCallback for restrictions on what may be called
* from the callback methods.</strong>
*
* We pass a shared_ptr so that the sharedDataCallback object cannot be deleted
* before the stream is deleted.
*
* @param dataCallback
* @return pointer to the builder so calls can be chained
*/
AudioStreamBuilder *setDataCallback(std::shared_ptr<AudioStreamDataCallback> sharedDataCallback) {
// Use this raw pointer in the rest of the code to retain backwards compatibility.
mDataCallback = sharedDataCallback.get();
// Hold a shared_ptr to protect the raw pointer for the lifetime of the stream.
mSharedDataCallback = sharedDataCallback;
return this;
}
/**
* Pass a raw pointer to a data callback. This is not recommended because the dataCallback
* object might get deleted by the app while it is being used.
*
* @deprecated Call setDataCallback(std::shared_ptr<AudioStreamDataCallback>) instead.
* @param dataCallback
* @return pointer to the builder so calls can be chained
*/
AudioStreamBuilder *setDataCallback(AudioStreamDataCallback *dataCallback) {
mDataCallback = dataCallback;
mSharedDataCallback = nullptr;
return this;
}
/**
* Specifies an object to handle error related callbacks from the underlying API.
* This can occur when a stream is disconnected because a headset is plugged in or unplugged.
* It can also occur if the audio service fails or if an exclusive stream is stolen by
* another stream.
*
* <strong>Important: See AudioStreamCallback for restrictions on what may be called
* from the callback methods.</strong>
*
* <strong>When an error callback occurs, the associated stream must be stopped and closed
* in a separate thread.</strong>
*
* We pass a shared_ptr so that the errorCallback object cannot be deleted before the stream is deleted.
* If the stream was created using a shared_ptr then the stream cannot be deleted before the
* error callback has finished running.
*
* @param sharedErrorCallback
* @return pointer to the builder so calls can be chained
*/
AudioStreamBuilder *setErrorCallback(std::shared_ptr<AudioStreamErrorCallback> sharedErrorCallback) {
// Use this raw pointer in the rest of the code to retain backwards compatibility.
mErrorCallback = sharedErrorCallback.get();
// Hold a shared_ptr to protect the raw pointer for the lifetime of the stream.
mSharedErrorCallback = sharedErrorCallback;
return this;
}
/**
* Pass a raw pointer to an error callback. This is not recommended because the errorCallback
* object might get deleted by the app while it is being used.
*
* @deprecated Call setErrorCallback(std::shared_ptr<AudioStreamErrorCallback>) instead.
* @param errorCallback
* @return pointer to the builder so calls can be chained
*/
AudioStreamBuilder *setErrorCallback(AudioStreamErrorCallback *errorCallback) {
mErrorCallback = errorCallback;
mSharedErrorCallback = nullptr;
return this;
}
/**
* Specifies an object to handle data or error related callbacks from the underlying API.
*
* This is the equivalent of calling both setDataCallback() and setErrorCallback().
*
* <strong>Important: See AudioStreamCallback for restrictions on what may be called
* from the callback methods.</strong>
*
@ -325,7 +447,9 @@ public:
* @return pointer to the builder so calls can be chained
*/
AudioStreamBuilder *setCallback(AudioStreamCallback *streamCallback) {
mStreamCallback = streamCallback;
// Use the same callback object for both, dual inheritance.
mDataCallback = streamCallback;
mErrorCallback = streamCallback;
return this;
}
@ -336,7 +460,7 @@ public:
* On some devices, mono streams might be broken, so a stereo stream might be opened
* and converted to mono.
*
* Default is true.
* Default is false.
*/
AudioStreamBuilder *setChannelConversionAllowed(bool allowed) {
mChannelConversionAllowed = allowed;
@ -344,11 +468,11 @@ public:
}
/**
* If true then Oboe might convert data formats to achieve optimal results.
* If true then Oboe might convert data formats to achieve optimal results.
* On some versions of Android, for example, a float stream could not get a
* low latency data path. So an I16 stream might be opened and converted to float.
*
* Default is true.
* Default is false.
*/
AudioStreamBuilder *setFormatConversionAllowed(bool allowed) {
mFormatConversionAllowed = allowed;
@ -371,6 +495,43 @@ public:
return this;
}
/**
* Declare the name of the package creating the stream.
*
* This is usually {@code Context#getPackageName()}.
*
* The default, if you do not call this function, is a random package in the calling uid.
* The vast majority of apps have only one package per calling UID.
* If an invalid package name is set, input streams may not be given permission to
* record when started.
*
* The package name is usually the applicationId in your app's build.gradle file.
*
* Available since API level 31.
*
* @param packageName packageName of the calling app.
*/
AudioStreamBuilder *setPackageName(std::string packageName) {
mPackageName = packageName;
return this;
}
/**
* Declare the attribution tag of the context creating the stream.
*
* This is usually {@code Context#getAttributionTag()}.
*
* The default, if you do not call this function, is null.
*
* Available since API level 31.
*
* @param attributionTag attributionTag of the calling context.
*/
AudioStreamBuilder *setAttributionTag(std::string attributionTag) {
mAttributionTag = attributionTag;
return this;
}
/**
* @return true if AAudio will be used based on the current settings.
*/
@ -382,7 +543,8 @@ public:
/**
* Create and open a stream object based on the current settings.
*
* The caller owns the pointer to the AudioStream object.
* The caller owns the pointer to the AudioStream object
* and must delete it when finished.
*
* @deprecated Use openStream(std::shared_ptr<oboe::AudioStream> &stream) instead.
* @param stream pointer to a variable to receive the stream address
@ -408,6 +570,8 @@ public:
* The caller must create a unique ptr, and pass by reference so it can be
* modified to point to an opened stream. The caller owns the unique ptr,
* and it will be automatically closed and deleted when going out of scope.
*
* @deprecated Use openStream(std::shared_ptr<oboe::AudioStream> &stream) instead.
* @param stream Reference to the ManagedStream (uniqueptr) used to keep track of stream
* @return OBOE_OK if successful or a negative error code.
*/

View File

@ -24,15 +24,16 @@ namespace oboe {
class AudioStream;
/**
* AudioStreamCallback defines a callback interface for:
*
* 1) moving data to/from an audio stream using `onAudioReady`
* AudioStreamDataCallback defines a callback interface for
* moving data to/from an audio stream using `onAudioReady`
* 2) being alerted when a stream has an error using `onError*` methods
*
* It is used with AudioStreamBuilder::setDataCallback().
*/
class AudioStreamCallback {
class AudioStreamDataCallback {
public:
virtual ~AudioStreamCallback() = default;
virtual ~AudioStreamDataCallback() = default;
/**
* A buffer is ready for processing.
@ -75,21 +76,68 @@ public:
* If you need to move data, eg. MIDI commands, in or out of the callback function then
* we recommend the use of non-blocking techniques such as an atomic FIFO.
*
* @param oboeStream pointer to the associated stream
* @param audioStream pointer to the associated stream
* @param audioData buffer containing input data or a place to put output data
* @param numFrames number of frames to be processed
* @return DataCallbackResult::Continue or DataCallbackResult::Stop
*/
virtual DataCallbackResult onAudioReady(
AudioStream *oboeStream,
AudioStream *audioStream,
void *audioData,
int32_t numFrames) = 0;
};
/**
* AudioStreamErrorCallback defines a callback interface for
* being alerted when a stream has an error or is disconnected
* using `onError*` methods.
*
* Note: This callback is only fired when an AudioStreamCallback is set.
* If you use AudioStream::write() you have to evaluate the return codes of
* AudioStream::write() to notice errors in the stream.
*
* It is used with AudioStreamBuilder::setErrorCallback().
*/
class AudioStreamErrorCallback {
public:
virtual ~AudioStreamErrorCallback() = default;
/**
* This will be called when an error occurs on a stream or when the stream is disconnected.
* This will be called before other `onError` methods when an error occurs on a stream,
* such as when the stream is disconnected.
*
* Note that this will be called on a different thread than the onAudioReady() thread.
* This thread will be created by Oboe.
* It can be used to override and customize the normal error processing.
* Use of this method is considered an advanced technique.
* It might, for example, be used if an app want to use a high level lock when
* closing and reopening a stream.
* Or it might be used when an app want to signal a management thread that handles
* all of the stream state.
*
* If this method returns false it indicates that the stream has *not* been stopped and closed
* by the application. In this case it will be stopped by Oboe in the following way:
* onErrorBeforeClose() will be called, then the stream will be closed and onErrorAfterClose()
* will be called.
*
* If this method returns true it indicates that the stream *has* been stopped and closed
* by the application and Oboe will not do this.
* In that case, the app MUST stop() and close() the stream.
*
* This method will be called on a thread created by Oboe.
*
* @param audioStream pointer to the associated stream
* @param error
* @return true if the stream has been stopped and closed, false if not
*/
virtual bool onError(AudioStream* /* audioStream */, Result /* error */) {
return false;
}
/**
* This will be called when an error occurs on a stream,
* such as when the stream is disconnected,
* and if onError() returns false (indicating that the error has not already been handled).
*
* Note that this will be called on a thread created by Oboe.
*
* The underlying stream will already be stopped by Oboe but not yet closed.
* So the stream can be queried.
@ -97,27 +145,49 @@ public:
* Do not close or delete the stream in this method because it will be
* closed after this method returns.
*
* @param oboeStream pointer to the associated stream
* @param audioStream pointer to the associated stream
* @param error
*/
virtual void onErrorBeforeClose(AudioStream* /* oboeStream */, Result /* error */) {}
virtual void onErrorBeforeClose(AudioStream* /* audioStream */, Result /* error */) {}
/**
* This will be called when an error occurs on a stream or when the stream is disconnected.
* This will be called when an error occurs on a stream,
* such as when the stream is disconnected,
* and if onError() returns false (indicating that the error has not already been handled).
*
* The underlying AAudio or OpenSL ES stream will already be stopped AND closed by Oboe.
* So the underlying stream cannot be referenced.
* But you can still query most parameters.
*
* This callback could be used to reopen a new stream on another device.
* You can safely delete the old AudioStream in this method.
*
* @param oboeStream pointer to the associated stream
* @param audioStream pointer to the associated stream
* @param error
*/
virtual void onErrorAfterClose(AudioStream* /* oboeStream */, Result /* error */) {}
virtual void onErrorAfterClose(AudioStream* /* audioStream */, Result /* error */) {}
};
/**
* AudioStreamCallback defines a callback interface for:
*
* 1) moving data to/from an audio stream using `onAudioReady`
* 2) being alerted when a stream has an error using `onError*` methods
*
* It is used with AudioStreamBuilder::setCallback().
*
* It combines the interfaces defined by AudioStreamDataCallback and AudioStreamErrorCallback.
* This was the original callback object. We now recommend using the individual interfaces
* and using setDataCallback() and setErrorCallback().
*
* @deprecated Use `AudioStreamDataCallback` and `AudioStreamErrorCallback` instead
*/
class AudioStreamCallback : public AudioStreamDataCallback,
public AudioStreamErrorCallback {
public:
virtual ~AudioStreamCallback() = default;
};
} // namespace oboe
#endif //OBOE_STREAM_CALLBACK_H

View File

@ -17,7 +17,6 @@
#ifndef OBOE_DEFINITIONS_H
#define OBOE_DEFINITIONS_H
#include <cstdint>
#include <type_traits>
@ -108,9 +107,36 @@ namespace oboe {
I16 = 1, // AAUDIO_FORMAT_PCM_I16,
/**
* Single precision floating points.
* Single precision floating point.
*
* This is the recommended format for most applications.
* But note that the use of Float may prevent the opening of
* a low-latency input path on OpenSL ES or Legacy AAudio streams.
*/
Float = 2, // AAUDIO_FORMAT_PCM_FLOAT,
/**
* Signed 24-bit integers, packed into 3 bytes.
*
* Note that the use of this format does not guarantee that
* the full precision will be provided. The underlying device may
* be using I16 format.
*
* Added in API 31 (S).
*/
I24 = 3, // AAUDIO_FORMAT_PCM_I24_PACKED
/**
* Signed 32-bit integers.
*
* Note that the use of this format does not guarantee that
* the full precision will be provided. The underlying device may
* be using I16 format.
*
* Added in API 31 (S).
*/
I32 = 4, // AAUDIO_FORMAT_PCM_I32
};
/**
@ -158,7 +184,7 @@ namespace oboe {
Reserved8,
Reserved9,
Reserved10,
ErrorClosed,
ErrorClosed = -869,
};
/**
@ -218,11 +244,14 @@ namespace oboe {
/**
* Use OpenSL ES.
* Note that OpenSL ES is deprecated in Android 11, API 30 and above.
*/
OpenSLES,
/**
* Try to use AAudio. Fail if unavailable.
* AAudio was first supported in Android 8, API 26 and above.
* It is only recommended for API 27 and above.
*/
AAudio
};
@ -242,8 +271,17 @@ namespace oboe {
* This may be implemented using bilinear interpolation.
*/
Fastest,
/**
* Low quality conversion with 8 taps.
*/
Low,
/**
* Medium quality conversion with 16 taps.
*/
Medium,
/**
* High quality conversion with 32 taps.
*/
High,
/**
* Highest quality conversion, which may be expensive in terms of CPU.
@ -456,6 +494,160 @@ namespace oboe {
Stereo = 2,
};
/**
* The channel mask of the audio stream. The underlying type is `uint32_t`.
* Use of this enum is convenient.
*
* ChannelMask::Unspecified means this is not specified.
* The rest of the enums are channel position masks.
* Use the combinations of the channel position masks defined below instead of
* using those values directly.
*/
enum class ChannelMask : uint32_t { // aaudio_channel_mask_t
Unspecified = kUnspecified,
FrontLeft = 1 << 0,
FrontRight = 1 << 1,
FrontCenter = 1 << 2,
LowFrequency = 1 << 3,
BackLeft = 1 << 4,
BackRight = 1 << 5,
FrontLeftOfCenter = 1 << 6,
FrontRightOfCenter = 1 << 7,
BackCenter = 1 << 8,
SideLeft = 1 << 9,
SideRight = 1 << 10,
TopCenter = 1 << 11,
TopFrontLeft = 1 << 12,
TopFrontCenter = 1 << 13,
TopFrontRight = 1 << 14,
TopBackLeft = 1 << 15,
TopBackCenter = 1 << 16,
TopBackRight = 1 << 17,
TopSideLeft = 1 << 18,
TopSideRight = 1 << 19,
BottomFrontLeft = 1 << 20,
BottomFrontCenter = 1 << 21,
BottomFrontRight = 1 << 22,
LowFrequency2 = 1 << 23,
FrontWideLeft = 1 << 24,
FrontWideRight = 1 << 25,
Mono = FrontLeft,
Stereo = FrontLeft |
FrontRight,
CM2Point1 = FrontLeft |
FrontRight |
LowFrequency,
Tri = FrontLeft |
FrontRight |
FrontCenter,
TriBack = FrontLeft |
FrontRight |
BackCenter,
CM3Point1 = FrontLeft |
FrontRight |
FrontCenter |
LowFrequency,
CM2Point0Point2 = FrontLeft |
FrontRight |
TopSideLeft |
TopSideRight,
CM2Point1Point2 = CM2Point0Point2 |
LowFrequency,
CM3Point0Point2 = FrontLeft |
FrontRight |
FrontCenter |
TopSideLeft |
TopSideRight,
CM3Point1Point2 = CM3Point0Point2 |
LowFrequency,
Quad = FrontLeft |
FrontRight |
BackLeft |
BackRight,
QuadSide = FrontLeft |
FrontRight |
SideLeft |
SideRight,
Surround = FrontLeft |
FrontRight |
FrontCenter |
BackCenter,
Penta = Quad |
FrontCenter,
// aka 5Point1Back
CM5Point1 = FrontLeft |
FrontRight |
FrontCenter |
LowFrequency |
BackLeft |
BackRight,
CM5Point1Side = FrontLeft |
FrontRight |
FrontCenter |
LowFrequency |
SideLeft |
SideRight,
CM6Point1 = FrontLeft |
FrontRight |
FrontCenter |
LowFrequency |
BackLeft |
BackRight |
BackCenter,
CM7Point1 = CM5Point1 |
SideLeft |
SideRight,
CM5Point1Point2 = CM5Point1 |
TopSideLeft |
TopSideRight,
CM5Point1Point4 = CM5Point1 |
TopFrontLeft |
TopFrontRight |
TopBackLeft |
TopBackRight,
CM7Point1Point2 = CM7Point1 |
TopSideLeft |
TopSideRight,
CM7Point1Point4 = CM7Point1 |
TopFrontLeft |
TopFrontRight |
TopBackLeft |
TopBackRight,
CM9Point1Point4 = CM7Point1Point4 |
FrontWideLeft |
FrontWideRight,
CM9Point1Point6 = CM9Point1Point4 |
TopSideLeft |
TopSideRight,
FrontBack = FrontCenter |
BackCenter,
};
/**
* On API 16 to 26 OpenSL ES will be used. When using OpenSL ES the optimal values for sampleRate and
* framesPerBurst are not known by the native code.

View File

@ -17,19 +17,35 @@
#ifndef OBOE_FIFOPROCESSOR_H
#define OBOE_FIFOPROCESSOR_H
#include <unistd.h>
#include <sys/types.h>
#include <memory>
#include <stdint.h>
#include "common/OboeDebug.h"
#include "FifoControllerBase.h"
#include "oboe/Definitions.h"
#include "oboe/FifoControllerBase.h"
namespace oboe {
class FifoBuffer {
public:
/**
* Construct a `FifoBuffer`.
*
* @param bytesPerFrame amount of bytes for one frame
* @param capacityInFrames the capacity of frames in fifo
*/
FifoBuffer(uint32_t bytesPerFrame, uint32_t capacityInFrames);
/**
* Construct a `FifoBuffer`.
* To be used if the storage allocation is done outside of FifoBuffer.
*
* @param bytesPerFrame amount of bytes for one frame
* @param capacityInFrames capacity of frames in fifo
* @param readCounterAddress address of read counter
* @param writeCounterAddress address of write counter
* @param dataStorageAddress address of storage
*/
FifoBuffer(uint32_t bytesPerFrame,
uint32_t capacityInFrames,
std::atomic<uint64_t> *readCounterAddress,
@ -38,18 +54,36 @@ public:
~FifoBuffer();
/**
* Convert a number of frames to bytes.
*
* @return number of bytes
*/
int32_t convertFramesToBytes(int32_t frames);
/**
* Read framesToRead or, if not enough, then read as many as are available.
*
* @param destination
* @param framesToRead number of frames requested
* @return number of frames actually read
*/
int32_t read(void *destination, int32_t framesToRead);
/**
* Write framesToWrite or, if there is not enough room in the fifo,
* then write as many frames as the fifo can accept.
*
* @param source the data to be written
* @param framesToWrite number of frames requested
* @return number of frames actually written
*/
int32_t write(const void *source, int32_t framesToWrite);
/**
* Get the buffer capacity in frames.
*
* @return number of frames
*/
uint32_t getBufferCapacityInFrames() const;
/**
@ -62,25 +96,56 @@ public:
*/
int32_t readNow(void *destination, int32_t numFrames);
/**
* Get the number of frames in the fifo.
*
* @return number of frames actually in the buffer
*/
uint32_t getFullFramesAvailable() {
return mFifo->getFullFramesAvailable();
}
/**
* Get the amount of bytes per frame.
*
* @return number of bytes per frame
*/
uint32_t getBytesPerFrame() const {
return mBytesPerFrame;
}
/**
* Get the position of read counter.
*
* @return position of read counter
*/
uint64_t getReadCounter() const {
return mFifo->getReadCounter();
}
/**
* Set the position of read counter.
*
* @param n position of read counter
*/
void setReadCounter(uint64_t n) {
mFifo->setReadCounter(n);
}
/**
* Get the position of write counter.
*
* @return position of write counter
*/
uint64_t getWriteCounter() {
return mFifo->getWriteCounter();
}
/**
* Set the position of write counter.
*
* @param n position of write counter
*/
void setWriteCounter(uint64_t n) {
mFifo->setWriteCounter(n);
}

View File

@ -18,7 +18,6 @@
#define NATIVEOBOE_FIFOCONTROLLERBASE_H
#include <stdint.h>
#include <sys/types.h>
namespace oboe {
@ -35,9 +34,11 @@ namespace oboe {
class FifoControllerBase {
public:
/**
* @param totalFrames capacity of the circular buffer in frames.
*/
/**
* Construct a `FifoControllerBase`.
*
* @param totalFrames capacity of the circular buffer in frames
*/
FifoControllerBase(uint32_t totalFrames);
virtual ~FifoControllerBase() = default;
@ -46,35 +47,53 @@ public:
* The frames available to read will be calculated from the read and write counters.
* The result will be clipped to the capacity of the buffer.
* If the buffer has underflowed then this will return zero.
*
* @return number of valid frames available to read.
*/
uint32_t getFullFramesAvailable() const;
/**
/**
* The index in a circular buffer of the next frame to read.
*
* @return read index position
*/
uint32_t getReadIndex() const;
/**
* @param numFrames number of frames to advance the read index
*/
/**
* Advance read index from a number of frames.
* Equivalent of incrementReadCounter(numFrames).
*
* @param numFrames number of frames to advance the read index
*/
void advanceReadIndex(uint32_t numFrames);
/**
* @return maximum number of frames that can be written without exceeding the threshold.
*/
/**
* Get the number of frames that have not been written yet.
*
* @return maximum number of frames that can be written without exceeding the threshold
*/
uint32_t getEmptyFramesAvailable() const;
/**
* The index in a circular buffer of the next frame to write.
*/
* The index in a circular buffer of the next frame to write.
*
* @return index of the next frame to write
*/
uint32_t getWriteIndex() const;
/**
/**
* Advance write index from a number of frames.
* Equivalent of incrementWriteCounter(numFrames).
*
* @param numFrames number of frames to advance the write index
*/
void advanceWriteIndex(uint32_t numFrames);
/**
* Get the frame capacity of the fifo.
*
* @return frame capacity
*/
uint32_t getFrameCapacity() const { return mTotalFrames; }
virtual uint64_t getReadCounter() const = 0;

View File

@ -33,5 +33,7 @@
#include "oboe/Utilities.h"
#include "oboe/Version.h"
#include "oboe/StabilizedCallback.h"
#include "oboe/FifoBuffer.h"
#include "oboe/OboeExtensions.h"
#endif //OBOE_OBOE_H

View File

@ -0,0 +1,64 @@
/*
* Copyright 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef OBOE_EXTENSIONS_
#define OBOE_EXTENSIONS_
#include <stdint.h>
#include "oboe/Definitions.h"
#include "oboe/AudioStream.h"
namespace oboe {
/**
* The definitions below are only for testing.
* They are not recommended for use in an application.
* They may change or be removed at any time.
*/
class OboeExtensions {
public:
    /**
     * Check whether this device supports the AAudio MMAP data path.
     *
     * @returns true if the device supports AAudio MMAP
     */
    static bool isMMapSupported();

    /**
     * Check whether the MMAP data path is currently allowed to be
     * selected when opening streams.
     *
     * @returns true if the AAudio MMAP data path can be selected
     */
    static bool isMMapEnabled();

    /**
     * Controls whether the AAudio MMAP data path can be selected when opening a stream.
     * It has no effect after the stream has been opened.
     * It only affects the application that calls it. Other apps are not affected.
     *
     * @param enabled true to allow the MMAP data path, false to disallow it
     * @return 0 or a negative error code
     */
    static int32_t setMMapEnabled(bool enabled);

    /**
     * Query whether a specific open stream is using the MMAP data path.
     *
     * @param oboeStream the stream to query
     * @return true if the AAudio MMAP data path is used on the stream
     */
    static bool isMMapUsed(oboe::AudioStream *oboeStream);
};
} // namespace oboe
#endif // OBOE_EXTENSIONS_

View File

@ -60,7 +60,7 @@ private:
#if defined(__i386__) || defined(__x86_64__)
#define cpu_relax() asm volatile("rep; nop" ::: "memory");
#elif defined(__arm__) || defined(__mips__)
#elif defined(__arm__) || defined(__mips__) || defined(__riscv)
#define cpu_relax() asm volatile("":::"memory")
#elif defined(__aarch64__)

View File

@ -82,6 +82,8 @@ int getPropertyInteger(const char * name, int defaultValue);
*/
int getSdkVersion();
int getChannelCountFromChannelMask(ChannelMask channelMask);
} // namespace oboe
#endif //OBOE_UTILITIES_H

View File

@ -34,10 +34,10 @@
#define OBOE_VERSION_MAJOR 1
// Type: 8-bit unsigned int. Min value: 0 Max value: 255. See below for description.
#define OBOE_VERSION_MINOR 4
#define OBOE_VERSION_MINOR 7
// Type: 16-bit unsigned int. Min value: 0 Max value: 65535. See below for description.
#define OBOE_VERSION_PATCH 2
#define OBOE_VERSION_PATCH 0
#define OBOE_STRINGIFY(x) #x
#define OBOE_TOSTRING(x) OBOE_STRINGIFY(x)

View File

@ -0,0 +1,179 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef OBOE_AAUDIO_EXTENSIONS_H
#define OBOE_AAUDIO_EXTENSIONS_H
#include <dlfcn.h>
#include <stdint.h>
#include <sys/system_properties.h>
#include "common/OboeDebug.h"
#include "oboe/Oboe.h"
#include "AAudioLoader.h"
namespace oboe {
#define LIB_AAUDIO_NAME "libaaudio.so"
#define FUNCTION_IS_MMAP "AAudioStream_isMMapUsed"
#define FUNCTION_SET_MMAP_POLICY "AAudio_setMMapPolicy"
#define FUNCTION_GET_MMAP_POLICY "AAudio_getMMapPolicy"
#define AAUDIO_ERROR_UNAVAILABLE static_cast<aaudio_result_t>(Result::ErrorUnavailable)
typedef struct AAudioStreamStruct AAudioStream;
/**
* Call some AAudio test routines that are not part of the normal API.
*/
class AAudioExtensions {
public:
AAudioExtensions() {
int32_t policy = getIntegerProperty("aaudio.mmap_policy", 0);
mMMapSupported = isPolicyEnabled(policy);
policy = getIntegerProperty("aaudio.mmap_exclusive_policy", 0);
mMMapExclusiveSupported = isPolicyEnabled(policy);
}
static bool isPolicyEnabled(int32_t policy) {
return (policy == AAUDIO_POLICY_AUTO || policy == AAUDIO_POLICY_ALWAYS);
}
static AAudioExtensions &getInstance() {
static AAudioExtensions instance;
return instance;
}
bool isMMapUsed(oboe::AudioStream *oboeStream) {
AAudioStream *aaudioStream = (AAudioStream *) oboeStream->getUnderlyingStream();
return isMMapUsed(aaudioStream);
}
bool isMMapUsed(AAudioStream *aaudioStream) {
if (loadSymbols()) return false;
if (mAAudioStream_isMMap == nullptr) return false;
return mAAudioStream_isMMap(aaudioStream);
}
/**
* Controls whether the MMAP data path can be selected when opening a stream.
* It has no effect after the stream has been opened.
* It only affects the application that calls it. Other apps are not affected.
*
* @param enabled
* @return 0 or a negative error code
*/
int32_t setMMapEnabled(bool enabled) {
if (loadSymbols()) return AAUDIO_ERROR_UNAVAILABLE;
if (mAAudio_setMMapPolicy == nullptr) return false;
return mAAudio_setMMapPolicy(enabled ? AAUDIO_POLICY_AUTO : AAUDIO_POLICY_NEVER);
}
bool isMMapEnabled() {
if (loadSymbols()) return false;
if (mAAudio_getMMapPolicy == nullptr) return false;
int32_t policy = mAAudio_getMMapPolicy();
return isPolicyEnabled(policy);
}
bool isMMapSupported() {
return mMMapSupported;
}
bool isMMapExclusiveSupported() {
return mMMapExclusiveSupported;
}
private:
enum {
AAUDIO_POLICY_NEVER = 1,
AAUDIO_POLICY_AUTO,
AAUDIO_POLICY_ALWAYS
};
typedef int32_t aaudio_policy_t;
int getIntegerProperty(const char *name, int defaultValue) {
int result = defaultValue;
char valueText[PROP_VALUE_MAX] = {0};
if (__system_property_get(name, valueText) != 0) {
result = atoi(valueText);
}
return result;
}
/**
* Load the function pointers.
* This can be called multiple times.
* It should only be called from one thread.
*
* @return 0 if successful or negative error.
*/
aaudio_result_t loadSymbols() {
if (mAAudio_getMMapPolicy != nullptr) {
return 0;
}
AAudioLoader *libLoader = AAudioLoader::getInstance();
int openResult = libLoader->open();
if (openResult != 0) {
LOGD("%s() could not open " LIB_AAUDIO_NAME, __func__);
return AAUDIO_ERROR_UNAVAILABLE;
}
void *libHandle = AAudioLoader::getInstance()->getLibHandle();
if (libHandle == nullptr) {
LOGE("%s() could not find " LIB_AAUDIO_NAME, __func__);
return AAUDIO_ERROR_UNAVAILABLE;
}
mAAudioStream_isMMap = (bool (*)(AAudioStream *stream))
dlsym(libHandle, FUNCTION_IS_MMAP);
if (mAAudioStream_isMMap == nullptr) {
LOGI("%s() could not find " FUNCTION_IS_MMAP, __func__);
return AAUDIO_ERROR_UNAVAILABLE;
}
mAAudio_setMMapPolicy = (int32_t (*)(aaudio_policy_t policy))
dlsym(libHandle, FUNCTION_SET_MMAP_POLICY);
if (mAAudio_setMMapPolicy == nullptr) {
LOGI("%s() could not find " FUNCTION_SET_MMAP_POLICY, __func__);
return AAUDIO_ERROR_UNAVAILABLE;
}
mAAudio_getMMapPolicy = (aaudio_policy_t (*)())
dlsym(libHandle, FUNCTION_GET_MMAP_POLICY);
if (mAAudio_getMMapPolicy == nullptr) {
LOGI("%s() could not find " FUNCTION_GET_MMAP_POLICY, __func__);
return AAUDIO_ERROR_UNAVAILABLE;
}
return 0;
}
bool mMMapSupported = false;
bool mMMapExclusiveSupported = false;
bool (*mAAudioStream_isMMap)(AAudioStream *stream) = nullptr;
int32_t (*mAAudio_setMMapPolicy)(aaudio_policy_t policy) = nullptr;
aaudio_policy_t (*mAAudio_getMMapPolicy)() = nullptr;
};
} // namespace oboe
#endif //OBOE_AAUDIO_EXTENSIONS_H

View File

@ -24,10 +24,17 @@
namespace oboe {
AAudioLoader::~AAudioLoader() {
if (mLibHandle != nullptr) {
dlclose(mLibHandle);
mLibHandle = nullptr;
}
// Issue 360: thread_local variables with non-trivial destructors
// will cause segfaults if the containing library is dlclose()ed on
// devices running M or newer, or devices before M when using a static STL.
// The simple workaround is to not call dlclose.
// https://github.com/android/ndk/wiki/Changelog-r22#known-issues
//
// The libaaudio and libaaudioclient do not use thread_local.
// But, to be safe, we should avoid dlclose() if possible.
// Because AAudioLoader is a static Singleton, we can safely skip
// calling dlclose() without causing a resource leak.
LOGI("%s() dlclose(%s) not called, OK", __func__, LIB_AAUDIO_NAME);
}
AAudioLoader* AAudioLoader::getInstance() {
@ -76,6 +83,15 @@ int AAudioLoader::open() {
builder_setSessionId = load_V_PBI("AAudioStreamBuilder_setSessionId");
}
if (getSdkVersion() >= __ANDROID_API_S__){
builder_setPackageName = load_V_PBCPH("AAudioStreamBuilder_setPackageName");
builder_setAttributionTag = load_V_PBCPH("AAudioStreamBuilder_setAttributionTag");
}
if (getSdkVersion() >= __ANDROID_API_S_V2__) {
builder_setChannelMask = load_V_PBU("AAudioStreamBuilder_setChannelMask");
}
builder_delete = load_I_PB("AAudioStreamBuilder_delete");
@ -90,8 +106,6 @@ int AAudioLoader::open() {
stream_getTimestamp = load_I_PSKPLPL("AAudioStream_getTimestamp");
stream_isMMapUsed = load_B_PS("AAudioStream_isMMapUsed");
stream_getChannelCount = load_I_PS("AAudioStream_getChannelCount");
if (stream_getChannelCount == nullptr) {
// Use old alias if needed.
@ -128,6 +142,10 @@ int AAudioLoader::open() {
stream_getInputPreset = load_I_PS("AAudioStream_getInputPreset");
stream_getSessionId = load_I_PS("AAudioStream_getSessionId");
}
if (getSdkVersion() >= __ANDROID_API_S_V2__) {
stream_getChannelMask = load_U_PS("AAudioStream_getChannelMask");
}
return 0;
}
@ -155,6 +173,12 @@ AAudioLoader::signature_V_PBI AAudioLoader::load_V_PBI(const char *functionName)
return reinterpret_cast<signature_V_PBI>(proc);
}
AAudioLoader::signature_V_PBCPH AAudioLoader::load_V_PBCPH(const char *functionName) {
void *proc = dlsym(mLibHandle, functionName);
AAudioLoader_check(proc, functionName);
return reinterpret_cast<signature_V_PBCPH>(proc);
}
AAudioLoader::signature_V_PBPDPV AAudioLoader::load_V_PBPDPV(const char *functionName) {
void *proc = dlsym(mLibHandle, functionName);
AAudioLoader_check(proc, functionName);
@ -233,10 +257,26 @@ AAudioLoader::signature_I_PSKPLPL AAudioLoader::load_I_PSKPLPL(const char *funct
return reinterpret_cast<signature_I_PSKPLPL>(proc);
}
AAudioLoader::signature_V_PBU AAudioLoader::load_V_PBU(const char *functionName) {
void *proc = dlsym(mLibHandle, functionName);
AAudioLoader_check(proc, functionName);
return reinterpret_cast<signature_V_PBU>(proc);
}
AAudioLoader::signature_U_PS AAudioLoader::load_U_PS(const char *functionName) {
void *proc = dlsym(mLibHandle, functionName);
AAudioLoader_check(proc, functionName);
return reinterpret_cast<signature_U_PS>(proc);
}
// Ensure that all AAudio primitive data types are int32_t
#define ASSERT_INT32(type) static_assert(std::is_same<int32_t, type>::value, \
#type" must be int32_t")
// Ensure that all AAudio primitive data types are uint32_t
#define ASSERT_UINT32(type) static_assert(std::is_same<uint32_t, type>::value, \
#type" must be uint32_t")
#define ERRMSG "Oboe constants must match AAudio constants."
// These asserts help verify that the Oboe definitions match the equivalent AAudio definitions.
@ -304,7 +344,6 @@ AAudioLoader::signature_I_PSKPLPL AAudioLoader::load_I_PSKPLPL(const char *funct
== AAUDIO_PERFORMANCE_MODE_POWER_SAVING, ERRMSG);
static_assert((int32_t)PerformanceMode::LowLatency
== AAUDIO_PERFORMANCE_MODE_LOW_LATENCY, ERRMSG);
#endif
// The aaudio_ usage, content and input_preset types were added in NDK 17,
// which is the first version to support Android Pie (API 28).
@ -343,6 +382,69 @@ AAudioLoader::signature_I_PSKPLPL AAudioLoader::load_I_PSKPLPL(const char *funct
static_assert((int32_t)SessionId::None == AAUDIO_SESSION_ID_NONE, ERRMSG);
static_assert((int32_t)SessionId::Allocate == AAUDIO_SESSION_ID_ALLOCATE, ERRMSG);
#endif // __NDK_MAJOR__ >= 17
// The aaudio channel masks were added in NDK 24,
// which is the first version to support Android SC_V2 (API 32).
#if __NDK_MAJOR__ >= 24
ASSERT_UINT32(aaudio_channel_mask_t);
static_assert((uint32_t)ChannelMask::FrontLeft == AAUDIO_CHANNEL_FRONT_LEFT, ERRMSG);
static_assert((uint32_t)ChannelMask::FrontRight == AAUDIO_CHANNEL_FRONT_RIGHT, ERRMSG);
static_assert((uint32_t)ChannelMask::FrontCenter == AAUDIO_CHANNEL_FRONT_CENTER, ERRMSG);
static_assert((uint32_t)ChannelMask::LowFrequency == AAUDIO_CHANNEL_LOW_FREQUENCY, ERRMSG);
static_assert((uint32_t)ChannelMask::BackLeft == AAUDIO_CHANNEL_BACK_LEFT, ERRMSG);
static_assert((uint32_t)ChannelMask::BackRight == AAUDIO_CHANNEL_BACK_RIGHT, ERRMSG);
static_assert((uint32_t)ChannelMask::FrontLeftOfCenter == AAUDIO_CHANNEL_FRONT_LEFT_OF_CENTER, ERRMSG);
static_assert((uint32_t)ChannelMask::FrontRightOfCenter == AAUDIO_CHANNEL_FRONT_RIGHT_OF_CENTER, ERRMSG);
static_assert((uint32_t)ChannelMask::BackCenter == AAUDIO_CHANNEL_BACK_CENTER, ERRMSG);
static_assert((uint32_t)ChannelMask::SideLeft == AAUDIO_CHANNEL_SIDE_LEFT, ERRMSG);
static_assert((uint32_t)ChannelMask::SideRight == AAUDIO_CHANNEL_SIDE_RIGHT, ERRMSG);
static_assert((uint32_t)ChannelMask::TopCenter == AAUDIO_CHANNEL_TOP_CENTER, ERRMSG);
static_assert((uint32_t)ChannelMask::TopFrontLeft == AAUDIO_CHANNEL_TOP_FRONT_LEFT, ERRMSG);
static_assert((uint32_t)ChannelMask::TopFrontCenter == AAUDIO_CHANNEL_TOP_FRONT_CENTER, ERRMSG);
static_assert((uint32_t)ChannelMask::TopFrontRight == AAUDIO_CHANNEL_TOP_FRONT_RIGHT, ERRMSG);
static_assert((uint32_t)ChannelMask::TopBackLeft == AAUDIO_CHANNEL_TOP_BACK_LEFT, ERRMSG);
static_assert((uint32_t)ChannelMask::TopBackCenter == AAUDIO_CHANNEL_TOP_BACK_CENTER, ERRMSG);
static_assert((uint32_t)ChannelMask::TopBackRight == AAUDIO_CHANNEL_TOP_BACK_RIGHT, ERRMSG);
static_assert((uint32_t)ChannelMask::TopSideLeft == AAUDIO_CHANNEL_TOP_SIDE_LEFT, ERRMSG);
static_assert((uint32_t)ChannelMask::TopSideRight == AAUDIO_CHANNEL_TOP_SIDE_RIGHT, ERRMSG);
static_assert((uint32_t)ChannelMask::BottomFrontLeft == AAUDIO_CHANNEL_BOTTOM_FRONT_LEFT, ERRMSG);
static_assert((uint32_t)ChannelMask::BottomFrontCenter == AAUDIO_CHANNEL_BOTTOM_FRONT_CENTER, ERRMSG);
static_assert((uint32_t)ChannelMask::BottomFrontRight == AAUDIO_CHANNEL_BOTTOM_FRONT_RIGHT, ERRMSG);
static_assert((uint32_t)ChannelMask::LowFrequency2 == AAUDIO_CHANNEL_LOW_FREQUENCY_2, ERRMSG);
static_assert((uint32_t)ChannelMask::FrontWideLeft == AAUDIO_CHANNEL_FRONT_WIDE_LEFT, ERRMSG);
static_assert((uint32_t)ChannelMask::FrontWideRight == AAUDIO_CHANNEL_FRONT_WIDE_RIGHT, ERRMSG);
static_assert((uint32_t)ChannelMask::Mono == AAUDIO_CHANNEL_MONO, ERRMSG);
static_assert((uint32_t)ChannelMask::Stereo == AAUDIO_CHANNEL_STEREO, ERRMSG);
static_assert((uint32_t)ChannelMask::CM2Point1 == AAUDIO_CHANNEL_2POINT1, ERRMSG);
static_assert((uint32_t)ChannelMask::Tri == AAUDIO_CHANNEL_TRI, ERRMSG);
static_assert((uint32_t)ChannelMask::TriBack == AAUDIO_CHANNEL_TRI_BACK, ERRMSG);
static_assert((uint32_t)ChannelMask::CM3Point1 == AAUDIO_CHANNEL_3POINT1, ERRMSG);
static_assert((uint32_t)ChannelMask::CM2Point0Point2 == AAUDIO_CHANNEL_2POINT0POINT2, ERRMSG);
static_assert((uint32_t)ChannelMask::CM2Point1Point2 == AAUDIO_CHANNEL_2POINT1POINT2, ERRMSG);
static_assert((uint32_t)ChannelMask::CM3Point0Point2 == AAUDIO_CHANNEL_3POINT0POINT2, ERRMSG);
static_assert((uint32_t)ChannelMask::CM3Point1Point2 == AAUDIO_CHANNEL_3POINT1POINT2, ERRMSG);
static_assert((uint32_t)ChannelMask::Quad == AAUDIO_CHANNEL_QUAD, ERRMSG);
static_assert((uint32_t)ChannelMask::QuadSide == AAUDIO_CHANNEL_QUAD_SIDE, ERRMSG);
static_assert((uint32_t)ChannelMask::Surround == AAUDIO_CHANNEL_SURROUND, ERRMSG);
static_assert((uint32_t)ChannelMask::Penta == AAUDIO_CHANNEL_PENTA, ERRMSG);
static_assert((uint32_t)ChannelMask::CM5Point1 == AAUDIO_CHANNEL_5POINT1, ERRMSG);
static_assert((uint32_t)ChannelMask::CM5Point1Side == AAUDIO_CHANNEL_5POINT1_SIDE, ERRMSG);
static_assert((uint32_t)ChannelMask::CM6Point1 == AAUDIO_CHANNEL_6POINT1, ERRMSG);
static_assert((uint32_t)ChannelMask::CM7Point1 == AAUDIO_CHANNEL_7POINT1, ERRMSG);
static_assert((uint32_t)ChannelMask::CM5Point1Point2 == AAUDIO_CHANNEL_5POINT1POINT2, ERRMSG);
static_assert((uint32_t)ChannelMask::CM5Point1Point4 == AAUDIO_CHANNEL_5POINT1POINT4, ERRMSG);
static_assert((uint32_t)ChannelMask::CM7Point1Point2 == AAUDIO_CHANNEL_7POINT1POINT2, ERRMSG);
static_assert((uint32_t)ChannelMask::CM7Point1Point4 == AAUDIO_CHANNEL_7POINT1POINT4, ERRMSG);
static_assert((uint32_t)ChannelMask::CM9Point1Point4 == AAUDIO_CHANNEL_9POINT1POINT4, ERRMSG);
static_assert((uint32_t)ChannelMask::CM9Point1Point6 == AAUDIO_CHANNEL_9POINT1POINT6, ERRMSG);
static_assert((uint32_t)ChannelMask::FrontBack == AAUDIO_CHANNEL_FRONT_BACK, ERRMSG);
#endif
#endif // AAUDIO_AAUDIO_H
} // namespace oboe

View File

@ -52,17 +52,34 @@ typedef int32_t aaudio_usage_t;
typedef int32_t aaudio_content_type_t;
typedef int32_t aaudio_input_preset_t;
typedef int32_t aaudio_session_id_t;
// There are a few definitions used by Oboe.
#define AAUDIO_OK static_cast<aaudio_result_t>(Result::OK)
#define AAUDIO_ERROR_TIMEOUT static_cast<aaudio_result_t>(Result::ErrorTimeout)
#define AAUDIO_STREAM_STATE_STARTING static_cast<aaudio_stream_state_t>(StreamState::Starting)
#define AAUDIO_STREAM_STATE_STARTED static_cast<aaudio_stream_state_t>(StreamState::Started)
#else
#include <aaudio/AAudio.h>
#include <android/ndk-version.h>
#endif
#ifndef __NDK_MAJOR__
#define __NDK_MAJOR__ 0
#endif
namespace oboe {
#if __NDK_MAJOR__ < 24
// Defined in SC_V2
typedef uint32_t aaudio_channel_mask_t;
#endif
#ifndef __ANDROID_API_S__
#define __ANDROID_API_S__ 31
#endif
#ifndef __ANDROID_API_S_V2__
#define __ANDROID_API_S_V2__ 32
#endif
namespace oboe {
/**
* The AAudio API was not available in early versions of Android.
@ -82,6 +99,7 @@ class AAudioLoader {
// P = Pointer to following data type
// C = Const prefix
// H = cHar
// U = uint32_t
typedef int32_t (*signature_I_PPB)(AAudioStreamBuilder **builder);
typedef const char * (*signature_CPH_I)(int32_t);
@ -93,6 +111,11 @@ class AAudioLoader {
// AAudioStreamBuilder_setSampleRate()
typedef void (*signature_V_PBI)(AAudioStreamBuilder *, int32_t);
// AAudioStreamBuilder_setChannelMask()
typedef void (*signature_V_PBU)(AAudioStreamBuilder *, uint32_t);
typedef void (*signature_V_PBCPH)(AAudioStreamBuilder *, const char *);
typedef int32_t (*signature_I_PS)(AAudioStream *); // AAudioStream_getSampleRate()
typedef int64_t (*signature_L_PS)(AAudioStream *); // AAudioStream_getFramesRead()
// AAudioStream_setBufferSizeInFrames()
@ -120,6 +143,8 @@ class AAudioLoader {
typedef bool (*signature_B_PS)(AAudioStream *);
typedef uint32_t (*signature_U_PS)(AAudioStream *);
static AAudioLoader* getInstance(); // singleton
/**
@ -133,6 +158,8 @@ class AAudioLoader {
*/
int open();
void *getLibHandle() const { return mLibHandle; }
// Function pointers into the AAudio shared library.
signature_I_PPB createStreamBuilder = nullptr;
@ -147,12 +174,16 @@ class AAudioLoader {
signature_V_PBI builder_setPerformanceMode = nullptr;
signature_V_PBI builder_setSampleRate = nullptr;
signature_V_PBI builder_setSharingMode = nullptr;
signature_V_PBU builder_setChannelMask = nullptr;
signature_V_PBI builder_setUsage = nullptr;
signature_V_PBI builder_setContentType = nullptr;
signature_V_PBI builder_setInputPreset = nullptr;
signature_V_PBI builder_setSessionId = nullptr;
signature_V_PBCPH builder_setPackageName = nullptr;
signature_V_PBCPH builder_setAttributionTag = nullptr;
signature_V_PBPDPV builder_setDataCallback = nullptr;
signature_V_PBPEPV builder_setErrorCallback = nullptr;
@ -167,8 +198,6 @@ class AAudioLoader {
signature_I_PSKPLPL stream_getTimestamp = nullptr;
signature_B_PS stream_isMMapUsed = nullptr;
signature_I_PS stream_close = nullptr;
signature_I_PS stream_getChannelCount = nullptr;
@ -199,6 +228,8 @@ class AAudioLoader {
signature_I_PS stream_getInputPreset = nullptr;
signature_I_PS stream_getSessionId = nullptr;
signature_U_PS stream_getChannelMask = nullptr;
private:
AAudioLoader() {}
~AAudioLoader();
@ -207,6 +238,7 @@ class AAudioLoader {
signature_I_PPB load_I_PPB(const char *name);
signature_CPH_I load_CPH_I(const char *name);
signature_V_PBI load_V_PBI(const char *name);
signature_V_PBCPH load_V_PBCPH(const char *name);
signature_V_PBPDPV load_V_PBPDPV(const char *name);
signature_V_PBPEPV load_V_PBPEPV(const char *name);
signature_I_PB load_I_PB(const char *name);
@ -220,6 +252,8 @@ class AAudioLoader {
signature_I_PSCPVIL load_I_PSCPVIL(const char *name);
signature_I_PSTPTL load_I_PSTPTL(const char *name);
signature_I_PSKPLPL load_I_PSKPLPL(const char *name);
signature_V_PBU load_V_PBU(const char *name);
signature_U_PS load_U_PS(const char *name);
void *mLibHandle = nullptr;
};

View File

@ -23,6 +23,7 @@
#include "common/AudioClock.h"
#include "common/OboeDebug.h"
#include "oboe/Utilities.h"
#include "AAudioExtensions.h"
#ifdef __ANDROID__
#include <sys/system_properties.h>
@ -61,15 +62,17 @@ static aaudio_data_callback_result_t oboe_aaudio_data_callback_proc(
// It calls app error callbacks from a static function in case the stream gets deleted.
static void oboe_aaudio_error_thread_proc(AudioStreamAAudio *oboeStream,
Result error) {
LOGD("%s() - entering >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>", __func__);
oboeStream->requestStop();
if (oboeStream->getCallback() != nullptr) {
oboeStream->getCallback()->onErrorBeforeClose(oboeStream, error);
}
oboeStream->close();
if (oboeStream->getCallback() != nullptr) {
LOGD("%s(,%d) - entering >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>", __func__, error);
AudioStreamErrorCallback *errorCallback = oboeStream->getErrorCallback();
if (errorCallback == nullptr) return; // should be impossible
bool isErrorHandled = errorCallback->onError(oboeStream, error);
if (!isErrorHandled) {
oboeStream->requestStop();
errorCallback->onErrorBeforeClose(oboeStream, error);
oboeStream->close();
// Warning, oboeStream may get deleted by this callback.
oboeStream->getCallback()->onErrorAfterClose(oboeStream, error);
errorCallback->onErrorAfterClose(oboeStream, error);
}
LOGD("%s() - exiting <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<", __func__);
}
@ -92,7 +95,7 @@ AudioStreamAAudio::AudioStreamAAudio(const AudioStreamBuilder &builder)
: AudioStream(builder)
, mAAudioStream(nullptr) {
mCallbackThreadEnabled.store(false);
isSupported();
mLibLoader = AAudioLoader::getInstance();
}
bool AudioStreamAAudio::isSupported() {
@ -101,16 +104,29 @@ bool AudioStreamAAudio::isSupported() {
return openResult == 0;
}
// Static 'C' wrapper for the error callback method.
// Static method for the error callback.
// We use a method so we can access protected methods on the stream.
// Launch a thread to handle the error.
// That other thread can safely stop, close and delete the stream.
void AudioStreamAAudio::internalErrorCallback(
AAudioStream *stream,
void *userData,
aaudio_result_t error) {
oboe::Result oboeResult = static_cast<Result>(error);
AudioStreamAAudio *oboeStream = reinterpret_cast<AudioStreamAAudio*>(userData);
// Prevents deletion of the stream if the app is using AudioStreamBuilder::openSharedStream()
// Coerce the error code if needed to workaround a regression in RQ1A that caused
// the wrong code to be passed when headsets plugged in. See b/173928197.
if (OboeGlobals::areWorkaroundsEnabled()
&& getSdkVersion() == __ANDROID_API_R__
&& oboeResult == oboe::Result::ErrorTimeout) {
oboeResult = oboe::Result::ErrorDisconnected;
LOGD("%s() ErrorTimeout changed to ErrorDisconnected to fix b/173928197", __func__);
}
oboeStream->mErrorCallbackResult = oboeResult;
// Prevents deletion of the stream if the app is using AudioStreamBuilder::openStream(shared_ptr)
std::shared_ptr<AudioStream> sharedStream = oboeStream->lockWeakThis();
// These checks should be enough because we assume that the stream close()
@ -118,16 +134,14 @@ void AudioStreamAAudio::internalErrorCallback(
if (oboeStream->wasErrorCallbackCalled()) { // block extra error callbacks
LOGE("%s() multiple error callbacks called!", __func__);
} else if (stream != oboeStream->getUnderlyingStream()) {
LOGW("%s() stream already closed", __func__); // can happen if there are bugs
LOGW("%s() stream already closed or closing", __func__); // might happen if there are bugs
} else if (sharedStream) {
// Handle error on a separate thread using shared pointer.
std::thread t(oboe_aaudio_error_thread_proc_shared, sharedStream,
static_cast<Result>(error));
std::thread t(oboe_aaudio_error_thread_proc_shared, sharedStream, oboeResult);
t.detach();
} else {
// Handle error on a separate thread.
std::thread t(oboe_aaudio_error_thread_proc, oboeStream,
static_cast<Result>(error));
std::thread t(oboe_aaudio_error_thread_proc, oboeStream, oboeResult);
t.detach();
}
}
@ -191,7 +205,18 @@ Result AudioStreamAAudio::open() {
}
mLibLoader->builder_setBufferCapacityInFrames(aaudioBuilder, capacity);
mLibLoader->builder_setChannelCount(aaudioBuilder, mChannelCount);
// Channel mask was added in SC_V2. Given the corresponding channel count of selected channel
// mask may be different from selected channel count, the last set value will be respected.
// If channel count is set after channel mask, the previously set channel mask will be cleared.
// If channel mask is set after channel count, the channel count will be automatically
// calculated from selected channel mask. In that case, only set channel mask when the API
// is available and the channel mask is specified.
if (mLibLoader->builder_setChannelMask != nullptr && mChannelMask != ChannelMask::Unspecified) {
mLibLoader->builder_setChannelMask(aaudioBuilder,
static_cast<aaudio_channel_mask_t>(mChannelMask));
} else {
mLibLoader->builder_setChannelCount(aaudioBuilder, mChannelCount);
}
mLibLoader->builder_setDeviceId(aaudioBuilder, mDeviceId);
mLibLoader->builder_setDirection(aaudioBuilder, static_cast<aaudio_direction_t>(mDirection));
mLibLoader->builder_setFormat(aaudioBuilder, static_cast<aaudio_format_t>(mFormat));
@ -213,8 +238,13 @@ Result AudioStreamAAudio::open() {
}
if (mLibLoader->builder_setInputPreset != nullptr) {
aaudio_input_preset_t inputPreset = mInputPreset;
if (getSdkVersion() <= __ANDROID_API_P__ && inputPreset == InputPreset::VoicePerformance) {
LOGD("InputPreset::VoicePerformance not supported before Q. Using VoiceRecognition.");
inputPreset = InputPreset::VoiceRecognition; // most similar preset
}
mLibLoader->builder_setInputPreset(aaudioBuilder,
static_cast<aaudio_input_preset_t>(mInputPreset));
static_cast<aaudio_input_preset_t>(inputPreset));
}
if (mLibLoader->builder_setSessionId != nullptr) {
@ -222,15 +252,30 @@ Result AudioStreamAAudio::open() {
static_cast<aaudio_session_id_t>(mSessionId));
}
// TODO get more parameters from the builder?
// These were added in S so we have to check for the function pointer.
if (mLibLoader->builder_setPackageName != nullptr && !mPackageName.empty()) {
mLibLoader->builder_setPackageName(aaudioBuilder,
mPackageName.c_str());
}
if (mStreamCallback != nullptr) {
if (mLibLoader->builder_setAttributionTag != nullptr && !mAttributionTag.empty()) {
mLibLoader->builder_setAttributionTag(aaudioBuilder,
mAttributionTag.c_str());
}
if (isDataCallbackSpecified()) {
mLibLoader->builder_setDataCallback(aaudioBuilder, oboe_aaudio_data_callback_proc, this);
mLibLoader->builder_setFramesPerDataCallback(aaudioBuilder, getFramesPerCallback());
// If the data callback is not being used then the write method will return an error
// and the app can stop and close the stream.
mLibLoader->builder_setFramesPerDataCallback(aaudioBuilder, getFramesPerDataCallback());
if (!isErrorCallbackSpecified()) {
// The app did not specify a callback so we should specify
// our own so the stream gets closed and stopped.
mErrorCallback = &mDefaultErrorCallback;
}
mLibLoader->builder_setErrorCallback(aaudioBuilder, internalErrorCallback, this);
}
// Else if the data callback is not being used then the write method will return an error
// and the app can stop and close the stream.
// ============= OPEN THE STREAM ================
{
@ -239,6 +284,11 @@ Result AudioStreamAAudio::open() {
mAAudioStream.store(stream);
}
if (result != Result::OK) {
// Warn developer because ErrorInternal is not very informative.
if (result == Result::ErrorInternal && mDirection == Direction::Input) {
LOGW("AudioStreamAAudio.open() may have failed due to lack of "
"audio recording permission.");
}
goto error2;
}
@ -252,7 +302,7 @@ Result AudioStreamAAudio::open() {
mLibLoader->stream_getPerformanceMode(mAAudioStream));
mBufferCapacityInFrames = mLibLoader->stream_getBufferCapacity(mAAudioStream);
mBufferSizeInFrames = mLibLoader->stream_getBufferSize(mAAudioStream);
mFramesPerBurst = mLibLoader->stream_getFramesPerBurst(mAAudioStream);
// These were added in P so we have to check for the function pointer.
if (mLibLoader->stream_getUsage != nullptr) {
@ -270,10 +320,16 @@ Result AudioStreamAAudio::open() {
mSessionId = SessionId::None;
}
if (mLibLoader->stream_getChannelMask != nullptr) {
mChannelMask = static_cast<ChannelMask>(mLibLoader->stream_getChannelMask(mAAudioStream));
}
LOGD("AudioStreamAAudio.open() format=%d, sampleRate=%d, capacity = %d",
static_cast<int>(mFormat), static_cast<int>(mSampleRate),
static_cast<int>(mBufferCapacityInFrames));
calculateDefaultDelayBeforeCloseMillis();
error2:
mLibLoader->builder_delete(aaudioBuilder);
LOGD("AudioStreamAAudio.open: AAudioStream_Open() returned %s",
@ -282,24 +338,49 @@ error2:
}
Result AudioStreamAAudio::close() {
// The main reason we have this mutex if to prevent a collision between a call
// by the application to stop a stream at the same time that an onError callback
// is being executed because of a disconnect. The close will delete the stream,
// which could otherwise cause the requestStop() to crash.
// Prevent two threads from closing the stream at the same time and crashing.
// This could occur, for example, if an application called close() at the same
// time that an onError callback was being executed because of a disconnect.
std::lock_guard<std::mutex> lock(mLock);
AudioStream::close();
// This will delete the AAudio stream object so we need to null out the pointer.
AAudioStream *stream = mAAudioStream.exchange(nullptr);
AAudioStream *stream = nullptr;
{
// Wait for any methods using mAAudioStream to finish.
std::unique_lock<std::shared_mutex> lock2(mAAudioStreamLock);
// Closing will delete *mAAudioStream so we need to null out the pointer atomically.
stream = mAAudioStream.exchange(nullptr);
}
if (stream != nullptr) {
if (OboeGlobals::areWorkaroundsEnabled()) {
// Make sure we are really stopped. Do it under mLock
// so another thread cannot call requestStart() right before the close.
requestStop_l(stream);
sleepBeforeClose();
}
return static_cast<Result>(mLibLoader->stream_close(stream));
} else {
return Result::ErrorClosed;
}
}
DataCallbackResult AudioStreamAAudio::callOnAudioReady(AAudioStream *stream,
static void oboe_stop_thread_proc(AudioStream *oboeStream) {
if (oboeStream != nullptr) {
oboeStream->requestStop();
}
}
void AudioStreamAAudio::launchStopThread() {
// Prevent multiple stop threads from being launched.
if (mStopThreadAllowed.exchange(false)) {
// Stop this stream on a separate thread
std::thread t(oboe_stop_thread_proc, this);
t.detach();
}
}
DataCallbackResult AudioStreamAAudio::callOnAudioReady(AAudioStream * /*stream*/,
void *audioData,
int32_t numFrames) {
DataCallbackResult result = fireDataCallback(audioData, numFrames);
@ -312,16 +393,12 @@ DataCallbackResult AudioStreamAAudio::callOnAudioReady(AAudioStream *stream,
LOGE("Oboe callback returned unexpected value = %d", result);
}
if (getSdkVersion() <= __ANDROID_API_P__) {
// Returning Stop caused various problems before S. See #1230
if (OboeGlobals::areWorkaroundsEnabled() && getSdkVersion() <= __ANDROID_API_R__) {
launchStopThread();
if (isMMapUsed()) {
return DataCallbackResult::Stop;
} else {
// Legacy stream <= API_P cannot be restarted after returning Stop.
return DataCallbackResult::Continue;
}
return DataCallbackResult::Continue;
} else {
return DataCallbackResult::Stop; // OK >= API_Q
return DataCallbackResult::Stop; // OK >= API_S
}
}
}
@ -338,9 +415,10 @@ Result AudioStreamAAudio::requestStart() {
return Result::OK;
}
}
if (mStreamCallback != nullptr) { // Was a callback requested?
if (isDataCallbackSpecified()) {
setDataCallbackEnabled(true);
}
mStopThreadAllowed = true;
return static_cast<Result>(mLibLoader->stream_requestStart(stream));
} else {
return Result::ErrorClosed;
@ -385,22 +463,28 @@ Result AudioStreamAAudio::requestStop() {
std::lock_guard<std::mutex> lock(mLock);
AAudioStream *stream = mAAudioStream.load();
if (stream != nullptr) {
// Avoid state machine errors in O_MR1.
if (getSdkVersion() <= __ANDROID_API_O_MR1__) {
StreamState state = static_cast<StreamState>(mLibLoader->stream_getState(stream));
if (state == StreamState::Stopping || state == StreamState::Stopped) {
return Result::OK;
}
}
return static_cast<Result>(mLibLoader->stream_requestStop(stream));
return requestStop_l(stream);
} else {
return Result::ErrorClosed;
}
}
// Call under mLock
Result AudioStreamAAudio::requestStop_l(AAudioStream *stream) {
// Avoid state machine errors in O_MR1.
if (getSdkVersion() <= __ANDROID_API_O_MR1__) {
StreamState state = static_cast<StreamState>(mLibLoader->stream_getState(stream));
if (state == StreamState::Stopping || state == StreamState::Stopped) {
return Result::OK;
}
}
return static_cast<Result>(mLibLoader->stream_requestStop(stream));
}
ResultWithValue<int32_t> AudioStreamAAudio::write(const void *buffer,
int32_t numFrames,
int64_t timeoutNanoseconds) {
std::shared_lock<std::shared_mutex> lock(mAAudioStreamLock);
AAudioStream *stream = mAAudioStream.load();
if (stream != nullptr) {
int32_t result = mLibLoader->stream_write(mAAudioStream, buffer,
@ -414,6 +498,7 @@ ResultWithValue<int32_t> AudioStreamAAudio::write(const void *buffer,
ResultWithValue<int32_t> AudioStreamAAudio::read(void *buffer,
int32_t numFrames,
int64_t timeoutNanoseconds) {
std::shared_lock<std::shared_mutex> lock(mAAudioStreamLock);
AAudioStream *stream = mAAudioStream.load();
if (stream != nullptr) {
int32_t result = mLibLoader->stream_read(mAAudioStream, buffer,
@ -497,29 +582,27 @@ Result AudioStreamAAudio::waitForStateChange(StreamState currentState,
}
ResultWithValue<int32_t> AudioStreamAAudio::setBufferSizeInFrames(int32_t requestedFrames) {
int32_t adjustedFrames = requestedFrames;
if (adjustedFrames > mBufferCapacityInFrames) {
adjustedFrames = mBufferCapacityInFrames;
}
// This calls getBufferSize() so avoid recursive lock.
adjustedFrames = QuirksManager::getInstance().clipBufferSize(*this, adjustedFrames);
std::shared_lock<std::shared_mutex> lock(mAAudioStreamLock);
AAudioStream *stream = mAAudioStream.load();
if (stream != nullptr) {
int32_t adjustedFrames = requestedFrames;
if (adjustedFrames > mBufferCapacityInFrames) {
adjustedFrames = mBufferCapacityInFrames;
}
adjustedFrames = QuirksManager::getInstance().clipBufferSize(*this, adjustedFrames);
int32_t newBufferSize = mLibLoader->stream_setBufferSize(mAAudioStream, adjustedFrames);
// Cache the result if it's valid
if (newBufferSize > 0) mBufferSizeInFrames = newBufferSize;
return ResultWithValue<int32_t>::createBasedOnSign(newBufferSize);
} else {
return ResultWithValue<int32_t>(Result::ErrorClosed);
}
}
StreamState AudioStreamAAudio::getState() const {
StreamState AudioStreamAAudio::getState() {
std::shared_lock<std::shared_mutex> lock(mAAudioStreamLock);
AAudioStream *stream = mAAudioStream.load();
if (stream != nullptr) {
aaudio_stream_state_t aaudioState = mLibLoader->stream_getState(stream);
@ -536,6 +619,7 @@ StreamState AudioStreamAAudio::getState() const {
}
int32_t AudioStreamAAudio::getBufferSizeInFrames() {
std::shared_lock<std::shared_mutex> lock(mAAudioStreamLock);
AAudioStream *stream = mAAudioStream.load();
if (stream != nullptr) {
mBufferSizeInFrames = mLibLoader->stream_getBufferSize(stream);
@ -543,29 +627,34 @@ int32_t AudioStreamAAudio::getBufferSizeInFrames() {
return mBufferSizeInFrames;
}
int32_t AudioStreamAAudio::getFramesPerBurst() {
AAudioStream *stream = mAAudioStream.load();
if (stream != nullptr) {
mFramesPerBurst = mLibLoader->stream_getFramesPerBurst(stream);
}
return mFramesPerBurst;
}
void AudioStreamAAudio::updateFramesRead() {
std::shared_lock<std::shared_mutex> lock(mAAudioStreamLock);
AAudioStream *stream = mAAudioStream.load();
// Set to 1 for debugging race condition #1180 with mAAudioStream.
// See also DEBUG_CLOSE_RACE in OboeTester.
// This was left in the code so that we could test the fix again easily in the future.
// We could not trigger the race condition without adding these get calls and the sleeps.
#define DEBUG_CLOSE_RACE 0
#if DEBUG_CLOSE_RACE
// This is used when testing race conditions with close().
// See DEBUG_CLOSE_RACE in OboeTester
AudioClock::sleepForNanos(400 * kNanosPerMillisecond);
#endif // DEBUG_CLOSE_RACE
if (stream != nullptr) {
mFramesRead = mLibLoader->stream_getFramesRead(stream);
}
}
void AudioStreamAAudio::updateFramesWritten() {
std::shared_lock<std::shared_mutex> lock(mAAudioStreamLock);
AAudioStream *stream = mAAudioStream.load();
if (stream != nullptr) {
mFramesWritten = mLibLoader->stream_getFramesWritten(stream);
}
}
ResultWithValue<int32_t> AudioStreamAAudio::getXRunCount() const {
ResultWithValue<int32_t> AudioStreamAAudio::getXRunCount() {
std::shared_lock<std::shared_mutex> lock(mAAudioStreamLock);
AAudioStream *stream = mAAudioStream.load();
if (stream != nullptr) {
return ResultWithValue<int32_t>::createBasedOnSign(mLibLoader->stream_getXRunCount(stream));
@ -577,11 +666,12 @@ ResultWithValue<int32_t> AudioStreamAAudio::getXRunCount() const {
Result AudioStreamAAudio::getTimestamp(clockid_t clockId,
int64_t *framePosition,
int64_t *timeNanoseconds) {
if (getState() != StreamState::Started) {
return Result::ErrorInvalidState;
}
std::shared_lock<std::shared_mutex> lock(mAAudioStreamLock);
AAudioStream *stream = mAAudioStream.load();
if (stream != nullptr) {
if (getState() != StreamState::Started) {
return Result::ErrorInvalidState;
}
return static_cast<Result>(mLibLoader->stream_getTimestamp(stream, clockId,
framePosition, timeNanoseconds));
} else {
@ -590,11 +680,6 @@ Result AudioStreamAAudio::getTimestamp(clockid_t clockId,
}
ResultWithValue<double> AudioStreamAAudio::calculateLatencyMillis() {
AAudioStream *stream = mAAudioStream.load();
if (stream == nullptr) {
return ResultWithValue<double>(Result::ErrorClosed);
}
// Get the time that a known audio frame was presented.
int64_t hardwareFrameIndex;
int64_t hardwareFrameHardwareTime;
@ -632,9 +717,10 @@ ResultWithValue<double> AudioStreamAAudio::calculateLatencyMillis() {
}
bool AudioStreamAAudio::isMMapUsed() {
std::shared_lock<std::shared_mutex> lock(mAAudioStreamLock);
AAudioStream *stream = mAAudioStream.load();
if (stream != nullptr) {
return mLibLoader->stream_isMMapUsed(stream);
return AAudioExtensions::getInstance().isMMapUsed(stream);
} else {
return false;
}

View File

@ -18,6 +18,7 @@
#define OBOE_STREAM_AAUDIO_H_
#include <atomic>
#include <shared_mutex>
#include <mutex>
#include <thread>
@ -67,8 +68,7 @@ public:
ResultWithValue<int32_t> setBufferSizeInFrames(int32_t requestedFrames) override;
int32_t getBufferSizeInFrames() override;
int32_t getFramesPerBurst() override;
ResultWithValue<int32_t> getXRunCount() const override;
ResultWithValue<int32_t> getXRunCount() override;
bool isXRunCountSupported() const override { return true; }
ResultWithValue<double> calculateLatencyMillis() override;
@ -81,7 +81,7 @@ public:
int64_t *framePosition,
int64_t *timeNanoseconds) override;
StreamState getState() const override;
StreamState getState() override;
AudioApi getAudioApi() const override {
return AudioApi::AAudio;
@ -91,7 +91,7 @@ public:
void *audioData,
int32_t numFrames);
bool isMMapUsed();
bool isMMapUsed();
protected:
static void internalErrorCallback(
@ -108,14 +108,33 @@ protected:
void logUnsupportedAttributes();
private:
// Must call under mLock. And stream must NOT be nullptr.
Result requestStop_l(AAudioStream *stream);
/**
* Launch a thread that will stop the stream.
*/
void launchStopThread();
public:
int32_t getMDelayBeforeCloseMillis() const;
void setDelayBeforeCloseMillis(int32_t mDelayBeforeCloseMillis);
private:
std::atomic<bool> mCallbackThreadEnabled;
std::atomic<bool> mStopThreadAllowed{false};
// pointer to the underlying AAudio stream, valid if open, null if closed
// pointer to the underlying 'C' AAudio stream, valid if open, null if closed
std::atomic<AAudioStream *> mAAudioStream{nullptr};
std::shared_mutex mAAudioStreamLock; // to protect mAAudioStream while closing
static AAudioLoader *mLibLoader;
// We may not use this but it is so small that it is not worth allocating dynamically.
AudioStreamErrorCallback mDefaultErrorCallback;
};
} // namespace oboe

View File

@ -20,7 +20,7 @@ using namespace oboe;
using namespace flowgraph;
int32_t AudioSourceCaller::onProcessFixedBlock(uint8_t *buffer, int32_t numBytes) {
oboe::AudioStreamCallback *callback = mStream->getCallback();
AudioStreamDataCallback *callback = mStream->getDataCallback();
int32_t result = 0;
int32_t numFrames = numBytes / mStream->getBytesPerFrame();
if (callback != nullptr) {

View File

@ -55,14 +55,14 @@ void AudioStream::checkScheduler() {
DataCallbackResult AudioStream::fireDataCallback(void *audioData, int32_t numFrames) {
if (!isDataCallbackEnabled()) {
LOGW("AudioStream::%s() called with data callback disabled!", __func__);
return DataCallbackResult::Stop; // We should not be getting called any more.
return DataCallbackResult::Stop; // Should not be getting called
}
DataCallbackResult result;
if (mStreamCallback == nullptr) {
result = onDefaultCallback(audioData, numFrames);
if (mDataCallback) {
result = mDataCallback->onAudioReady(this, audioData, numFrames);
} else {
result = mStreamCallback->onAudioReady(this, audioData, numFrames);
result = onDefaultCallback(audioData, numFrames);
}
// On Oreo, we might get called after returning stop.
// So block that here.
@ -196,16 +196,13 @@ ResultWithValue<FrameTimestamp> AudioStream::getTimestamp(clockid_t clockId) {
}
}
static void oboe_stop_thread_proc(AudioStream *oboeStream) {
if (oboeStream != nullptr) {
oboeStream->requestStop();
}
}
void AudioStream::launchStopThread() {
// Stop this stream on a separate thread
std::thread t(oboe_stop_thread_proc, this);
t.detach();
void AudioStream::calculateDefaultDelayBeforeCloseMillis() {
// Calculate delay time before close based on burst duration.
// Start with a burst duration then add 1 msec as a safety margin.
mDelayBeforeCloseMillis = std::max(kMinDelayBeforeCloseMillis,
1 + ((mFramesPerBurst * 1000) / getSampleRate()));
LOGD("calculateDefaultDelayBeforeCloseMillis() default = %d",
static_cast<int>(mDelayBeforeCloseMillis));
}
} // namespace oboe

View File

@ -16,6 +16,8 @@
#include <sys/types.h>
#include "aaudio/AAudioExtensions.h"
#include "aaudio/AudioStreamAAudio.h"
#include "FilterAudioStream.h"
#include "OboeDebug.h"
@ -80,13 +82,19 @@ AudioStream *AudioStreamBuilder::build() {
}
bool AudioStreamBuilder::isCompatible(AudioStreamBase &other) {
return getSampleRate() == other.getSampleRate()
&& getFormat() == other.getFormat()
&& getChannelCount() == other.getChannelCount();
return (getSampleRate() == oboe::Unspecified || getSampleRate() == other.getSampleRate())
&& (getFormat() == (AudioFormat)oboe::Unspecified || getFormat() == other.getFormat())
&& (getFramesPerDataCallback() == oboe::Unspecified || getFramesPerDataCallback() == other.getFramesPerDataCallback())
&& (getChannelCount() == oboe::Unspecified || getChannelCount() == other.getChannelCount());
}
Result AudioStreamBuilder::openStream(AudioStream **streamPP) {
Result result = Result::OK;
auto result = isValidConfig();
if (result != Result::OK) {
LOGW("%s() invalid config %d", __func__, result);
return result;
}
LOGI("%s() %s -------- %s --------",
__func__, getDirection() == Direction::Input ? "INPUT" : "OUTPUT", getVersionText());
@ -104,14 +112,13 @@ Result AudioStreamBuilder::openStream(AudioStream **streamPP) {
// Do we need to make a child stream and convert.
if (conversionNeeded) {
AudioStream *tempStream;
result = childBuilder.openStream(&tempStream);
if (result != Result::OK) {
return result;
}
if (isCompatible(*tempStream)) {
// Everything matches so we can just use the child stream directly.
// The child stream would work as the requested stream so we can just use it directly.
*streamPP = tempStream;
return result;
} else {
@ -126,6 +133,9 @@ Result AudioStreamBuilder::openStream(AudioStream **streamPP) {
if (getSampleRate() == oboe::Unspecified) {
parentBuilder.setSampleRate(tempStream->getSampleRate());
}
if (getFramesPerDataCallback() == oboe::Unspecified) {
parentBuilder.setFramesPerCallback(tempStream->getFramesPerDataCallback());
}
// Use childStream in a FilterAudioStream.
LOGI("%s() create a FilterAudioStream for data conversion.", __func__);
@ -148,7 +158,20 @@ Result AudioStreamBuilder::openStream(AudioStream **streamPP) {
}
}
result = streamP->open(); // TODO review API
// If MMAP has a problem in this case then disable it temporarily.
bool wasMMapOriginallyEnabled = AAudioExtensions::getInstance().isMMapEnabled();
bool wasMMapTemporarilyDisabled = false;
if (wasMMapOriginallyEnabled) {
bool isMMapSafe = QuirksManager::getInstance().isMMapSafe(childBuilder);
if (!isMMapSafe) {
AAudioExtensions::getInstance().setMMapEnabled(false);
wasMMapTemporarilyDisabled = true;
}
}
result = streamP->open();
if (wasMMapTemporarilyDisabled) {
AAudioExtensions::getInstance().setMMapEnabled(wasMMapOriginallyEnabled); // restore original
}
if (result == Result::OK) {
int32_t optimalBufferSize = -1;

View File

@ -20,16 +20,21 @@
#include "DataConversionFlowGraph.h"
#include "SourceFloatCaller.h"
#include "SourceI16Caller.h"
#include "SourceI24Caller.h"
#include "SourceI32Caller.h"
#include <flowgraph/ClipToRange.h>
#include <flowgraph/MonoToMultiConverter.h>
#include <flowgraph/MultiToMonoConverter.h>
#include <flowgraph/RampLinear.h>
#include <flowgraph/SinkFloat.h>
#include <flowgraph/SinkI16.h>
#include <flowgraph/SinkI24.h>
#include <flowgraph/SinkI32.h>
#include <flowgraph/SourceFloat.h>
#include <flowgraph/SourceI16.h>
#include <flowgraph/SourceI24.h>
#include <flowgraph/SourceI32.h>
#include <flowgraph/SampleRateConverter.h>
using namespace oboe;
@ -81,34 +86,47 @@ Result DataConversionFlowGraph::configure(AudioStream *sourceStream, AudioStream
AudioFormat sourceFormat = sourceStream->getFormat();
int32_t sourceChannelCount = sourceStream->getChannelCount();
int32_t sourceSampleRate = sourceStream->getSampleRate();
int32_t sourceFramesPerCallback = sourceStream->getFramesPerDataCallback();
AudioFormat sinkFormat = sinkStream->getFormat();
int32_t sinkChannelCount = sinkStream->getChannelCount();
int32_t sinkSampleRate = sinkStream->getSampleRate();
int32_t sinkFramesPerCallback = sinkStream->getFramesPerDataCallback();
LOGI("%s() flowgraph converts channels: %d to %d, format: %d to %d, rate: %d to %d, qual = %d",
LOGI("%s() flowgraph converts channels: %d to %d, format: %d to %d"
", rate: %d to %d, cbsize: %d to %d, qual = %d",
__func__,
sourceChannelCount, sinkChannelCount,
sourceFormat, sinkFormat,
sourceSampleRate, sinkSampleRate,
sourceFramesPerCallback, sinkFramesPerCallback,
sourceStream->getSampleRateConversionQuality());
int32_t framesPerCallback = (sourceStream->getFramesPerCallback() == kUnspecified)
? sourceStream->getFramesPerBurst()
: sourceStream->getFramesPerCallback();
// Source
// If OUTPUT and using a callback then call back to the app using a SourceCaller.
// If INPUT and NOT using a callback then read from the child stream using a SourceCaller.
if ((sourceStream->getCallback() != nullptr && isOutput)
|| (sourceStream->getCallback() == nullptr && isInput)) {
// IF OUTPUT and using a callback then call back to the app using a SourceCaller.
// OR IF INPUT and NOT using a callback then read from the child stream using a SourceCaller.
bool isDataCallbackSpecified = sourceStream->isDataCallbackSpecified();
if ((isDataCallbackSpecified && isOutput)
|| (!isDataCallbackSpecified && isInput)) {
int32_t actualSourceFramesPerCallback = (sourceFramesPerCallback == kUnspecified)
? sourceStream->getFramesPerBurst()
: sourceFramesPerCallback;
switch (sourceFormat) {
case AudioFormat::Float:
mSourceCaller = std::make_unique<SourceFloatCaller>(sourceChannelCount,
framesPerCallback);
actualSourceFramesPerCallback);
break;
case AudioFormat::I16:
mSourceCaller = std::make_unique<SourceI16Caller>(sourceChannelCount,
framesPerCallback);
actualSourceFramesPerCallback);
break;
case AudioFormat::I24:
mSourceCaller = std::make_unique<SourceI24Caller>(sourceChannelCount,
actualSourceFramesPerCallback);
break;
case AudioFormat::I32:
mSourceCaller = std::make_unique<SourceI32Caller>(sourceChannelCount,
actualSourceFramesPerCallback);
break;
default:
LOGE("%s() Unsupported source caller format = %d", __func__, sourceFormat);
@ -117,8 +135,8 @@ Result DataConversionFlowGraph::configure(AudioStream *sourceStream, AudioStream
mSourceCaller->setStream(sourceStream);
lastOutput = &mSourceCaller->output;
} else {
// If OUTPUT and NOT using a callback then write to the child stream using a BlockWriter.
// If INPUT and using a callback then write to the app using a BlockWriter.
// IF OUTPUT and NOT using a callback then write to the child stream using a BlockWriter.
// OR IF INPUT and using a callback then write to the app using a BlockWriter.
switch (sourceFormat) {
case AudioFormat::Float:
mSource = std::make_unique<SourceFloat>(sourceChannelCount);
@ -126,40 +144,72 @@ Result DataConversionFlowGraph::configure(AudioStream *sourceStream, AudioStream
case AudioFormat::I16:
mSource = std::make_unique<SourceI16>(sourceChannelCount);
break;
case AudioFormat::I24:
mSource = std::make_unique<SourceI24>(sourceChannelCount);
break;
case AudioFormat::I32:
mSource = std::make_unique<SourceI32>(sourceChannelCount);
break;
default:
LOGE("%s() Unsupported source format = %d", __func__, sourceFormat);
return Result::ErrorIllegalArgument;
}
if (isInput) {
int32_t actualSinkFramesPerCallback = (sinkFramesPerCallback == kUnspecified)
? sinkStream->getFramesPerBurst()
: sinkFramesPerCallback;
// The BlockWriter is after the Sink so use the SinkStream size.
mBlockWriter.open(framesPerCallback * sinkStream->getBytesPerFrame());
mBlockWriter.open(actualSinkFramesPerCallback * sinkStream->getBytesPerFrame());
mAppBuffer = std::make_unique<uint8_t[]>(
kDefaultBufferSize * sinkStream->getBytesPerFrame());
}
lastOutput = &mSource->output;
}
// If we are going to reduce the number of channels then do it before the
// sample rate converter.
if (sourceChannelCount > sinkChannelCount) {
if (sinkChannelCount == 1) {
mMultiToMonoConverter = std::make_unique<MultiToMonoConverter>(sourceChannelCount);
lastOutput->connect(&mMultiToMonoConverter->input);
lastOutput = &mMultiToMonoConverter->output;
} else {
mChannelCountConverter = std::make_unique<ChannelCountConverter>(
sourceChannelCount,
sinkChannelCount);
lastOutput->connect(&mChannelCountConverter->input);
lastOutput = &mChannelCountConverter->output;
}
}
// Sample Rate conversion
if (sourceSampleRate != sinkSampleRate) {
mResampler.reset(MultiChannelResampler::make(sourceChannelCount,
// Create a resampler to do the math.
mResampler.reset(MultiChannelResampler::make(lastOutput->getSamplesPerFrame(),
sourceSampleRate,
sinkSampleRate,
convertOboeSRQualityToMCR(
sourceStream->getSampleRateConversionQuality())));
mRateConverter = std::make_unique<SampleRateConverter>(sourceChannelCount,
// Make a flowgraph node that uses the resampler.
mRateConverter = std::make_unique<SampleRateConverter>(lastOutput->getSamplesPerFrame(),
*mResampler.get());
lastOutput->connect(&mRateConverter->input);
lastOutput = &mRateConverter->output;
}
// Expand the number of channels if required.
if (sourceChannelCount == 1 && sinkChannelCount > 1) {
mChannelConverter = std::make_unique<MonoToMultiConverter>(sinkChannelCount);
lastOutput->connect(&mChannelConverter->input);
lastOutput = &mChannelConverter->output;
} else if (sourceChannelCount != sinkChannelCount) {
LOGW("%s() Channel reduction not supported.", __func__);
return Result::ErrorUnimplemented; // TODO
if (sourceChannelCount < sinkChannelCount) {
if (sourceChannelCount == 1) {
mMonoToMultiConverter = std::make_unique<MonoToMultiConverter>(sinkChannelCount);
lastOutput->connect(&mMonoToMultiConverter->input);
lastOutput = &mMonoToMultiConverter->output;
} else {
mChannelCountConverter = std::make_unique<ChannelCountConverter>(
sourceChannelCount,
sinkChannelCount);
lastOutput->connect(&mChannelCountConverter->input);
lastOutput = &mChannelCountConverter->output;
}
}
// Sink
@ -170,14 +220,18 @@ Result DataConversionFlowGraph::configure(AudioStream *sourceStream, AudioStream
case AudioFormat::I16:
mSink = std::make_unique<SinkI16>(sinkChannelCount);
break;
case AudioFormat::I24:
mSink = std::make_unique<SinkI24>(sinkChannelCount);
break;
case AudioFormat::I32:
mSink = std::make_unique<SinkI32>(sinkChannelCount);
break;
default:
LOGE("%s() Unsupported sink format = %d", __func__, sinkFormat);
return Result::ErrorIllegalArgument;;
}
lastOutput->connect(&mSink->input);
mFramePosition = 0;
return Result::OK;
}
@ -185,8 +239,7 @@ int32_t DataConversionFlowGraph::read(void *buffer, int32_t numFrames, int64_t t
if (mSourceCaller) {
mSourceCaller->setTimeoutNanos(timeoutNanos);
}
int32_t numRead = mSink->read(mFramePosition, buffer, numFrames);
mFramePosition += numRead;
int32_t numRead = mSink->read(buffer, numFrames);
return numRead;
}
@ -196,8 +249,7 @@ int32_t DataConversionFlowGraph::write(void *inputBuffer, int32_t numFrames) {
mSource->setData(inputBuffer, numFrames);
while (true) {
// Pull and read some data in app format into a small buffer.
int32_t framesRead = mSink->read(mFramePosition, mAppBuffer.get(), flowgraph::kDefaultBufferSize);
mFramePosition += framesRead;
int32_t framesRead = mSink->read(mAppBuffer.get(), flowgraph::kDefaultBufferSize);
if (framesRead <= 0) break;
// Write to a block adapter, which will call the destination whenever it has enough data.
int32_t bytesRead = mBlockWriter.write(mAppBuffer.get(),
@ -209,7 +261,7 @@ int32_t DataConversionFlowGraph::write(void *inputBuffer, int32_t numFrames) {
int32_t DataConversionFlowGraph::onProcessFixedBlock(uint8_t *buffer, int32_t numBytes) {
int32_t numFrames = numBytes / mFilterStream->getBytesPerFrame();
mCallbackResult = mFilterStream->getCallback()->onAudioReady(mFilterStream, buffer, numFrames);
mCallbackResult = mFilterStream->getDataCallback()->onAudioReady(mFilterStream, buffer, numFrames);
// TODO handle STOP from callback, process data remaining in the block adapter
return numBytes;
}

View File

@ -21,7 +21,9 @@
#include <stdint.h>
#include <sys/types.h>
#include <flowgraph/ChannelCountConverter.h>
#include <flowgraph/MonoToMultiConverter.h>
#include <flowgraph/MultiToMonoConverter.h>
#include <flowgraph/SampleRateConverter.h>
#include <oboe/Definitions.h>
#include "AudioSourceCaller.h"
@ -67,7 +69,9 @@ public:
private:
std::unique_ptr<flowgraph::FlowGraphSourceBuffered> mSource;
std::unique_ptr<AudioSourceCaller> mSourceCaller;
std::unique_ptr<flowgraph::MonoToMultiConverter> mChannelConverter;
std::unique_ptr<flowgraph::MonoToMultiConverter> mMonoToMultiConverter;
std::unique_ptr<flowgraph::MultiToMonoConverter> mMultiToMonoConverter;
std::unique_ptr<flowgraph::ChannelCountConverter> mChannelCountConverter;
std::unique_ptr<resampler::MultiChannelResampler> mResampler;
std::unique_ptr<flowgraph::SampleRateConverter> mRateConverter;
std::unique_ptr<flowgraph::FlowGraphSink> mSink;
@ -76,8 +80,6 @@ private:
DataCallbackResult mCallbackResult = DataCallbackResult::Continue;
AudioStream *mFilterStream = nullptr;
std::unique_ptr<uint8_t[]> mAppBuffer;
int64_t mFramePosition = 0;
};
}

View File

@ -16,6 +16,7 @@
#include <memory>
#include "OboeDebug.h"
#include "FilterAudioStream.h"
using namespace oboe;
@ -47,7 +48,7 @@ Result FilterAudioStream::configureFlowGraph() {
AudioStream *sourceStream = isOutput ? this : mChildStream.get();
AudioStream *sinkStream = isOutput ? mChildStream.get() : this;
mRateScaler = ((double) sourceStream->getSampleRate()) / sinkStream->getSampleRate();
mRateScaler = ((double) getSampleRate()) / mChildStream->getSampleRate();
return mFlowGraph->configure(sourceStream, sinkStream);
}
@ -90,3 +91,16 @@ ResultWithValue<int32_t> FilterAudioStream::read(void *buffer,
return ResultWithValue<int32_t>::createBasedOnSign(framesRead);
}
// Child-stream data callback: pumps the conversion flowgraph.
// For OUTPUT streams we pull converted data out of the graph into the
// child's buffer; for INPUT streams we push the captured data into the
// graph. A short count means the graph could not fill/consume a whole
// burst, so we tell the child stream to stop; otherwise we forward the
// result that the app's callback returned from inside the graph.
DataCallbackResult FilterAudioStream::onAudioReady(AudioStream *oboeStream,
                                                   void *audioData,
                                                   int32_t numFrames) {
    const bool isOutput = (oboeStream->getDirection() == Direction::Output);
    const int32_t framesHandled = isOutput
            ? mFlowGraph->read(audioData, numFrames, 0 /* timeout */)
            : mFlowGraph->write(audioData, numFrames);
    if (framesHandled < numFrames) {
        return DataCallbackResult::Stop;
    }
    return mFlowGraph->getDataCallbackResult();
}

View File

@ -42,8 +42,11 @@ public:
: AudioStream(builder)
, mChildStream(childStream) {
// Intercept the callback if used.
if (builder.getCallback() != nullptr) {
mStreamCallback = mChildStream->swapCallback(this);
if (builder.isErrorCallbackSpecified()) {
mErrorCallback = mChildStream->swapErrorCallback(this);
}
if (builder.isDataCallbackSpecified()) {
mDataCallback = mChildStream->swapDataCallback(this);
} else {
const int size = childStream->getFramesPerBurst() * childStream->getBytesPerFrame();
mBlockingBuffer = std::make_unique<uint8_t[]>(size);
@ -52,6 +55,9 @@ public:
// Copy parameters that may not match builder.
mBufferCapacityInFrames = mChildStream->getBufferCapacityInFrames();
mPerformanceMode = mChildStream->getPerformanceMode();
mInputPreset = mChildStream->getInputPreset();
mFramesPerBurst = mChildStream->getFramesPerBurst();
mDeviceId = mChildStream->getDeviceId();
}
virtual ~FilterAudioStream() = default;
@ -109,7 +115,7 @@ public:
int32_t numFrames,
int64_t timeoutNanoseconds) override;
StreamState getState() const override {
StreamState getState() override {
return mChildStream->getState();
}
@ -124,10 +130,6 @@ public:
return mChildStream->isXRunCountSupported();
}
int32_t getFramesPerBurst() override {
return mChildStream->getFramesPerBurst();
}
AudioApi getAudioApi() const override {
return mChildStream->getAudioApi();
}
@ -155,7 +157,7 @@ public:
return mBufferSizeInFrames;
}
ResultWithValue<int32_t> getXRunCount() const override {
ResultWithValue<int32_t> getXRunCount() override {
return mChildStream->getXRunCount();
}
@ -169,38 +171,45 @@ public:
int64_t *timeNanoseconds) override {
int64_t childPosition = 0;
Result result = mChildStream->getTimestamp(clockId, &childPosition, timeNanoseconds);
*framePosition = childPosition * mRateScaler;
// It is OK if framePosition is null.
if (framePosition) {
*framePosition = childPosition * mRateScaler;
}
return result;
}
DataCallbackResult onAudioReady(AudioStream *oboeStream,
void *audioData,
int32_t numFrames) override {
int32_t framesProcessed;
if (oboeStream->getDirection() == Direction::Output) {
framesProcessed = mFlowGraph->read(audioData, numFrames, 0 /* timeout */);
} else {
framesProcessed = mFlowGraph->write(audioData, numFrames);
int32_t numFrames) override;
bool onError(AudioStream * /*audioStream*/, Result error) override {
if (mErrorCallback != nullptr) {
return mErrorCallback->onError(this, error);
}
return (framesProcessed < numFrames)
? DataCallbackResult::Stop
: mFlowGraph->getDataCallbackResult();
return false;
}
void onErrorBeforeClose(AudioStream *oboeStream, Result error) override {
if (mStreamCallback != nullptr) {
mStreamCallback->onErrorBeforeClose(this, error);
void onErrorBeforeClose(AudioStream * /*oboeStream*/, Result error) override {
if (mErrorCallback != nullptr) {
mErrorCallback->onErrorBeforeClose(this, error);
}
}
void onErrorAfterClose(AudioStream *oboeStream, Result error) override {
void onErrorAfterClose(AudioStream * /*oboeStream*/, Result error) override {
// Close this parent stream because the callback will only close the child.
AudioStream::close();
if (mStreamCallback != nullptr) {
mStreamCallback->onErrorAfterClose(this, error);
if (mErrorCallback != nullptr) {
mErrorCallback->onErrorAfterClose(this, error);
}
}
/**
* @return last result passed from an error callback
*/
oboe::Result getLastErrorCallbackResult() const override {
return mChildStream->getLastErrorCallbackResult();
}
private:
std::unique_ptr<AudioStream> mChildStream; // this stream wraps the child stream

View File

@ -64,6 +64,9 @@ Result LatencyTuner::tune() {
// or was from stream->getBufferCapacityInFrames())
if (requestedBufferSize > mMaxBufferSize) requestedBufferSize = mMaxBufferSize;
// Note that this will not allocate more memory. It simply determines
// how much of the existing buffer capacity will be used. The size will be
// clipped to the bufferCapacity by AAudio.
auto setBufferResult = mStream.setBufferSizeInFrames(requestedBufferSize);
if (setBufferResult != Result::OK) {
result = setBufferResult;

View File

@ -0,0 +1,36 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "oboe/OboeExtensions.h"
#include "aaudio/AAudioExtensions.h"
using namespace oboe;
// Thin static wrappers that expose the internal AAudioExtensions singleton
// through the public oboe::OboeExtensions API. All logic lives in
// AAudioExtensions; these functions only forward.

// Returns true if this device/OS build supports the AAudio MMAP data path.
bool OboeExtensions::isMMapSupported(){
    return AAudioExtensions::getInstance().isMMapSupported();
}

// Returns true if MMAP is currently enabled (it may be supported but disabled).
bool OboeExtensions::isMMapEnabled(){
    return AAudioExtensions::getInstance().isMMapEnabled();
}

// Enables or disables MMAP globally; returns an error code from the
// underlying extension call (0 on success).
int32_t OboeExtensions::setMMapEnabled(bool enabled){
    return AAudioExtensions::getInstance().setMMapEnabled(enabled);
}

// Returns true if the given open stream is actually using the MMAP path.
bool OboeExtensions::isMMapUsed(oboe::AudioStream *oboeStream){
    return AAudioExtensions::getInstance().isMMapUsed(oboeStream);
}

View File

@ -17,6 +17,7 @@
#include <oboe/AudioStreamBuilder.h>
#include <oboe/Oboe.h>
#include "OboeDebug.h"
#include "QuirksManager.h"
using namespace oboe;
@ -51,38 +52,144 @@ int32_t QuirksManager::DeviceQuirks::clipBufferSize(AudioStream &stream,
return adjustedSize;
}
class SamsungDeviceQuirks : public QuirksManager::DeviceQuirks {
bool QuirksManager::DeviceQuirks::isAAudioMMapPossible(const AudioStreamBuilder &builder) const {
bool isSampleRateCompatible =
builder.getSampleRate() == oboe::Unspecified
|| builder.getSampleRate() == kCommonNativeRate
|| builder.getSampleRateConversionQuality() != SampleRateConversionQuality::None;
return builder.getPerformanceMode() == PerformanceMode::LowLatency
&& isSampleRateCompatible
&& builder.getChannelCount() <= kChannelCountStereo;
}
bool QuirksManager::DeviceQuirks::shouldConvertFloatToI16ForOutputStreams() {
std::string productManufacturer = getPropertyString("ro.product.manufacturer");
if (getSdkVersion() < __ANDROID_API_L__) {
return true;
} else if ((productManufacturer == "vivo") && (getSdkVersion() < __ANDROID_API_M__)) {
return true;
}
return false;
}
/**
* This is for Samsung Exynos quirks. Samsung Mobile uses Qualcomm chips so
* the QualcommDeviceQuirks would apply.
*/
class SamsungExynosDeviceQuirks : public QuirksManager::DeviceQuirks {
public:
SamsungDeviceQuirks() {
std::string arch = getPropertyString("ro.arch");
isExynos = (arch.rfind("exynos", 0) == 0); // starts with?
SamsungExynosDeviceQuirks() {
std::string chipname = getPropertyString("ro.hardware.chipname");
isExynos9810 = (chipname == "exynos9810");
isExynos990 = (chipname == "exynos990");
isExynos850 = (chipname == "exynos850");
mBuildChangelist = getPropertyInteger("ro.build.changelist", 0);
}
virtual ~SamsungDeviceQuirks() = default;
virtual ~SamsungExynosDeviceQuirks() = default;
int32_t getExclusiveBottomMarginInBursts() const override {
// TODO Make this conditional on build version when MMAP timing improves.
return isExynos ? kBottomMarginExynos : kBottomMarginOther;
return kBottomMargin;
}
int32_t getExclusiveTopMarginInBursts() const override {
return kTopMargin;
}
// See Oboe issues #824 and #1247 for more information.
bool isMonoMMapActuallyStereo() const override {
return isExynos9810 || isExynos850; // TODO We can make this version specific if it gets fixed.
}
bool isAAudioMMapPossible(const AudioStreamBuilder &builder) const override {
return DeviceQuirks::isAAudioMMapPossible(builder)
// Samsung says they use Legacy for Camcorder
&& builder.getInputPreset() != oboe::InputPreset::Camcorder;
}
bool isMMapSafe(const AudioStreamBuilder &builder) override {
const bool isInput = builder.getDirection() == Direction::Input;
// This detects b/159066712 , S20 LSI has corrupt low latency audio recording
// and turns off MMAP.
// See also https://github.com/google/oboe/issues/892
bool isRecordingCorrupted = isInput
&& isExynos990
&& mBuildChangelist < 19350896;
// Certain S9+ builds record silence when using MMAP and not using the VoiceCommunication
// preset.
// See https://github.com/google/oboe/issues/1110
bool wouldRecordSilence = isInput
&& isExynos9810
&& mBuildChangelist <= 18847185
&& (builder.getInputPreset() != InputPreset::VoiceCommunication);
if (wouldRecordSilence){
LOGI("QuirksManager::%s() Requested stream configuration would result in silence on "
"this device. Switching off MMAP.", __func__);
}
return !isRecordingCorrupted && !wouldRecordSilence;
}
private:
// Stay farther away from DSP position on Exynos devices.
static constexpr int32_t kBottomMarginExynos = 2;
static constexpr int32_t kBottomMarginOther = 1;
static constexpr int32_t kBottomMargin = 2;
static constexpr int32_t kTopMargin = 1;
bool isExynos = false;
bool isExynos9810 = false;
bool isExynos990 = false;
bool isExynos850 = false;
int mBuildChangelist = 0;
};
// Device quirks for Qualcomm SoCs (may include Samsung Mobile devices,
// which use Qualcomm chips).
class QualcommDeviceQuirks : public QuirksManager::DeviceQuirks {
public:
    QualcommDeviceQuirks() {
        // NOTE(review): compares against "SDM8150"; some builds may report the
        // SoC model as "SM8150" — confirm against actual ro.soc.model values.
        std::string modelName = getPropertyString("ro.soc.model");
        isSM8150 = (modelName == "SDM8150");
    }

    virtual ~QualcommDeviceQuirks() = default;

    // Keep one burst of margin above the DSP position for EXCLUSIVE streams.
    int32_t getExclusiveBottomMarginInBursts() const override {
        return kBottomMargin;
    }

    // MMAP is reported as supported but broken on SM8150 running Android P
    // or earlier; disable it there.
    bool isMMapSafe(const AudioStreamBuilder &builder) override {
        // See https://github.com/google/oboe/issues/1121#issuecomment-897957749
        bool isMMapBroken = false;
        if (isSM8150 && (getSdkVersion() <= __ANDROID_API_P__)) {
            LOGI("QuirksManager::%s() MMAP not actually supported on this chip."
                 " Switching off MMAP.", __func__);
            isMMapBroken = true;
        }
        return !isMMapBroken;
    }

private:
    bool isSM8150 = false;
    static constexpr int32_t kBottomMargin = 1;
};
QuirksManager::QuirksManager() {
std::string manufacturer = getPropertyString("ro.product.manufacturer");
if (manufacturer == "samsung") {
mDeviceQuirks = std::make_unique<SamsungDeviceQuirks>();
} else {
mDeviceQuirks = std::make_unique<DeviceQuirks>();
std::string productManufacturer = getPropertyString("ro.product.manufacturer");
if (productManufacturer == "samsung") {
std::string arch = getPropertyString("ro.arch");
bool isExynos = (arch.rfind("exynos", 0) == 0); // starts with?
if (isExynos) {
mDeviceQuirks = std::make_unique<SamsungExynosDeviceQuirks>();
}
}
if (!mDeviceQuirks) {
std::string socManufacturer = getPropertyString("ro.soc.manufacturer");
if (socManufacturer == "Qualcomm") {
// This may include Samsung Mobile devices.
mDeviceQuirks = std::make_unique<QualcommDeviceQuirks>();
} else {
mDeviceQuirks = std::make_unique<DeviceQuirks>();
}
}
}
@ -94,7 +201,30 @@ bool QuirksManager::isConversionNeeded(
const bool isInput = builder.getDirection() == Direction::Input;
const bool isFloat = builder.getFormat() == AudioFormat::Float;
// If a SAMPLE RATE is specified for low latency then let the native code choose an optimal rate.
// There are multiple bugs involving using callback with a specified callback size.
// Issue #778: O to Q had a problem with Legacy INPUT streams for FLOAT streams
// and a specified callback size. It would assert because of a bad buffer size.
//
// Issue #973: O to R had a problem with Legacy output streams using callback and a specified callback size.
// An AudioTrack stream could still be running when the AAudio FixedBlockReader was closed.
// Internally b/161914201#comment25
//
// Issue #983: O to R would glitch if the framesPerCallback was too small.
//
// Most of these problems were related to Legacy stream. MMAP was OK. But we don't
// know if we will get an MMAP stream. So, to be safe, just do the conversion in Oboe.
if (OboeGlobals::areWorkaroundsEnabled()
&& builder.willUseAAudio()
&& builder.isDataCallbackSpecified()
&& builder.getFramesPerDataCallback() != 0
&& getSdkVersion() <= __ANDROID_API_R__) {
LOGI("QuirksManager::%s() avoid setFramesPerCallback(n>0)", __func__);
childBuilder.setFramesPerCallback(oboe::Unspecified);
conversionNeeded = true;
}
// If a SAMPLE RATE is specified for low latency, let the native code choose an optimal rate.
// This isn't really a workaround. It is an Oboe feature that is convenient to place here.
// TODO There may be a problem if the devices supports low latency
// at a higher rate than the default.
if (builder.getSampleRate() != oboe::Unspecified
@ -107,7 +237,8 @@ bool QuirksManager::isConversionNeeded(
// Data Format
// OpenSL ES and AAudio before P do not support FAST path for FLOAT capture.
if (isFloat
if (OboeGlobals::areWorkaroundsEnabled()
&& isFloat
&& isInput
&& builder.isFormatConversionAllowed()
&& isLowLatency
@ -115,24 +246,59 @@ bool QuirksManager::isConversionNeeded(
) {
childBuilder.setFormat(AudioFormat::I16); // needed for FAST track
conversionNeeded = true;
LOGI("QuirksManager::%s() forcing internal format to I16 for low latency", __func__);
}
// Channel Count
if (builder.getChannelCount() != oboe::Unspecified
&& builder.isChannelConversionAllowed()) {
if (OboeGlobals::areWorkaroundsEnabled()
&& builder.getChannelCount() == 2 // stereo?
&& isInput
&& isLowLatency
&& (!builder.willUseAAudio() && (getSdkVersion() == __ANDROID_API_O__))) {
// Workaround for heap size regression in O.
// b/66967812 AudioRecord does not allow FAST track for stereo capture in O
childBuilder.setChannelCount(1);
conversionNeeded = true;
}
// Note that MMAP does not support mono in 8.1. But that would only matter on Pixel 1
// phones and they have almost all been updated to 9.0.
// Add quirk for float output when needed.
if (OboeGlobals::areWorkaroundsEnabled()
&& isFloat
&& !isInput
&& builder.isFormatConversionAllowed()
&& mDeviceQuirks->shouldConvertFloatToI16ForOutputStreams()
) {
childBuilder.setFormat(AudioFormat::I16);
conversionNeeded = true;
LOGI("QuirksManager::%s() float was requested but not supported on pre-L devices "
"and some devices like Vivo devices may have issues on L devices, "
"creating an underlying I16 stream and using format conversion to provide a float "
"stream", __func__);
}
// Channel Count conversions
if (OboeGlobals::areWorkaroundsEnabled()
&& builder.isChannelConversionAllowed()
&& builder.getChannelCount() == kChannelCountStereo
&& isInput
&& isLowLatency
&& (!builder.willUseAAudio() && (getSdkVersion() == __ANDROID_API_O__))
) {
// Workaround for heap size regression in O.
// b/66967812 AudioRecord does not allow FAST track for stereo capture in O
childBuilder.setChannelCount(kChannelCountMono);
conversionNeeded = true;
LOGI("QuirksManager::%s() using mono internally for low latency on O", __func__);
} else if (OboeGlobals::areWorkaroundsEnabled()
&& builder.getChannelCount() == kChannelCountMono
&& isInput
&& mDeviceQuirks->isMonoMMapActuallyStereo()
&& builder.willUseAAudio()
// Note: we might use this workaround on a device that supports
// MMAP but will use Legacy for this stream. But this will only happen
// on devices that have the broken mono.
&& mDeviceQuirks->isAAudioMMapPossible(builder)
) {
// Workaround for mono actually running in stereo mode.
childBuilder.setChannelCount(kChannelCountStereo); // Use stereo and extract first channel.
conversionNeeded = true;
LOGI("QuirksManager::%s() using stereo internally to avoid broken mono", __func__);
}
// Note that MMAP does not support mono in 8.1. But that would only matter on Pixel 1
// phones and they have almost all been updated to 9.0.
return conversionNeeded;
}
// Returns true when MMAP may be used for this stream configuration.
// With workarounds globally disabled, every configuration is considered
// safe; otherwise the decision is delegated to the per-device quirks.
bool QuirksManager::isMMapSafe(AudioStreamBuilder &builder) {
    return !OboeGlobals::areWorkaroundsEnabled()
            || mDeviceQuirks->isMMapSafe(builder);
}

View File

@ -21,6 +21,10 @@
#include <oboe/AudioStreamBuilder.h>
#include <aaudio/AudioStreamAAudio.h>
#ifndef __ANDROID_API_R__
#define __ANDROID_API_R__ 30
#endif
namespace oboe {
/**
@ -91,6 +95,20 @@ public:
return kDefaultTopMarginInBursts;
}
// On some devices, you can open a mono stream but it is actually running in stereo!
virtual bool isMonoMMapActuallyStereo() const {
return false;
}
virtual bool isAAudioMMapPossible(const AudioStreamBuilder &builder) const;
virtual bool isMMapSafe(const AudioStreamBuilder & /* builder */ ) {
return true;
}
// On some devices, Float does not work so it should be converted to I16.
static bool shouldConvertFloatToI16ForOutputStreams();
static constexpr int32_t kDefaultBottomMarginInBursts = 0;
static constexpr int32_t kDefaultTopMarginInBursts = 0;
@ -98,10 +116,16 @@ public:
// b/129545119 | AAudio Legacy allows setBufferSizeInFrames too low
// Fixed in Q
static constexpr int32_t kLegacyBottomMarginInBursts = 1;
static constexpr int32_t kCommonNativeRate = 48000; // very typical native sample rate
};
bool isMMapSafe(AudioStreamBuilder &builder);
private:
static constexpr int32_t kChannelCountMono = 1;
static constexpr int32_t kChannelCountStereo = 2;
std::unique_ptr<DeviceQuirks> mDeviceQuirks{};
};

View File

@ -32,7 +32,8 @@ class SourceI16Caller : public AudioSourceCaller {
public:
SourceI16Caller(int32_t channelCount, int32_t framesPerCallback)
: AudioSourceCaller(channelCount, framesPerCallback, sizeof(int16_t)) {
mConversionBuffer = std::make_unique<int16_t[]>(channelCount * output.getFramesPerBuffer());
mConversionBuffer = std::make_unique<int16_t[]>(static_cast<size_t>(channelCount)
* static_cast<size_t>(output.getFramesPerBuffer()));
}
int32_t onProcess(int32_t numFrames) override;

View File

@ -0,0 +1,56 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <algorithm>
#include <unistd.h>
#include "flowgraph/FlowGraphNode.h"
#include "SourceI24Caller.h"
#if FLOWGRAPH_ANDROID_INTERNAL
#include <audio_utils/primitives.h>
#endif
using namespace oboe;
using namespace flowgraph;
// Pull packed 24-bit PCM from the block reader and convert it to float
// in the node's output buffer. Returns the number of whole frames produced.
int32_t SourceI24Caller::onProcess(int32_t numFrames) {
    const int32_t bytesPerFrame = mStream->getBytesPerFrame();
    const int32_t bytesRequested = bytesPerFrame * numFrames;
    const int32_t bytesRead = mBlockReader.read((uint8_t *) mConversionBuffer.get(),
                                                bytesRequested);
    const int32_t framesRead = bytesRead / bytesPerFrame;

    float *floatData = output.getBuffer();
    const uint8_t *byteData = mConversionBuffer.get();
    const int32_t numSamples = framesRead * output.getSamplesPerFrame();

#if FLOWGRAPH_ANDROID_INTERNAL
    memcpy_to_float_from_p24(floatData, byteData, numSamples);
#else
    static const float scale = 1. / (float)(1UL << 31);
    for (int i = 0; i < numSamples; i++) {
        // Assemble each little-endian 3-byte sample into the TOP 24 bits of
        // an int32 so the sign bit lands in bit 31.
        int32_t assembled = byteData[2];
        assembled = (assembled << 8) | byteData[1];
        assembled = (assembled << 8) | byteData[0];
        assembled <<= 8; // shift to 32-bit alignment for correct sign
        byteData += kBytesPerI24Packed;
        *floatData++ = assembled * scale; // scale to range -1.0 to 1.0
    }
#endif
    return framesRead;
}

View File

@ -0,0 +1,53 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef OBOE_SOURCE_I24_CALLER_H
#define OBOE_SOURCE_I24_CALLER_H
#include <unistd.h>
#include <sys/types.h>
#include "flowgraph/FlowGraphNode.h"
#include "AudioSourceCaller.h"
#include "FixedBlockReader.h"
namespace oboe {
/**
* AudioSource that uses callback to get more data.
*/
class SourceI24Caller : public AudioSourceCaller {
public:
    // channelCount: samples per frame; framesPerCallback: frames requested
    // from the app per callback. Packed I24 uses 3 bytes per sample.
    SourceI24Caller(int32_t channelCount, int32_t framesPerCallback)
            : AudioSourceCaller(channelCount, framesPerCallback, kBytesPerI24Packed) {
        // Scratch buffer holding one output buffer's worth of packed I24 data.
        // size_t casts keep the product out of int32 overflow territory.
        mConversionBuffer = std::make_unique<uint8_t[]>(static_cast<size_t>(kBytesPerI24Packed)
                * static_cast<size_t>(channelCount)
                * static_cast<size_t>(output.getFramesPerBuffer()));
    }

    // Reads packed I24 data and converts it to float; see SourceI24Caller.cpp.
    int32_t onProcess(int32_t numFrames) override;

    const char *getName() override {
        return "SourceI24Caller";
    }

private:
    std::unique_ptr<uint8_t[]> mConversionBuffer;
    // Packed (no padding byte) 24-bit PCM: 3 bytes per sample.
    static constexpr int kBytesPerI24Packed = 3;
};
}
#endif //OBOE_SOURCE_I24_CALLER_H

View File

@ -0,0 +1,47 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <algorithm>
#include <unistd.h>
#include "flowgraph/FlowGraphNode.h"
#include "SourceI32Caller.h"
#if FLOWGRAPH_ANDROID_INTERNAL
#include <audio_utils/primitives.h>
#endif
using namespace oboe;
using namespace flowgraph;
// Pull 32-bit integer PCM from the block reader and convert it to float
// in the node's output buffer. Returns the number of whole frames produced.
int32_t SourceI32Caller::onProcess(int32_t numFrames) {
    int32_t numBytes = mStream->getBytesPerFrame() * numFrames;
    int32_t bytesRead = mBlockReader.read((uint8_t *) mConversionBuffer.get(), numBytes);
    int32_t framesRead = bytesRead / mStream->getBytesPerFrame();

    float *floatData = output.getBuffer();
    const int32_t *intData = mConversionBuffer.get();
    int32_t numSamples = framesRead * output.getSamplesPerFrame();

#if FLOWGRAPH_ANDROID_INTERNAL
    // Fixed: previously passed "shortData", an undeclared identifier
    // copy-pasted from the I16 caller, which broke compilation whenever
    // FLOWGRAPH_ANDROID_INTERNAL was enabled. The buffer here is intData.
    memcpy_to_float_from_i32(floatData, intData, numSamples);
#else
    for (int i = 0; i < numSamples; i++) {
        *floatData++ = *intData++ * kScale; // scale to range -1.0 to 1.0
    }
#endif
    return framesRead;
}

View File

@ -0,0 +1,53 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef OBOE_SOURCE_I32_CALLER_H
#define OBOE_SOURCE_I32_CALLER_H
#include <memory.h>
#include <unistd.h>
#include <sys/types.h>
#include "flowgraph/FlowGraphNode.h"
#include "AudioSourceCaller.h"
#include "FixedBlockReader.h"
namespace oboe {
/**
* AudioSource that uses callback to get more data.
*/
class SourceI32Caller : public AudioSourceCaller {
public:
    // channelCount: samples per frame; framesPerCallback: frames requested
    // from the app per callback. Each sample is a 4-byte int32.
    SourceI32Caller(int32_t channelCount, int32_t framesPerCallback)
            : AudioSourceCaller(channelCount, framesPerCallback, sizeof(int32_t)) {
        // Scratch buffer holding one output buffer's worth of I32 samples.
        // size_t casts keep the product out of int32 overflow territory.
        mConversionBuffer = std::make_unique<int32_t[]>(static_cast<size_t>(channelCount)
                * static_cast<size_t>(output.getFramesPerBuffer()));
    }

    // Reads I32 data and converts it to float; see SourceI32Caller.cpp.
    int32_t onProcess(int32_t numFrames) override;

    const char *getName() override {
        return "SourceI32Caller";
    }

private:
    std::unique_ptr<int32_t[]> mConversionBuffer;
    // Maps INT32_MIN..INT32_MAX onto roughly -1.0..1.0.
    static constexpr float kScale = 1.0 / (1UL << 31);
};
}
#endif //OBOE_SOURCE_I32_CALLER_H

View File

@ -60,6 +60,12 @@ int32_t convertFormatToSizeInBytes(AudioFormat format) {
case AudioFormat::Float:
size = sizeof(float);
break;
case AudioFormat::I24:
size = 3; // packed 24-bit data
break;
case AudioFormat::I32:
size = sizeof(int32_t);
break;
default:
break;
}
@ -98,6 +104,8 @@ const char *convertToText<AudioFormat>(AudioFormat format) {
case AudioFormat::Unspecified: return "Unspecified";
case AudioFormat::I16: return "I16";
case AudioFormat::Float: return "Float";
case AudioFormat::I24: return "I24";
case AudioFormat::I32: return "I32";
default: return "Unrecognized format";
}
}
@ -183,7 +191,7 @@ const char *convertToText<AudioStream*>(AudioStream* stream) {
<<"BufferCapacity: "<<stream->getBufferCapacityInFrames()<<std::endl
<<"BufferSize: "<<stream->getBufferSizeInFrames()<<std::endl
<<"FramesPerBurst: "<< stream->getFramesPerBurst()<<std::endl
<<"FramesPerCallback: "<<stream->getFramesPerCallback()<<std::endl
<<"FramesPerDataCallback: "<<stream->getFramesPerDataCallback()<<std::endl
<<"SampleRate: "<<stream->getSampleRate()<<std::endl
<<"ChannelCount: "<<stream->getChannelCount()<<std::endl
<<"Format: "<<oboe::convertToText(stream->getFormat())<<std::endl
@ -302,4 +310,8 @@ int getSdkVersion() {
return sCachedSdkVersion;
}
int getChannelCountFromChannelMask(ChannelMask channelMask) {
return __builtin_popcount(static_cast<uint32_t>(channelMask));
}
}// namespace oboe

View File

@ -14,18 +14,14 @@
* limitations under the License.
*/
#include <stdint.h>
#include <time.h>
#include <memory.h>
#include <cassert>
#include <algorithm>
#include <memory.h>
#include <stdint.h>
#include "common/OboeDebug.h"
#include "fifo/FifoControllerBase.h"
#include "oboe/FifoControllerBase.h"
#include "fifo/FifoController.h"
#include "fifo/FifoControllerIndirect.h"
#include "fifo/FifoBuffer.h"
#include "common/AudioClock.h"
#include "oboe/FifoBuffer.h"
namespace oboe {

View File

@ -14,9 +14,8 @@
* limitations under the License.
*/
#include <cassert>
#include <sys/types.h>
#include "FifoControllerBase.h"
#include <stdint.h>
#include "FifoController.h"
namespace oboe {

View File

@ -17,9 +17,10 @@
#ifndef NATIVEOBOE_FIFOCONTROLLER_H
#define NATIVEOBOE_FIFOCONTROLLER_H
#include <sys/types.h>
#include "FifoControllerBase.h"
#include <atomic>
#include <stdint.h>
#include "oboe/FifoControllerBase.h"
namespace oboe {

View File

@ -14,14 +14,11 @@
* limitations under the License.
*/
#include "FifoControllerBase.h"
#include <cassert>
#include <sys/types.h>
#include <algorithm>
#include "FifoControllerBase.h"
#include <cassert>
#include <stdint.h>
#include "common/OboeDebug.h"
#include "oboe/FifoControllerBase.h"
namespace oboe {

View File

@ -14,6 +14,7 @@
* limitations under the License.
*/
#include <stdint.h>
#include "FifoControllerIndirect.h"

View File

@ -17,8 +17,10 @@
#ifndef NATIVEOBOE_FIFOCONTROLLERINDIRECT_H
#define NATIVEOBOE_FIFOCONTROLLERINDIRECT_H
#include "FifoControllerBase.h"
#include <atomic>
#include <stdint.h>
#include "oboe/FifoControllerBase.h"
namespace oboe {

View File

@ -0,0 +1,52 @@
/*
* Copyright 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <unistd.h>
#include "FlowGraphNode.h"
#include "ChannelCountConverter.h"
using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
// The input and output ports may have different channel counts;
// onProcess() maps between them frame by frame.
ChannelCountConverter::ChannelCountConverter(
        int32_t inputChannelCount,
        int32_t outputChannelCount)
        : input(*this, inputChannelCount)
        , output(*this, outputChannelCount) {
}

ChannelCountConverter::~ChannelCountConverter() = default;
/**
 * Copy one output frame's channels from the input frame's channels.
 * If the output has more channels than the input, input channels are reused
 * (wrapped); if it has fewer, trailing input channels are dropped.
 *
 * @param numFrames number of frames to convert
 * @return numFrames
 */
int32_t ChannelCountConverter::onProcess(int32_t numFrames) {
    const float *inputBuffer = input.getBuffer();
    float *outputBuffer = output.getBuffer();
    const int32_t inputChannelCount = input.getSamplesPerFrame();
    const int32_t outputChannelCount = output.getSamplesPerFrame();
    for (int i = 0; i < numFrames; i++) {
        int inputChannel = 0;
        for (int outputChannel = 0; outputChannel < outputChannelCount; outputChannel++) {
            // Copy input channels to output channels.
            // Wrap if we run out of inputs.
            // Discard if we run out of outputs.
            outputBuffer[outputChannel] = inputBuffer[inputChannel];
            // BUG FIX: wrap after the LAST valid index (inputChannelCount - 1).
            // The previous test (inputChannel == inputChannelCount) wrapped one
            // step too late, so inputBuffer[inputChannelCount] was read out of
            // bounds whenever outputChannelCount > inputChannelCount
            // (e.g. mono -> stereo).
            inputChannel = (inputChannel + 1 == inputChannelCount)
                    ? 0 : inputChannel + 1;
        }
        inputBuffer += inputChannelCount;
        outputBuffer += outputChannelCount;
    }
    return numFrames;
}

View File

@ -0,0 +1,52 @@
/*
* Copyright 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef FLOWGRAPH_CHANNEL_COUNT_CONVERTER_H
#define FLOWGRAPH_CHANNEL_COUNT_CONVERTER_H
#include <unistd.h>
#include <sys/types.h>
#include "FlowGraphNode.h"
namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/**
 * Change the number of channels without mixing.
* When increasing the channel count, duplicate input channels.
* When decreasing the channel count, drop input channels.
*/
class ChannelCountConverter : public FlowGraphNode {
public:
    // The two counts need not match; onProcess() maps between them.
    explicit ChannelCountConverter(
            int32_t inputChannelCount,
            int32_t outputChannelCount);

    virtual ~ChannelCountConverter();

    // Copies each frame, duplicating (wrapping) input channels when the output
    // has more channels and dropping trailing ones when it has fewer.
    int32_t onProcess(int32_t numFrames) override;

    const char *getName() override {
        return "ChannelCountConverter";
    }

    FlowGraphPortFloatInput input;
    FlowGraphPortFloatOutput output;
};
} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_CHANNEL_COUNT_CONVERTER_H

View File

@ -19,7 +19,7 @@
#include "FlowGraphNode.h"
#include "ClipToRange.h"
using namespace flowgraph;
using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
ClipToRange::ClipToRange(int32_t channelCount)
: FlowGraphFilter(channelCount) {

View File

@ -23,7 +23,7 @@
#include "FlowGraphNode.h"
namespace flowgraph {
namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
// This is 3 dB, (10^(3/20)), to match the maximum headroom in AudioTrack for float data.
// It is designed to allow occasional transient peaks.
@ -63,6 +63,6 @@ private:
float mMaximum = kDefaultMaxHeadroom;
};
} /* namespace flowgraph */
} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_CLIP_TO_RANGE_H

View File

@ -19,26 +19,24 @@
#include <sys/types.h>
#include "FlowGraphNode.h"
using namespace flowgraph;
using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
/***************************************************************************/
int32_t FlowGraphNode::pullData(int64_t framePosition, int32_t numFrames) {
int32_t FlowGraphNode::pullData(int32_t numFrames, int64_t callCount) {
int32_t frameCount = numFrames;
// Prevent recursion and multiple execution of nodes.
if (framePosition <= mLastFramePosition && !mBlockRecursion) {
mBlockRecursion = true; // for cyclic graphs
if (callCount > mLastCallCount) {
mLastCallCount = callCount;
if (mDataPulledAutomatically) {
// Pull from all the upstream nodes.
for (auto &port : mInputPorts) {
// TODO fix bug of leaving unused data in some ports if using multiple AudioSource
frameCount = port.get().pullData(framePosition, frameCount);
frameCount = port.get().pullData(callCount, frameCount);
}
}
if (frameCount > 0) {
frameCount = onProcess(frameCount);
}
mLastFramePosition += frameCount;
mBlockRecursion = false;
mLastFrameCount = frameCount;
} else {
frameCount = mLastFrameCount;
@ -60,6 +58,7 @@ void FlowGraphNode::pullReset() {
void FlowGraphNode::reset() {
mLastFrameCount = 0;
mLastCallCount = kInitialCallCount;
}
/***************************************************************************/
@ -69,14 +68,14 @@ FlowGraphPortFloat::FlowGraphPortFloat(FlowGraphNode &parent,
: FlowGraphPort(parent, samplesPerFrame)
, mFramesPerBuffer(framesPerBuffer)
, mBuffer(nullptr) {
size_t numFloats = static_cast<size_t>(framesPerBuffer * getSamplesPerFrame());
size_t numFloats = static_cast<size_t>(framesPerBuffer) * getSamplesPerFrame();
mBuffer = std::make_unique<float[]>(numFloats);
}
/***************************************************************************/
int32_t FlowGraphPortFloatOutput::pullData(int64_t framePosition, int32_t numFrames) {
int32_t FlowGraphPortFloatOutput::pullData(int64_t callCount, int32_t numFrames) {
numFrames = std::min(getFramesPerBuffer(), numFrames);
return mContainingNode.pullData(framePosition, numFrames);
return mContainingNode.pullData(numFrames, callCount);
}
void FlowGraphPortFloatOutput::pullReset() {
@ -93,10 +92,10 @@ void FlowGraphPortFloatOutput::disconnect(FlowGraphPortFloatInput *port) {
}
/***************************************************************************/
int32_t FlowGraphPortFloatInput::pullData(int64_t framePosition, int32_t numFrames) {
int32_t FlowGraphPortFloatInput::pullData(int64_t callCount, int32_t numFrames) {
return (mConnected == nullptr)
? std::min(getFramesPerBuffer(), numFrames)
: mConnected->pullData(framePosition, numFrames);
: mConnected->pullData(callCount, numFrames);
}
void FlowGraphPortFloatInput::pullReset() {
if (mConnected != nullptr) mConnected->pullReset();
@ -109,3 +108,7 @@ float *FlowGraphPortFloatInput::getBuffer() {
return mConnected->getBuffer();
}
}
// Pull data through the graph using this node's own call counter.
// Each sink read is a new "call": incrementing the callCount lets every
// upstream node execute exactly once per pull (see FlowGraphNode::pullData,
// which skips nodes whose callCount has already been seen).
int32_t FlowGraphSink::pullData(int32_t numFrames) {
    return FlowGraphNode::pullData(numFrames, getLastCallCount() + 1);
}

View File

@ -38,11 +38,26 @@
// TODO Review use of raw pointers for connect(). Maybe use smart pointers but need to avoid
// run-time deallocation in audio thread.
// Set this to 1 if using it inside the Android framework.
// This code is kept here so that it can be moved easily between Oboe and AAudio.
#define FLOWGRAPH_ANDROID_INTERNAL 0
// Set flags FLOWGRAPH_ANDROID_INTERNAL and FLOWGRAPH_OUTER_NAMESPACE based on whether compiler
// flag __ANDROID_NDK__ is defined. __ANDROID_NDK__ should be defined in oboe and not aaudio.
namespace flowgraph {
#ifndef FLOWGRAPH_ANDROID_INTERNAL
#ifdef __ANDROID_NDK__
#define FLOWGRAPH_ANDROID_INTERNAL 0
#else
#define FLOWGRAPH_ANDROID_INTERNAL 1
#endif // __ANDROID_NDK__
#endif // FLOWGRAPH_ANDROID_INTERNAL
#ifndef FLOWGRAPH_OUTER_NAMESPACE
#ifdef __ANDROID_NDK__
#define FLOWGRAPH_OUTER_NAMESPACE oboe
#else
#define FLOWGRAPH_OUTER_NAMESPACE aaudio
#endif // __ANDROID_NDK__
#endif // FLOWGRAPH_OUTER_NAMESPACE
namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
// Default block size that can be overridden when the FlowGraphPortFloat is created.
// If it is too small then we will have too much overhead from switching between nodes.
@ -58,7 +73,7 @@ class FlowGraphPortFloatInput;
*/
class FlowGraphNode {
public:
FlowGraphNode() {}
FlowGraphNode() = default;
virtual ~FlowGraphNode() = default;
/**
@ -71,15 +86,17 @@ public:
virtual int32_t onProcess(int32_t numFrames) = 0;
/**
* If the framePosition is at or after the last frame position then call onProcess().
* If the callCount is at or after the previous callCount then call
* pullData on all of the upstreamNodes.
* Then call onProcess().
* This prevents infinite recursion in case of cyclic graphs.
* It also prevents nodes upstream from a branch from being executed twice.
*
* @param framePosition
* @param callCount
* @param numFrames
* @return number of frames valid
*/
int32_t pullData(int64_t framePosition, int32_t numFrames);
int32_t pullData(int32_t numFrames, int64_t callCount);
/**
* Recursively reset all the nodes in the graph, starting from a Sink.
@ -94,7 +111,7 @@ public:
virtual void reset();
void addInputPort(FlowGraphPort &port) {
mInputPorts.push_back(port);
mInputPorts.emplace_back(port);
}
bool isDataPulledAutomatically() const {
@ -118,12 +135,14 @@ public:
return "FlowGraph";
}
int64_t getLastFramePosition() {
return mLastFramePosition;
int64_t getLastCallCount() {
return mLastCallCount;
}
protected:
int64_t mLastFramePosition = 0;
static constexpr int64_t kInitialCallCount = -1;
int64_t mLastCallCount = kInitialCallCount;
std::vector<std::reference_wrapper<FlowGraphPort>> mInputPorts;
@ -149,6 +168,8 @@ public:
, mSamplesPerFrame(samplesPerFrame) {
}
virtual ~FlowGraphPort() = default;
// Ports are often declared public. So let's make them non-copyable.
FlowGraphPort(const FlowGraphPort&) = delete;
FlowGraphPort& operator=(const FlowGraphPort&) = delete;
@ -385,7 +406,7 @@ public:
FlowGraphPortFloatInput input;
/**
* Dummy processor. The work happens in the read() method.
* Do nothing. The work happens in the read() method.
*
* @param numFrames
* @return number of frames actually processed
@ -394,8 +415,15 @@ public:
return numFrames;
}
virtual int32_t read(int64_t framePosition, void *data, int32_t numFrames) = 0;
virtual int32_t read(void *data, int32_t numFrames) = 0;
protected:
/**
* Pull data through the graph using this nodes last callCount.
* @param numFrames
* @return
*/
int32_t pullData(int32_t numFrames);
};
/***************************************************************************/
@ -417,6 +445,6 @@ public:
FlowGraphPortFloatOutput output;
};
} /* namespace flowgraph */
} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif /* FLOWGRAPH_FLOW_GRAPH_NODE_H */

View File

@ -0,0 +1,55 @@
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef FLOWGRAPH_UTILITIES_H
#define FLOWGRAPH_UTILITIES_H
#include <unistd.h>
using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
class FlowgraphUtilities {
public:
// This was copied from audio_utils/primitives.h
/**
 * Convert a single-precision floating point value to a Q0.31 integer value.
 * Rounds to nearest, ties away from 0.
 *
 * Values outside the range [-1.0, 1.0) are properly clamped to -2147483648 and 2147483647,
 * including -Inf and +Inf. NaN values are considered undefined, and behavior may change
 * depending on hardware and future implementation of this function.
 *
 * @param f input sample in nominal range [-1.0, 1.0)
 * @return clamped Q0.31 representation of f
 */
static int32_t clamp32FromFloat(float f)
{
    // constexpr: evaluated at compile time, so there is no hidden
    // thread-safe static-initialization guard on every call.
    static constexpr float scale = static_cast<float>(1UL << 31);
    static constexpr float limpos = 1.;
    static constexpr float limneg = -1.;

    if (f <= limneg) {
        return INT32_MIN;
    } else if (f >= limpos) {
        return INT32_MAX;
    }
    f *= scale;

    /* integer conversion is through truncation (though int to float is not).
     * ensure that we round to nearest, ties away from 0.
     */
    return f > 0 ? f + 0.5 : f - 0.5;
}
};
#endif // FLOWGRAPH_UTILITIES_H

View File

@ -18,7 +18,7 @@
#include "ManyToMultiConverter.h"
using namespace flowgraph;
using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
ManyToMultiConverter::ManyToMultiConverter(int32_t channelCount)
: inputs(channelCount)

View File

@ -23,6 +23,8 @@
#include "FlowGraphNode.h"
namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/**
* Combine multiple mono inputs into one interleaved multi-channel output.
*/
@ -34,7 +36,7 @@ public:
int32_t onProcess(int numFrames) override;
void setEnabled(bool enabled) {}
void setEnabled(bool /*enabled*/) {}
std::vector<std::unique_ptr<flowgraph::FlowGraphPortFloatInput>> inputs;
flowgraph::FlowGraphPortFloatOutput output;
@ -46,4 +48,6 @@ public:
private:
};
} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_MANY_TO_MULTI_CONVERTER_H

View File

@ -0,0 +1,46 @@
/*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <unistd.h>
#include "MonoBlend.h"
using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
MonoBlend::MonoBlend(int32_t channelCount)
        : FlowGraphFilter(channelCount)
        // Reciprocal is precomputed (in double, then narrowed to float) so
        // onProcess() can multiply per frame instead of dividing.
        , mInvChannelCount(1. / channelCount)
{
}
/**
 * Average the channels of each input frame and write that average to every
 * channel of the corresponding output frame.
 *
 * @param numFrames number of frames to blend
 * @return numFrames
 */
int32_t MonoBlend::onProcess(int32_t numFrames) {
    const int32_t channelCount = output.getSamplesPerFrame();
    const float *inputBuffer = input.getBuffer();
    float *outputBuffer = output.getBuffer();

    // Loop counters changed from size_t to int32_t: the bounds (numFrames,
    // channelCount) are int32_t, and the project builds with -Wall -Wextra,
    // where the signed/unsigned comparison triggered -Wsign-compare warnings.
    for (int32_t i = 0; i < numFrames; ++i) {
        float accum = 0;
        // Sum one frame across all channels.
        for (int32_t j = 0; j < channelCount; ++j) {
            accum += *inputBuffer++;
        }
        accum *= mInvChannelCount;
        // Fan the average back out to every channel of the frame.
        for (int32_t j = 0; j < channelCount; ++j) {
            *outputBuffer++ = accum;
        }
    }

    return numFrames;
}

View File

@ -0,0 +1,48 @@
/*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef FLOWGRAPH_MONO_BLEND_H
#define FLOWGRAPH_MONO_BLEND_H
#include <sys/types.h>
#include <unistd.h>
#include "FlowGraphNode.h"
namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/**
* Combine data between multiple channels so each channel is an average
* of all channels.
*/
class MonoBlend : public FlowGraphFilter {
public:
    explicit MonoBlend(int32_t channelCount);

    virtual ~MonoBlend() = default;

    // Averages the channels of each input frame and writes that average to
    // every channel of the corresponding output frame.
    int32_t onProcess(int32_t numFrames) override;

    const char *getName() override {
        return "MonoBlend";
    }

private:
    // Cached 1/channelCount so onProcess() multiplies instead of divides.
    const float mInvChannelCount;
};
} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_MONO_BLEND

View File

@ -18,15 +18,13 @@
#include "FlowGraphNode.h"
#include "MonoToMultiConverter.h"
using namespace flowgraph;
using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
MonoToMultiConverter::MonoToMultiConverter(int32_t channelCount)
MonoToMultiConverter::MonoToMultiConverter(int32_t outputChannelCount)
: input(*this, 1)
, output(*this, channelCount) {
, output(*this, outputChannelCount) {
}
MonoToMultiConverter::~MonoToMultiConverter() { }
int32_t MonoToMultiConverter::onProcess(int32_t numFrames) {
const float *inputBuffer = input.getBuffer();
float *outputBuffer = output.getBuffer();

View File

@ -22,17 +22,17 @@
#include "FlowGraphNode.h"
namespace flowgraph {
namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/**
* Convert a monophonic stream to a multi-channel stream
* Convert a monophonic stream to a multi-channel interleaved stream
* with the same signal on each channel.
*/
class MonoToMultiConverter : public FlowGraphNode {
public:
explicit MonoToMultiConverter(int32_t channelCount);
explicit MonoToMultiConverter(int32_t outputChannelCount);
virtual ~MonoToMultiConverter();
virtual ~MonoToMultiConverter() = default;
int32_t onProcess(int32_t numFrames) override;
@ -44,6 +44,6 @@ public:
FlowGraphPortFloatOutput output;
};
} /* namespace flowgraph */
} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_MONO_TO_MULTI_CONVERTER_H

View File

@ -0,0 +1,47 @@
/*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <unistd.h>
#include "FlowGraphNode.h"
#include "MultiToManyConverter.h"
using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
// Creates one single-channel (mono) output port per channel of the
// interleaved input.
MultiToManyConverter::MultiToManyConverter(int32_t channelCount)
        : outputs(channelCount)
        , input(*this, channelCount) {
    for (int i = 0; i < channelCount; i++) {
        outputs[i] = std::make_unique<FlowGraphPortFloatOutput>(*this, 1);
    }
}

MultiToManyConverter::~MultiToManyConverter() = default;
/**
 * De-interleave the multi-channel input: channel ch of every frame is
 * written to the mono buffer of outputs[ch].
 *
 * @param numFrames number of frames to de-interleave
 * @return numFrames
 */
int32_t MultiToManyConverter::onProcess(int32_t numFrames) {
    const int32_t numChannels = input.getSamplesPerFrame();
    const float *interleaved = input.getBuffer();
    for (int channel = 0; channel < numChannels; channel++) {
        float *mono = outputs[channel]->getBuffer();
        for (int frame = 0; frame < numFrames; frame++) {
            // Sample for this channel sits at a fixed offset in each frame.
            mono[frame] = interleaved[frame * numChannels + channel];
        }
    }
    return numFrames;
}

View File

@ -0,0 +1,49 @@
/*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef FLOWGRAPH_MULTI_TO_MANY_CONVERTER_H
#define FLOWGRAPH_MULTI_TO_MANY_CONVERTER_H
#include <unistd.h>
#include <sys/types.h>
#include "FlowGraphNode.h"
namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/**
* Convert a multi-channel interleaved stream to multiple mono-channel
* outputs
*/
class MultiToManyConverter : public FlowGraphNode {
public:
    // Creates one mono output port per channel of the interleaved input.
    explicit MultiToManyConverter(int32_t channelCount);

    virtual ~MultiToManyConverter();

    // De-interleaves the input: channel ch of each frame goes to outputs[ch].
    int32_t onProcess(int32_t numFrames) override;

    const char *getName() override {
        return "MultiToManyConverter";
    }

    std::vector<std::unique_ptr<flowgraph::FlowGraphPortFloatOutput>> outputs;
    flowgraph::FlowGraphPortFloatInput input;
};
} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_MULTI_TO_MANY_CONVERTER_H

View File

@ -0,0 +1,41 @@
/*
* Copyright 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <unistd.h>
#include "FlowGraphNode.h"
#include "MultiToMonoConverter.h"
using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
// Input port carries the interleaved multi-channel stream; output is mono.
MultiToMonoConverter::MultiToMonoConverter(int32_t inputChannelCount)
        : input(*this, inputChannelCount)
        , output(*this, 1) {
}

MultiToMonoConverter::~MultiToMonoConverter() = default;
/**
 * Extract channel[0] of each interleaved input frame and write it to the
 * mono output; the remaining channels of each frame are discarded.
 *
 * @param numFrames number of frames to convert
 * @return numFrames
 */
int32_t MultiToMonoConverter::onProcess(int32_t numFrames) {
    const int32_t stride = input.getSamplesPerFrame();
    const float *interleaved = input.getBuffer();
    float *mono = output.getBuffer();
    for (int frame = 0; frame < numFrames; frame++) {
        // First sample of each frame is channel 0.
        mono[frame] = interleaved[frame * stride];
    }
    return numFrames;
}

View File

@ -0,0 +1,49 @@
/*
* Copyright 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef FLOWGRAPH_MULTI_TO_MONO_CONVERTER_H
#define FLOWGRAPH_MULTI_TO_MONO_CONVERTER_H
#include <unistd.h>
#include <sys/types.h>
#include "FlowGraphNode.h"
namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/**
* Convert a multi-channel interleaved stream to a monophonic stream
* by extracting channel[0].
*/
class MultiToMonoConverter : public FlowGraphNode {
public:
    explicit MultiToMonoConverter(int32_t inputChannelCount);

    virtual ~MultiToMonoConverter();

    // Copies channel[0] of each interleaved input frame to the mono output.
    int32_t onProcess(int32_t numFrames) override;

    const char *getName() override {
        return "MultiToMonoConverter";
    }

    FlowGraphPortFloatInput input;
    FlowGraphPortFloatOutput output;
};
} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_MULTI_TO_MONO_CONVERTER_H

View File

@ -19,7 +19,7 @@
#include "FlowGraphNode.h"
#include "RampLinear.h"
using namespace flowgraph;
using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
RampLinear::RampLinear(int32_t channelCount)
: FlowGraphFilter(channelCount) {
@ -32,6 +32,10 @@ void RampLinear::setLengthInFrames(int32_t frames) {
void RampLinear::setTarget(float target) {
mTarget.store(target);
// If the ramp has not been used then start immediately at this level.
if (mLastCallCount == kInitialCallCount) {
forceCurrent(target);
}
}
float RampLinear::interpolateCurrent() {

View File

@ -23,7 +23,7 @@
#include "FlowGraphNode.h"
namespace flowgraph {
namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/**
* When the target is modified then the output will ramp smoothly
@ -91,6 +91,6 @@ private:
float mLevelTo = 0.0f;
};
} /* namespace flowgraph */
} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_RAMP_LINEAR_H

View File

@ -16,20 +16,27 @@
#include "SampleRateConverter.h"
using namespace flowgraph;
using namespace resampler;
using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
using namespace RESAMPLER_OUTER_NAMESPACE::resampler;
SampleRateConverter::SampleRateConverter(int32_t channelCount, MultiChannelResampler &resampler)
SampleRateConverter::SampleRateConverter(int32_t channelCount,
MultiChannelResampler &resampler)
: FlowGraphFilter(channelCount)
, mResampler(resampler) {
setDataPulledAutomatically(false);
}
void SampleRateConverter::reset() {
FlowGraphNode::reset();
mInputCursor = kInitialCallCount;
}
// Return true if there is a sample available.
bool SampleRateConverter::isInputAvailable() {
// If we have consumed all of the input data then go out and get some more.
if (mInputCursor >= mNumValidInputFrames) {
mNumValidInputFrames = input.pullData(mInputFramePosition, input.getFramesPerBuffer());
mInputFramePosition += mNumValidInputFrames;
mInputCallCount++;
mNumValidInputFrames = input.pullData(mInputCallCount, input.getFramesPerBuffer());
mInputCursor = 0;
}
return (mInputCursor < mNumValidInputFrames);

View File

@ -14,8 +14,8 @@
* limitations under the License.
*/
#ifndef OBOE_SAMPLE_RATE_CONVERTER_H
#define OBOE_SAMPLE_RATE_CONVERTER_H
#ifndef FLOWGRAPH_SAMPLE_RATE_CONVERTER_H
#define FLOWGRAPH_SAMPLE_RATE_CONVERTER_H
#include <unistd.h>
#include <sys/types.h>
@ -23,11 +23,12 @@
#include "FlowGraphNode.h"
#include "resampler/MultiChannelResampler.h"
namespace flowgraph {
namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
class SampleRateConverter : public FlowGraphFilter {
public:
explicit SampleRateConverter(int32_t channelCount, resampler::MultiChannelResampler &mResampler);
explicit SampleRateConverter(int32_t channelCount,
resampler::MultiChannelResampler &mResampler);
virtual ~SampleRateConverter() = default;
@ -37,6 +38,8 @@ public:
return "SampleRateConverter";
}
void reset() override;
private:
// Return true if there is a sample available.
@ -47,10 +50,14 @@ private:
resampler::MultiChannelResampler &mResampler;
int32_t mInputCursor = 0;
int32_t mNumValidInputFrames = 0;
int64_t mInputFramePosition = 0; // monotonic counter of input frames used for pullData
int32_t mInputCursor = 0; // offset into the input port buffer
int32_t mNumValidInputFrames = 0; // number of valid frames currently in the input port buffer
// We need our own callCount for upstream calls because calls occur at a different rate.
// This means we cannot have cyclic graphs or merges that contain an SRC.
int64_t mInputCallCount = 0;
};
} /* namespace flowgraph */
#endif //OBOE_SAMPLE_RATE_CONVERTER_H
} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_SAMPLE_RATE_CONVERTER_H

View File

@ -19,22 +19,20 @@
#include "FlowGraphNode.h"
#include "SinkFloat.h"
using namespace flowgraph;
using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
SinkFloat::SinkFloat(int32_t channelCount)
: FlowGraphSink(channelCount) {
}
int32_t SinkFloat::read(int64_t framePosition, void *data, int32_t numFrames) {
// printf("SinkFloat::read(,,%d)\n", numFrames);
int32_t SinkFloat::read(void *data, int32_t numFrames) {
float *floatData = (float *) data;
int32_t channelCount = input.getSamplesPerFrame();
const int32_t channelCount = input.getSamplesPerFrame();
int32_t framesLeft = numFrames;
while (framesLeft > 0) {
// Run the graph and pull data through the input port.
int32_t framesPulled = pullData(framePosition, framesLeft);
// printf("SinkFloat::read: framesLeft = %d, framesPulled = %d\n", framesLeft, framesPulled);
int32_t framesPulled = pullData(framesLeft);
if (framesPulled <= 0) {
break;
}
@ -43,8 +41,6 @@ int32_t SinkFloat::read(int64_t framePosition, void *data, int32_t numFrames) {
memcpy(floatData, signal, numSamples * sizeof(float));
floatData += numSamples;
framesLeft -= framesPulled;
framePosition += framesPulled;
}
// printf("SinkFloat returning %d\n", numFrames - framesLeft);
return numFrames - framesLeft;
}

View File

@ -23,7 +23,7 @@
#include "FlowGraphNode.h"
namespace flowgraph {
namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/**
* AudioSink that lets you read data as 32-bit floats.
@ -31,14 +31,15 @@ namespace flowgraph {
class SinkFloat : public FlowGraphSink {
public:
explicit SinkFloat(int32_t channelCount);
~SinkFloat() override = default;
int32_t read(int64_t framePosition, void *data, int32_t numFrames) override;
int32_t read(void *data, int32_t numFrames) override;
const char *getName() override {
return "SinkFloat";
}
};
} /* namespace flowgraph */
} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_SINK_FLOAT_H

View File

@ -23,19 +23,19 @@
#include <audio_utils/primitives.h>
#endif
using namespace flowgraph;
using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
SinkI16::SinkI16(int32_t channelCount)
: FlowGraphSink(channelCount) {}
int32_t SinkI16::read(int64_t framePosition, void *data, int32_t numFrames) {
int32_t SinkI16::read(void *data, int32_t numFrames) {
int16_t *shortData = (int16_t *) data;
const int32_t channelCount = input.getSamplesPerFrame();
int32_t framesLeft = numFrames;
while (framesLeft > 0) {
// Run the graph and pull data through the input port.
int32_t framesRead = pullData(framePosition, framesLeft);
int32_t framesRead = pullData(framesLeft);
if (framesRead <= 0) {
break;
}
@ -52,7 +52,6 @@ int32_t SinkI16::read(int64_t framePosition, void *data, int32_t numFrames) {
}
#endif
framesLeft -= framesRead;
framePosition += framesRead;
}
return numFrames - framesLeft;
}

View File

@ -22,7 +22,7 @@
#include "FlowGraphNode.h"
namespace flowgraph {
namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/**
* AudioSink that lets you read data as 16-bit signed integers.
@ -31,13 +31,13 @@ class SinkI16 : public FlowGraphSink {
public:
explicit SinkI16(int32_t channelCount);
int32_t read(int64_t framePosition, void *data, int32_t numFrames) override;
int32_t read(void *data, int32_t numFrames) override;
const char *getName() override {
return "SinkI16";
}
};
} /* namespace flowgraph */
} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_SINK_I16_H

View File

@ -25,19 +25,19 @@
#include <audio_utils/primitives.h>
#endif
using namespace flowgraph;
using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
SinkI24::SinkI24(int32_t channelCount)
: FlowGraphSink(channelCount) {}
int32_t SinkI24::read(int64_t framePosition, void *data, int32_t numFrames) {
int32_t SinkI24::read(void *data, int32_t numFrames) {
uint8_t *byteData = (uint8_t *) data;
const int32_t channelCount = input.getSamplesPerFrame();
int32_t framesLeft = numFrames;
while (framesLeft > 0) {
// Run the graph and pull data through the input port.
int32_t framesRead = pullData(framePosition, framesLeft);
int32_t framesRead = pullData(framesLeft);
if (framesRead <= 0) {
break;
}
@ -61,7 +61,6 @@ int32_t SinkI24::read(int64_t framePosition, void *data, int32_t numFrames) {
}
#endif
framesLeft -= framesRead;
framePosition += framesRead;
}
return numFrames - framesLeft;
}

View File

@ -22,7 +22,7 @@
#include "FlowGraphNode.h"
namespace flowgraph {
namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/**
* AudioSink that lets you read data as packed 24-bit signed integers.
@ -32,13 +32,13 @@ class SinkI24 : public FlowGraphSink {
public:
explicit SinkI24(int32_t channelCount);
int32_t read(int64_t framePosition, void *data, int32_t numFrames) override;
int32_t read(void *data, int32_t numFrames) override;
const char *getName() override {
return "SinkI24";
}
};
} /* namespace flowgraph */
} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_SINK_I24_H

View File

@ -0,0 +1,55 @@
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "FlowGraphNode.h"
#include "FlowgraphUtilities.h"
#include "SinkI32.h"
#if FLOWGRAPH_ANDROID_INTERNAL
#include <audio_utils/primitives.h>
#endif
using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
SinkI32::SinkI32(int32_t channelCount)
        : FlowGraphSink(channelCount) {}

// Pull float data through the flowgraph and convert it to 32-bit signed
// integers in *data. Returns the number of frames actually written, which
// may be less than numFrames if the upstream source stops producing data.
int32_t SinkI32::read(void *data, int32_t numFrames) {
    int32_t *intData = (int32_t *) data;
    const int32_t channelCount = input.getSamplesPerFrame();
    int32_t framesLeft = numFrames;
    while (framesLeft > 0) {
        // Run the graph and pull data through the input port.
        int32_t framesRead = pullData(framesLeft);
        if (framesRead <= 0) {
            break; // upstream exhausted (or error); return what we have so far
        }
        const float *signal = input.getBuffer();
        int32_t numSamples = framesRead * channelCount;
#if FLOWGRAPH_ANDROID_INTERNAL
        // Android platform build: use the optimized audio_utils conversion.
        memcpy_to_i32_from_float(intData, signal, numSamples);
        intData += numSamples;
        signal += numSamples;
#else
        // Portable path: clamp each float sample into the int32 range.
        for (int i = 0; i < numSamples; i++) {
            *intData++ = FlowgraphUtilities::clamp32FromFloat(*signal++);
        }
#endif
        framesLeft -= framesRead;
    }
    return numFrames - framesLeft;
}

View File

@ -0,0 +1,40 @@
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef FLOWGRAPH_SINK_I32_H
#define FLOWGRAPH_SINK_I32_H
#include <stdint.h>
#include "FlowGraphNode.h"
namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/**
 * AudioSink that lets you read data as 32-bit signed integers.
 */
class SinkI32 : public FlowGraphSink {
public:
    explicit SinkI32(int32_t channelCount);
    ~SinkI32() override = default;

    /**
     * Pull data through the graph and convert it to 32-bit signed integers.
     * @param data destination buffer of int32_t samples
     * @param numFrames maximum number of frames to read
     * @return number of frames actually read, possibly fewer than numFrames
     */
    int32_t read(void *data, int32_t numFrames) override;

    const char *getName() override {
        return "SinkI32";
    }
};
} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_SINK_I32_H

View File

@ -14,13 +14,12 @@
* limitations under the License.
*/
#include "common/OboeDebug.h"
#include <algorithm>
#include <unistd.h>
#include "FlowGraphNode.h"
#include "SourceFloat.h"
using namespace flowgraph;
using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
SourceFloat::SourceFloat(int32_t channelCount)
: FlowGraphSourceBuffered(channelCount) {
@ -28,11 +27,11 @@ SourceFloat::SourceFloat(int32_t channelCount)
int32_t SourceFloat::onProcess(int32_t numFrames) {
float *outputBuffer = output.getBuffer();
int32_t channelCount = output.getSamplesPerFrame();
const int32_t channelCount = output.getSamplesPerFrame();
int32_t framesLeft = mSizeInFrames - mFrameIndex;
int32_t framesToProcess = std::min(numFrames, framesLeft);
int32_t numSamples = framesToProcess * channelCount;
const int32_t framesLeft = mSizeInFrames - mFrameIndex;
const int32_t framesToProcess = std::min(numFrames, framesLeft);
const int32_t numSamples = framesToProcess * channelCount;
const float *floatBase = (float *) mData;
const float *floatData = &floatBase[mFrameIndex * channelCount];

View File

@ -22,7 +22,7 @@
#include "FlowGraphNode.h"
namespace flowgraph {
namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/**
* AudioSource that reads a block of pre-defined float data.
@ -30,6 +30,7 @@ namespace flowgraph {
class SourceFloat : public FlowGraphSourceBuffered {
public:
explicit SourceFloat(int32_t channelCount);
~SourceFloat() override = default;
int32_t onProcess(int32_t numFrames) override;
@ -38,6 +39,6 @@ public:
}
};
} /* namespace flowgraph */
} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_SOURCE_FLOAT_H

View File

@ -24,7 +24,7 @@
#include <audio_utils/primitives.h>
#endif
using namespace flowgraph;
using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
SourceI16::SourceI16(int32_t channelCount)
: FlowGraphSourceBuffered(channelCount) {

View File

@ -22,7 +22,7 @@
#include "FlowGraphNode.h"
namespace flowgraph {
namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/**
* AudioSource that reads a block of pre-defined 16-bit integer data.
*/
@ -37,6 +37,6 @@ public:
}
};
} /* namespace flowgraph */
} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_SOURCE_I16_H

View File

@ -17,14 +17,14 @@
#include <algorithm>
#include <unistd.h>
#include "FlowGraphNode.h"
#include "SourceI24.h"
#if FLOWGRAPH_ANDROID_INTERNAL
#include <audio_utils/primitives.h>
#endif
#include "FlowGraphNode.h"
#include "SourceI24.h"
using namespace flowgraph;
using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
constexpr int kBytesPerI24Packed = 3;

View File

@ -22,7 +22,7 @@
#include "FlowGraphNode.h"
namespace flowgraph {
namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {
/**
* AudioSource that reads a block of pre-defined 24-bit packed integer data.
@ -38,6 +38,6 @@ public:
}
};
} /* namespace flowgraph */
} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_SOURCE_I24_H

View File

@ -0,0 +1,54 @@
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <algorithm>
#include <unistd.h>
#include "FlowGraphNode.h"
#include "SourceI32.h"
#if FLOWGRAPH_ANDROID_INTERNAL
#include <audio_utils/primitives.h>
#endif
using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
SourceI32::SourceI32(int32_t channelCount)
        : FlowGraphSourceBuffered(channelCount) {
}

// Convert up to numFrames frames of the stored 32-bit integer data to floats
// in the output buffer, starting at the current read position (mFrameIndex).
// Returns the number of frames produced; fewer than numFrames when the end
// of the stored data is reached.
int32_t SourceI32::onProcess(int32_t numFrames) {
    float *floatData = output.getBuffer();
    const int32_t channelCount = output.getSamplesPerFrame();
    const int32_t framesLeft = mSizeInFrames - mFrameIndex;
    const int32_t framesToProcess = std::min(numFrames, framesLeft);
    const int32_t numSamples = framesToProcess * channelCount;
    const int32_t *intBase = static_cast<const int32_t *>(mData);
    // Advance into the source data by the frames already consumed.
    const int32_t *intData = &intBase[mFrameIndex * channelCount];
#if FLOWGRAPH_ANDROID_INTERNAL
    // Android platform build: use the optimized audio_utils conversion.
    memcpy_to_float_from_i32(floatData, intData, numSamples);
#else
    // Portable path: scale each int32 sample into float range by kScale.
    for (int i = 0; i < numSamples; i++) {
        *floatData++ = *intData++ * kScale;
    }
#endif
    mFrameIndex += framesToProcess;
    return framesToProcess;
}

View File

@ -0,0 +1,42 @@
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef FLOWGRAPH_SOURCE_I32_H
#define FLOWGRAPH_SOURCE_I32_H
#include <stdint.h>
#include "FlowGraphNode.h"
namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph {

/**
 * AudioSource that reads a block of pre-defined 32-bit signed integer data.
 */
class SourceI32 : public FlowGraphSourceBuffered {
public:
    explicit SourceI32(int32_t channelCount);
    ~SourceI32() override = default;

    int32_t onProcess(int32_t numFrames) override;

    const char *getName() override {
        return "SourceI32";
    }
private:
    // Scale factor that maps the int32 range onto approximately -1.0..+1.0.
    static constexpr float kScale = 1.0 / (1UL << 31);
};
} /* namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph */
#endif //FLOWGRAPH_SOURCE_I32_H

View File

@ -19,7 +19,9 @@
#include <math.h>
namespace resampler {
#include "ResamplerDefinitions.h"
namespace RESAMPLER_OUTER_NAMESPACE::resampler {
/**
* Calculate a HyperbolicCosineWindow window centered at 0.
@ -64,5 +66,6 @@ private:
double mInverseCoshAlpha = 1.0;
};
} // namespace resampler
} /* namespace RESAMPLER_OUTER_NAMESPACE::resampler */
#endif //RESAMPLER_HYPERBOLIC_COSINE_WINDOW_H

View File

@ -16,7 +16,7 @@
#include "IntegerRatio.h"
using namespace resampler;
using namespace RESAMPLER_OUTER_NAMESPACE::resampler;
// Enough primes to cover the common sample rates.
static const int kPrimes[] = {

View File

@ -14,12 +14,14 @@
* limitations under the License.
*/
#ifndef OBOE_INTEGER_RATIO_H
#define OBOE_INTEGER_RATIO_H
#ifndef RESAMPLER_INTEGER_RATIO_H
#define RESAMPLER_INTEGER_RATIO_H
#include <sys/types.h>
namespace resampler {
#include "ResamplerDefinitions.h"
namespace RESAMPLER_OUTER_NAMESPACE::resampler {
/**
* Represent the ratio of two integers.
@ -47,6 +49,6 @@ private:
int32_t mDenominator;
};
}
} /* namespace RESAMPLER_OUTER_NAMESPACE::resampler */
#endif //OBOE_INTEGER_RATIO_H
#endif //RESAMPLER_INTEGER_RATIO_H

View File

@ -19,7 +19,9 @@
#include <math.h>
namespace resampler {
#include "ResamplerDefinitions.h"
namespace RESAMPLER_OUTER_NAMESPACE::resampler {
/**
* Calculate a Kaiser window centered at 0.
@ -83,5 +85,6 @@ private:
double mInverseBesselBeta = 1.0;
};
} // namespace resampler
} /* namespace RESAMPLER_OUTER_NAMESPACE::resampler */
#endif //RESAMPLER_KAISER_WINDOW_H

View File

@ -16,7 +16,7 @@
#include "LinearResampler.h"
using namespace resampler;
using namespace RESAMPLER_OUTER_NAMESPACE::resampler;
LinearResampler::LinearResampler(const MultiChannelResampler::Builder &builder)
: MultiChannelResampler(builder) {

View File

@ -14,22 +14,24 @@
* limitations under the License.
*/
#ifndef OBOE_LINEAR_RESAMPLER_H
#define OBOE_LINEAR_RESAMPLER_H
#ifndef RESAMPLER_LINEAR_RESAMPLER_H
#define RESAMPLER_LINEAR_RESAMPLER_H
#include <memory>
#include <sys/types.h>
#include <unistd.h>
#include "MultiChannelResampler.h"
namespace resampler {
#include "MultiChannelResampler.h"
#include "ResamplerDefinitions.h"
namespace RESAMPLER_OUTER_NAMESPACE::resampler {
/**
* Simple resampler that uses bi-linear interpolation.
*/
class LinearResampler : public MultiChannelResampler {
public:
LinearResampler(const MultiChannelResampler::Builder &builder);
explicit LinearResampler(const MultiChannelResampler::Builder &builder);
void writeFrame(const float *frame) override;
@ -40,5 +42,6 @@ private:
std::unique_ptr<float[]> mCurrentFrame;
};
} // namespace resampler
#endif //OBOE_LINEAR_RESAMPLER_H
} /* namespace RESAMPLER_OUTER_NAMESPACE::resampler */
#endif //RESAMPLER_LINEAR_RESAMPLER_H

View File

@ -25,11 +25,12 @@
#include "SincResampler.h"
#include "SincResamplerStereo.h"
using namespace resampler;
using namespace RESAMPLER_OUTER_NAMESPACE::resampler;
MultiChannelResampler::MultiChannelResampler(const MultiChannelResampler::Builder &builder)
: mNumTaps(builder.getNumTaps())
, mX(builder.getChannelCount() * builder.getNumTaps() * 2)
, mX(static_cast<size_t>(builder.getChannelCount())
* static_cast<size_t>(builder.getNumTaps()) * 2)
, mSingleFrame(builder.getChannelCount())
, mChannelCount(builder.getChannelCount())
{
@ -39,7 +40,7 @@ MultiChannelResampler::MultiChannelResampler(const MultiChannelResampler::Builde
ratio.reduce();
mNumerator = ratio.getNumerator();
mDenominator = ratio.getDenominator();
mIntegerPhase = mDenominator;
mIntegerPhase = mDenominator; // so we start with a write needed
}
// static factory method
@ -110,7 +111,7 @@ void MultiChannelResampler::writeFrame(const float *frame) {
if (--mCursor < 0) {
mCursor = getNumTaps() - 1;
}
float *dest = &mX[mCursor * getChannelCount()];
float *dest = &mX[static_cast<size_t>(mCursor) * static_cast<size_t>(getChannelCount())];
int offset = getNumTaps() * getChannelCount();
for (int channel = 0; channel < getChannelCount(); channel++) {
// Write twice so we avoid having to wrap when reading.
@ -130,7 +131,7 @@ void MultiChannelResampler::generateCoefficients(int32_t inputRate,
int32_t numRows,
double phaseIncrement,
float normalizedCutoff) {
mCoefficients.resize(getNumTaps() * numRows);
mCoefficients.resize(static_cast<size_t>(getNumTaps()) * static_cast<size_t>(numRows));
int coefficientIndex = 0;
double phase = 0.0; // ranges from 0.0 to 1.0, fraction between samples
// Stretch the sinc function for low pass filtering.
@ -150,7 +151,7 @@ void MultiChannelResampler::generateCoefficients(int32_t inputRate,
#if MCR_USE_KAISER
float window = mKaiserWindow(tapPhase * numTapsHalfInverse);
#else
float window = mCoshWindow(tapPhase * numTapsHalfInverse);
float window = mCoshWindow(static_cast<double>(tapPhase) * numTapsHalfInverse);
#endif
float coefficient = sinc(radians * cutoffScaler) * window;
mCoefficients.at(coefficientIndex++) = coefficient;

View File

@ -14,8 +14,8 @@
* limitations under the License.
*/
#ifndef OBOE_MULTICHANNEL_RESAMPLER_H
#define OBOE_MULTICHANNEL_RESAMPLER_H
#ifndef RESAMPLER_MULTICHANNEL_RESAMPLER_H
#define RESAMPLER_MULTICHANNEL_RESAMPLER_H
#include <memory>
#include <vector>
@ -34,7 +34,9 @@
#include "HyperbolicCosineWindow.h"
#endif
namespace resampler {
#include "ResamplerDefinitions.h"
namespace RESAMPLER_OUTER_NAMESPACE::resampler {
class MultiChannelResampler {
@ -267,5 +269,6 @@ private:
const int mChannelCount;
};
}
#endif //OBOE_MULTICHANNEL_RESAMPLER_H
} /* namespace RESAMPLER_OUTER_NAMESPACE::resampler */
#endif //RESAMPLER_MULTICHANNEL_RESAMPLER_H

View File

@ -14,12 +14,12 @@
* limitations under the License.
*/
#include <assert.h>
#include <cassert>
#include <math.h>
#include "IntegerRatio.h"
#include "PolyphaseResampler.h"
using namespace resampler;
using namespace RESAMPLER_OUTER_NAMESPACE::resampler;
PolyphaseResampler::PolyphaseResampler(const MultiChannelResampler::Builder &builder)
: MultiChannelResampler(builder)
@ -40,13 +40,11 @@ void PolyphaseResampler::readFrame(float *frame) {
// Clear accumulator for mixing.
std::fill(mSingleFrame.begin(), mSingleFrame.end(), 0.0);
// printf("PolyphaseResampler: mCoefficientCursor = %4d\n", mCoefficientCursor);
// Multiply input times windowed sinc function.
float *coefficients = &mCoefficients[mCoefficientCursor];
float *xFrame = &mX[mCursor * getChannelCount()];
float *xFrame = &mX[static_cast<size_t>(mCursor) * static_cast<size_t>(getChannelCount())];
for (int i = 0; i < mNumTaps; i++) {
float coefficient = *coefficients++;
// printf("PolyphaseResampler: coeff = %10.6f, xFrame[0] = %10.6f\n", coefficient, xFrame[0]);
for (int channel = 0; channel < getChannelCount(); channel++) {
mSingleFrame[channel] += *xFrame++ * coefficient;
}

View File

@ -14,19 +14,21 @@
* limitations under the License.
*/
#ifndef OBOE_POLYPHASE_RESAMPLER_H
#define OBOE_POLYPHASE_RESAMPLER_H
#ifndef RESAMPLER_POLYPHASE_RESAMPLER_H
#define RESAMPLER_POLYPHASE_RESAMPLER_H
#include <memory>
#include <vector>
#include <sys/types.h>
#include <unistd.h>
#include "MultiChannelResampler.h"
namespace resampler {
#include "MultiChannelResampler.h"
#include "ResamplerDefinitions.h"
namespace RESAMPLER_OUTER_NAMESPACE::resampler {
/**
* Resample that is optimized for a reduced ratio of sample rates.
* All of the coefficients for eacxh possible phase value are precalculated.
* Resampler that is optimized for a reduced ratio of sample rates.
* All of the coefficients for each possible phase value are pre-calculated.
*/
class PolyphaseResampler : public MultiChannelResampler {
public:
@ -46,6 +48,6 @@ protected:
};
}
} /* namespace RESAMPLER_OUTER_NAMESPACE::resampler */
#endif //OBOE_POLYPHASE_RESAMPLER_H
#endif //RESAMPLER_POLYPHASE_RESAMPLER_H

View File

@ -14,11 +14,10 @@
* limitations under the License.
*/
#include <cassert>
#include "PolyphaseResamplerMono.h"
#include <assert.h>
using namespace resampler;
using namespace RESAMPLER_OUTER_NAMESPACE::resampler;
#define MONO 1

View File

@ -14,14 +14,16 @@
* limitations under the License.
*/
#ifndef OBOE_POLYPHASE_RESAMPLER_MONO_H
#define OBOE_POLYPHASE_RESAMPLER_MONO_H
#ifndef RESAMPLER_POLYPHASE_RESAMPLER_MONO_H
#define RESAMPLER_POLYPHASE_RESAMPLER_MONO_H
#include <sys/types.h>
#include <unistd.h>
#include "PolyphaseResampler.h"
namespace resampler {
#include "PolyphaseResampler.h"
#include "ResamplerDefinitions.h"
namespace RESAMPLER_OUTER_NAMESPACE::resampler {
class PolyphaseResamplerMono : public PolyphaseResampler {
public:
@ -34,6 +36,6 @@ public:
void readFrame(float *frame) override;
};
}
} /* namespace RESAMPLER_OUTER_NAMESPACE::resampler */
#endif //OBOE_POLYPHASE_RESAMPLER_MONO_H
#endif //RESAMPLER_POLYPHASE_RESAMPLER_MONO_H

View File

@ -14,11 +14,10 @@
* limitations under the License.
*/
#include <cassert>
#include "PolyphaseResamplerStereo.h"
#include <assert.h>
using namespace resampler;
using namespace RESAMPLER_OUTER_NAMESPACE::resampler;
#define STEREO 2

View File

@ -14,14 +14,16 @@
* limitations under the License.
*/
#ifndef OBOE_POLYPHASE_RESAMPLER_STEREO_H
#define OBOE_POLYPHASE_RESAMPLER_STEREO_H
#ifndef RESAMPLER_POLYPHASE_RESAMPLER_STEREO_H
#define RESAMPLER_POLYPHASE_RESAMPLER_STEREO_H
#include <sys/types.h>
#include <unistd.h>
#include "PolyphaseResampler.h"
namespace resampler {
#include "PolyphaseResampler.h"
#include "ResamplerDefinitions.h"
namespace RESAMPLER_OUTER_NAMESPACE::resampler {
class PolyphaseResamplerStereo : public PolyphaseResampler {
public:
@ -34,6 +36,6 @@ public:
void readFrame(float *frame) override;
};
}
} /* namespace RESAMPLER_OUTER_NAMESPACE::resampler */
#endif //OBOE_POLYPHASE_RESAMPLER_STEREO_H
#endif //RESAMPLER_POLYPHASE_RESAMPLER_STEREO_H

View File

@ -1,12 +1,21 @@
# Sample Rate Converter
This folder contains a sample rate converter, or "resampler".
It is part of [Oboe](https://github.com/google/oboe) but has no dependencies on Oboe.
So the contents of this folder can be used outside of Oboe.
The converter is based on a sinc function that has been windowed by a hyperbolic cosine.
We found this had fewer artifacts than the more traditional Kaiser window.
## Building the Resampler
It is part of [Oboe](https://github.com/google/oboe) but has no dependencies on Oboe.
So the contents of this folder can be used outside of Oboe.
To build it for use outside of Oboe:
1. Copy the "resampler" folder to a folder in your project that is in the include path.
2. Add all of the \*.cpp files in the resampler folder to your project IDE or Makefile.
3. In ResamplerDefinitions.h, define RESAMPLER_OUTER_NAMESPACE with your own project name. Alternatively, use -DRESAMPLER_OUTER_NAMESPACE=mynamespace when compiling to avoid modifying the resampler code.
## Creating a Resampler
Include the [main header](MultiChannelResampler.h) for the resampler.
@ -20,7 +29,7 @@ Only do this once, when you open your stream. Then use the sample resampler to p
2, // channel count
44100, // input sampleRate
48000, // output sampleRate
MultiChannelResampler::Medium); // conversion quality
MultiChannelResampler::Quality::Medium); // conversion quality
Possible values for quality include { Fastest, Low, Medium, High, Best }.
Higher quality levels will sound better but consume more CPU because they have more taps in the filter.
@ -29,11 +38,11 @@ Higher quality levels will sound better but consume more CPU because they have m
Note that the number of output frames generated for a given number of input frames can vary.
For example, suppose you are converting from 44100 Hz to 48000 Hz and using an input buffer with 940 frames. If you calculate the number of output frames you get:
For example, suppose you are converting from 44100 Hz to 48000 Hz and using an input buffer with 960 frames. If you calculate the number of output frames you get:
940 * 48000 * 44100 = 1023.1292517...
960.0 * 48000 / 44100 = 1044.897959...
You cannot generate a fractional number of frames. So the resampler will sometimes generate 1023 frames and sometimes 1024 frames. On average it will generate 1023.1292517 frames. The resampler stores the fraction internally and keeps track of when to consume or generate a frame.
You cannot generate a fractional number of frames. So the resampler will sometimes generate 1044 frames and sometimes 1045 frames. On average it will generate 1044.897959 frames. The resampler stores the fraction internally and keeps track of when to consume or generate a frame.
You can either use a fixed number of input frames or a fixed number of output frames. The other frame count will vary.
@ -90,4 +99,3 @@ Assume you start with these variables:
When you are done, you should delete the Resampler to avoid a memory leak.
delete resampler;

View File

@ -0,0 +1,27 @@
/*
* Copyright 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Define RESAMPLER_OUTER_NAMESPACE based on whether the compiler flag
// __ANDROID_NDK__ is set. __ANDROID_NDK__ should be defined when building
// Oboe (namespace "oboe") but not in the Android platform build ("aaudio").
#ifndef RESAMPLER_OUTER_NAMESPACE
#ifdef __ANDROID_NDK__
#define RESAMPLER_OUTER_NAMESPACE oboe
#else
#define RESAMPLER_OUTER_NAMESPACE aaudio
#endif // __ANDROID_NDK__
#endif // RESAMPLER_OUTER_NAMESPACE

View File

@ -14,20 +14,20 @@
* limitations under the License.
*/
#include <assert.h>
#include <cassert>
#include <math.h>
#include "SincResampler.h"
using namespace resampler;
using namespace RESAMPLER_OUTER_NAMESPACE::resampler;
SincResampler::SincResampler(const MultiChannelResampler::Builder &builder)
: MultiChannelResampler(builder)
, mSingleFrame2(builder.getChannelCount()) {
assert((getNumTaps() % 4) == 0); // Required for loop unrolling.
mNumRows = kMaxCoefficients / getNumTaps(); // no guard row needed
// printf("SincResampler: numRows = %d\n", mNumRows);
mPhaseScaler = (double) mNumRows / mDenominator;
double phaseIncrement = 1.0 / mNumRows;
mNumRows = kMaxCoefficients / getNumTaps(); // includes guard row
int32_t numRowsNoGuard = mNumRows - 1;
mPhaseScaler = (double) numRowsNoGuard / mDenominator;
double phaseIncrement = 1.0 / numRowsNoGuard;
generateCoefficients(builder.getInputRate(),
builder.getOutputRate(),
mNumRows,
@ -42,33 +42,27 @@ void SincResampler::readFrame(float *frame) {
// Determine indices into coefficients table.
double tablePhase = getIntegerPhase() * mPhaseScaler;
int index1 = static_cast<int>(floor(tablePhase));
if (index1 >= mNumRows) { // no guard row needed because we wrap the indices
tablePhase -= mNumRows;
index1 -= mNumRows;
}
int indexLow = static_cast<int>(floor(tablePhase));
int indexHigh = indexLow + 1; // OK because using a guard row.
assert (indexHigh < mNumRows);
float *coefficientsLow = &mCoefficients[static_cast<size_t>(indexLow)
* static_cast<size_t>(getNumTaps())];
float *coefficientsHigh = &mCoefficients[static_cast<size_t>(indexHigh)
* static_cast<size_t>(getNumTaps())];
int index2 = index1 + 1;
if (index2 >= mNumRows) { // no guard row needed because we wrap the indices
index2 -= mNumRows;
}
float *coefficients1 = &mCoefficients[index1 * getNumTaps()];
float *coefficients2 = &mCoefficients[index2 * getNumTaps()];
float *xFrame = &mX[mCursor * getChannelCount()];
for (int i = 0; i < mNumTaps; i++) {
float coefficient1 = *coefficients1++;
float coefficient2 = *coefficients2++;
float *xFrame = &mX[static_cast<size_t>(mCursor) * static_cast<size_t>(getChannelCount())];
for (int tap = 0; tap < mNumTaps; tap++) {
float coefficientLow = *coefficientsLow++;
float coefficientHigh = *coefficientsHigh++;
for (int channel = 0; channel < getChannelCount(); channel++) {
float sample = *xFrame++;
mSingleFrame[channel] += sample * coefficient1;
mSingleFrame2[channel] += sample * coefficient2;
mSingleFrame[channel] += sample * coefficientLow;
mSingleFrame2[channel] += sample * coefficientHigh;
}
}
// Interpolate and copy to output.
float fraction = tablePhase - index1;
float fraction = tablePhase - indexLow;
for (int channel = 0; channel < getChannelCount(); channel++) {
float low = mSingleFrame[channel];
float high = mSingleFrame2[channel];

View File

@ -14,15 +14,17 @@
* limitations under the License.
*/
#ifndef OBOE_SINC_RESAMPLER_H
#define OBOE_SINC_RESAMPLER_H
#ifndef RESAMPLER_SINC_RESAMPLER_H
#define RESAMPLER_SINC_RESAMPLER_H
#include <memory>
#include <sys/types.h>
#include <unistd.h>
#include "MultiChannelResampler.h"
namespace resampler {
#include "MultiChannelResampler.h"
#include "ResamplerDefinitions.h"
namespace RESAMPLER_OUTER_NAMESPACE::resampler {
/**
* Resampler that can interpolate between coefficients.
@ -43,5 +45,6 @@ protected:
double mPhaseScaler = 1.0;
};
}
#endif //OBOE_SINC_RESAMPLER_H
} /* namespace RESAMPLER_OUTER_NAMESPACE::resampler */
#endif //RESAMPLER_SINC_RESAMPLER_H

View File

@ -14,13 +14,12 @@
* limitations under the License.
*/
#include <cassert>
#include <math.h>
#include "SincResamplerStereo.h"
#include <assert.h>
using namespace resampler;
using namespace RESAMPLER_OUTER_NAMESPACE::resampler;
#define STEREO 2
@ -55,13 +54,13 @@ void SincResamplerStereo::readFrame(float *frame) {
// Determine indices into coefficients table.
double tablePhase = getIntegerPhase() * mPhaseScaler;
int index1 = static_cast<int>(floor(tablePhase));
float *coefficients1 = &mCoefficients[index1 * getNumTaps()];
float *coefficients1 = &mCoefficients[static_cast<size_t>(index1)
* static_cast<size_t>(getNumTaps())];
int index2 = (index1 + 1);
if (index2 >= mNumRows) { // no guard row needed because we wrap the indices
index2 = 0;
}
float *coefficients2 = &mCoefficients[index2 * getNumTaps()];
float *xFrame = &mX[mCursor * getChannelCount()];
assert (index2 < mNumRows);
float *coefficients2 = &mCoefficients[static_cast<size_t>(index2)
* static_cast<size_t>(getNumTaps())];
float *xFrame = &mX[static_cast<size_t>(mCursor) * static_cast<size_t>(getChannelCount())];
for (int i = 0; i < mNumTaps; i++) {
float coefficient1 = *coefficients1++;
float coefficient2 = *coefficients2++;

View File

@ -14,14 +14,16 @@
* limitations under the License.
*/
#ifndef OBOE_SINC_RESAMPLER_STEREO_H
#define OBOE_SINC_RESAMPLER_STEREO_H
#ifndef RESAMPLER_SINC_RESAMPLER_STEREO_H
#define RESAMPLER_SINC_RESAMPLER_STEREO_H
#include <sys/types.h>
#include <unistd.h>
#include "SincResampler.h"
namespace resampler {
#include "SincResampler.h"
#include "ResamplerDefinitions.h"
namespace RESAMPLER_OUTER_NAMESPACE::resampler {
class SincResamplerStereo : public SincResampler {
public:
@ -35,5 +37,6 @@ public:
};
}
#endif //OBOE_SINC_RESAMPLER_STEREO_H
} /* namespace RESAMPLER_OUTER_NAMESPACE::resampler */
#endif //RESAMPLER_SINC_RESAMPLER_STEREO_H

View File

@ -19,6 +19,7 @@
#include <SLES/OpenSLES.h>
#include <SLES/OpenSLES_Android.h>
#include "common/OboeDebug.h"
#include "oboe/AudioStreamBuilder.h"
#include "AudioInputStreamOpenSLES.h"
#include "AudioStreamOpenSLES.h"
@ -36,6 +37,7 @@ static SLuint32 OpenSLES_convertInputPreset(InputPreset oboePreset) {
openslPreset = SL_ANDROID_RECORDING_PRESET_CAMCORDER;
break;
case InputPreset::VoiceRecognition:
case InputPreset::VoicePerformance:
openslPreset = SL_ANDROID_RECORDING_PRESET_VOICE_RECOGNITION;
break;
case InputPreset::VoiceCommunication:
@ -97,9 +99,10 @@ Result AudioInputStreamOpenSLES::open() {
SLuint32 bitsPerSample = static_cast<SLuint32>(getBytesPerSample() * kBitsPerByte);
// configure audio sink
mBufferQueueLength = calculateOptimalBufferQueueLength();
SLDataLocator_AndroidSimpleBufferQueue loc_bufq = {
SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, // locatorType
static_cast<SLuint32>(kBufferQueueLength)}; // numBuffers
static_cast<SLuint32>(mBufferQueueLength)}; // numBuffers
// Define the audio data format.
SLDataFormat_PCM format_pcm = {
@ -155,14 +158,19 @@ Result AudioInputStreamOpenSLES::open() {
LOGW("%s() GetInterface(SL_IID_ANDROIDCONFIGURATION) failed with %s",
__func__, getSLErrStr(result));
} else {
if (getInputPreset() == InputPreset::VoicePerformance) {
LOGD("OpenSL ES does not support InputPreset::VoicePerformance. Use VoiceRecognition.");
mInputPreset = InputPreset::VoiceRecognition;
}
SLuint32 presetValue = OpenSLES_convertInputPreset(getInputPreset());
result = (*configItf)->SetConfiguration(configItf,
SL_ANDROID_KEY_RECORDING_PRESET,
&presetValue,
sizeof(SLuint32));
if (SL_RESULT_SUCCESS != result
&& presetValue != SL_ANDROID_RECORDING_PRESET_VOICE_RECOGNITION) {
&& presetValue != SL_ANDROID_RECORDING_PRESET_VOICE_RECOGNITION) {
presetValue = SL_ANDROID_RECORDING_PRESET_VOICE_RECOGNITION;
LOGD("Setting InputPreset %d failed. Using VoiceRecognition instead.", getInputPreset());
mInputPreset = InputPreset::VoiceRecognition;
(*configItf)->SetConfiguration(configItf,
SL_ANDROID_KEY_RECORDING_PRESET,
@ -188,45 +196,34 @@ Result AudioInputStreamOpenSLES::open() {
goto error;
}
result = AudioStreamOpenSLES::registerBufferQueueCallback();
result = finishCommonOpen(configItf);
if (SL_RESULT_SUCCESS != result) {
goto error;
}
result = updateStreamParameters(configItf);
if (SL_RESULT_SUCCESS != result) {
goto error;
}
oboeResult = configureBufferSizes(mSampleRate);
if (Result::OK != oboeResult) {
goto error;
}
allocateFifo();
setState(StreamState::Open);
return Result::OK;
error:
close(); // Clean up various OpenSL objects and prevent resource leaks.
return Result::ErrorInternal; // TODO convert error from SLES to OBOE
}
Result AudioInputStreamOpenSLES::close() {
LOGD("AudioInputStreamOpenSLES::%s()", __func__);
mLock.lock();
std::lock_guard<std::mutex> lock(mLock);
Result result = Result::OK;
if (getState() == StreamState::Closed){
result = Result::ErrorClosed;
} else {
mLock.unlock(); // avoid recursive lock
requestStop();
mLock.lock();
(void) requestStop_l();
if (OboeGlobals::areWorkaroundsEnabled()) {
sleepBeforeClose();
}
// invalidate any interfaces
mRecordInterface = nullptr;
result = AudioStreamOpenSLES::close();
result = AudioStreamOpenSLES::close_l();
}
mLock.unlock(); // avoid recursive lock
return result;
}
@ -235,7 +232,7 @@ Result AudioInputStreamOpenSLES::setRecordState_l(SLuint32 newState) {
Result result = Result::OK;
if (mRecordInterface == nullptr) {
LOGE("AudioInputStreamOpenSLES::%s() mRecordInterface is null", __func__);
LOGW("AudioInputStreamOpenSLES::%s() mRecordInterface is null", __func__);
return Result::ErrorInvalidState;
}
SLresult slResult = (*mRecordInterface)->SetRecordState(mRecordInterface, newState);
@ -267,12 +264,16 @@ Result AudioInputStreamOpenSLES::requestStart() {
setDataCallbackEnabled(true);
setState(StreamState::Starting);
Result result = setRecordState_l(SL_RECORDSTATE_RECORDING);
if (result == Result::OK) {
setState(StreamState::Started);
if (getBufferDepth(mSimpleBufferQueueInterface) == 0) {
// Enqueue the first buffer to start the streaming.
// This does not call the callback function.
enqueueCallbackBuffer(mSimpleBufferQueueInterface);
}
Result result = setRecordState_l(SL_RECORDSTATE_RECORDING);
if (result == Result::OK) {
setState(StreamState::Started);
} else {
setState(initialState);
}
@ -294,13 +295,18 @@ Result AudioInputStreamOpenSLES::requestFlush() {
Result AudioInputStreamOpenSLES::requestStop() {
LOGD("AudioInputStreamOpenSLES(): %s() called", __func__);
std::lock_guard<std::mutex> lock(mLock);
return requestStop_l();
}
// Call under mLock
Result AudioInputStreamOpenSLES::requestStop_l() {
StreamState initialState = getState();
switch (initialState) {
case StreamState::Stopping:
case StreamState::Stopped:
return Result::OK;
case StreamState::Uninitialized:
case StreamState::Closed:
return Result::ErrorClosed;
default:

Some files were not shown because too many files have changed in this diff Show More