Move dead submodules in-tree

Signed-off-by: swurl <swurl@swurl.xyz>
This commit is contained in:
swurl 2025-05-31 02:33:02 -04:00
parent c0cceff365
commit 6c655321e6
No known key found for this signature in database
GPG key ID: A5A7629F109C8FD1
4081 changed files with 1185566 additions and 45 deletions

10
externals/oboe/samples/.gitignore vendored Normal file
View file

@ -0,0 +1,10 @@
*.iml
.gradle
/local.properties
/.idea/workspace.xml
/.idea/libraries
.DS_Store
/build
/captures
.externalNativeBuild
test/build

View file

@ -0,0 +1,36 @@
LiveEffect Sample
============
This sample simply loops audio from input stream to output stream to demonstrate
the usage of the 2 stream interfaces.
Screenshots
-----------
![Screenshot](screenshot.png)
### Stream Configurations
- 48kHz
- oboe::I16
- stereo or mono
### Customizing the App
If you want to customize the effects processing then modify the
onBothStreamsReady() method in "src/main/cpp/FullDuplexPass.h"
### Caveats
OpenSL ES does not allow setting the recording or playback device.
Synchronizing input and output streams for full-duplex operation is tricky. 
Input and output have different startup times. The input side may have to charge up the microphone circuit.
Also the initial timing for the output callback may be bursty as it fills the buffer up.
So when the output stream makes its first callback, the input buffer may be overflowing or empty or partially full.
In order to get into sync we go through a few phases.
* In Phase 1 we always drain the input buffer as much as possible, more than the output callback asks for. When we have done this for a while, we move to phase 2.
* In Phase 2 we optionally skip reading the input once to allow it to fill up with one burst. This makes it less likely to underflow on future reads.
* In Phase 3 we should be in a stable situation where the output is nearly full and the input is nearly empty.  You should be able to run for hours like this with no glitches.

View file

@ -0,0 +1,43 @@
apply plugin: 'com.android.application'
apply plugin: 'kotlin-android'
android {
// NOTE(review): compileSdkVersion/minSdkVersion/targetSdkVersion are the
// legacy DSL names; newer AGP prefers compileSdk/minSdk/targetSdk — confirm
// the AGP version in use before renaming.
compileSdkVersion 35
defaultConfig {
applicationId 'com.google.oboe.samples.liveeffect'
minSdkVersion 21
targetSdkVersion 35
versionCode 1
versionName '1.0'
ndk {
// Build native code for all four common ABIs.
abiFilters 'armeabi-v7a', 'arm64-v8a', 'x86', 'x86_64'
}
externalNativeBuild {
cmake {
arguments '-DANDROID_TOOLCHAIN=clang'
abiFilters 'armeabi-v7a', 'arm64-v8a', 'x86', 'x86_64'
}
}
}
buildTypes {
release {
minifyEnabled false
}
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_18
targetCompatibility JavaVersion.VERSION_18
}
externalNativeBuild {
cmake {
// Native sources (LiveEffectEngine, jni_bridge) are built from this script.
path 'src/main/cpp/CMakeLists.txt'
}
}
namespace 'com.google.oboe.samples.liveEffect'
}
dependencies {
implementation 'androidx.appcompat:appcompat:1.7.0'
implementation 'androidx.constraintlayout:constraintlayout:2.2.1'
// Shared device-picker module used by the sample's spinners.
implementation project(':audio-device')
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 35 KiB

View file

@ -0,0 +1,35 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android">
<!-- The sample loops mic input to speaker output, so both hardware
     features are hard requirements. -->
<uses-feature android:name="android.hardware.microphone" android:required="true" />
<uses-feature android:name="android.hardware.audio.output" android:required="true" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
<!-- Foreground-service permissions: the typed variants (MEDIA_PLAYBACK,
     MICROPHONE) are required on newer API levels in addition to the base
     FOREGROUND_SERVICE permission. -->
<uses-permission android:name="android.permission.FOREGROUND_SERVICE" />
<uses-permission android:name="android.permission.FOREGROUND_SERVICE_MEDIA_PLAYBACK" />
<uses-permission android:name="android.permission.FOREGROUND_SERVICE_MICROPHONE" />
<application
android:allowBackup="false"
android:fullBackupContent="false"
android:supportsRtl="true"
android:icon="@mipmap/ic_launcher"
android:label="@string/app_name"
android:theme="@style/AppTheme" >
<activity
android:name="com.google.oboe.samples.liveEffect.MainActivity"
android:label="@string/app_name"
android:screenOrientation="portrait"
android:exported="true">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
<!-- Keeps audio running while the activity is backgrounded; started/stopped
     by MainActivity with ACTION_START/ACTION_STOP. Type must match the
     FOREGROUND_SERVICE_* permissions declared above. -->
<service
android:name=".DuplexStreamForegroundService"
android:foregroundServiceType="mediaPlayback|microphone"
android:exported="false">
</service>
</application>
</manifest>

View file

@ -0,0 +1,46 @@
#
# Copyright 2018 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
cmake_minimum_required(VERSION 3.4.1)
project(liveEffect LANGUAGES C CXX)
# Resolve the samples root (four levels up from src/main/cpp) so shared
# debug utilities and the oboe tree can be referenced with absolute paths.
get_filename_component(SAMPLE_ROOT_DIR
${CMAKE_CURRENT_SOURCE_DIR}/../../../.. ABSOLUTE)
### INCLUDE OBOE LIBRARY ###
# Build oboe from source as a subproject; its binaries land in ./oboe-bin.
set (OBOE_DIR ${SAMPLE_ROOT_DIR}/..)
add_subdirectory(${OBOE_DIR} ./oboe-bin)
add_library(liveEffect
SHARED
LiveEffectEngine.cpp
jni_bridge.cpp
${SAMPLE_ROOT_DIR}/debug-utils/trace.cpp)
target_include_directories(liveEffect
PRIVATE
${SAMPLE_ROOT_DIR}/debug-utils
${OBOE_DIR}/include)
# android/atomic/log are NDK system libraries; oboe is the subproject target.
target_link_libraries(liveEffect
PRIVATE
oboe
android
atomic
log)
# Required for 16 KB page-size devices (Android 15+).
target_link_options(liveEffect PRIVATE "-Wl,-z,max-page-size=16384")
# Enable optimization flags: if having problems with source level debugging,
# disable -Ofast ( and debug ), re-enable it after done debugging.
target_compile_options(liveEffect PRIVATE -Wall -Werror "$<$<CONFIG:RELEASE>:-Ofast>")

View file

@ -0,0 +1,54 @@
/*
 * Copyright 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef SAMPLES_FULLDUPLEXPASS_H
#define SAMPLES_FULLDUPLEXPASS_H

// NOTE(review): this header includes nothing itself; it relies on being
// included after <oboe/Oboe.h> (for oboe::FullDuplexStream) and a header
// that pulls in std::min, as LiveEffectEngine.h does — confirm before
// including it elsewhere.

// Trivial full-duplex processor: copies input samples to the output with a
// small gain reduction. Customize onBothStreamsReady() to change the effect.
class FullDuplexPass : public oboe::FullDuplexStream {
public:
    virtual oboe::DataCallbackResult
    onBothStreamsReady(
            const void *inputData,
            int numInputFrames,
            void *outputData,
            int numOutputFrames) {
        // Copy the input samples to the output with a little arbitrary gain change.
        // This code assumes the data format for both streams is Float.
        const float *inputFloats = static_cast<const float *>(inputData);
        float *outputFloats = static_cast<float *>(outputData);
        // It also assumes the channel count for each stream is the same.
        int32_t samplesPerFrame = getOutputStream()->getChannelCount();
        int32_t numInputSamples = numInputFrames * samplesPerFrame;
        int32_t numOutputSamples = numOutputFrames * samplesPerFrame;
        // It is possible that there may be fewer input than output samples.
        int32_t samplesToProcess = std::min(numInputSamples, numOutputSamples);
        for (int32_t i = 0; i < samplesToProcess; i++) {
            // Use a float literal (0.95f): the double literal 0.95 forced a
            // float->double->float round trip on every sample in the
            // real-time audio callback.
            *outputFloats++ = *inputFloats++ * 0.95f; // do some arbitrary processing
        }
        // If there are fewer input samples then clear the rest of the buffer.
        // (samplesLeft is negative when input > output; the loop is then a no-op.)
        int32_t samplesLeft = numOutputSamples - numInputSamples;
        for (int32_t i = 0; i < samplesLeft; i++) {
            *outputFloats++ = 0.0f; // silence
        }
        return oboe::DataCallbackResult::Continue;
    }
};
#endif //SAMPLES_FULLDUPLEXPASS_H

View file

@ -0,0 +1,245 @@
/**
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cassert>
#include <logging_macros.h>
#include "LiveEffectEngine.h"
// The pass-through copy in FullDuplexPass assumes matching channel counts,
// so fail fast (debug builds only) if the configuration ever diverges.
LiveEffectEngine::LiveEffectEngine() {
assert(mOutputChannelCount == mInputChannelCount);
}
// Select the recording device; takes effect the next time streams are opened.
void LiveEffectEngine::setRecordingDeviceId(int32_t deviceId) {
mRecordingDeviceId = deviceId;
}
// Select the playback device; takes effect the next time streams are opened.
void LiveEffectEngine::setPlaybackDeviceId(int32_t deviceId) {
mPlaybackDeviceId = deviceId;
}
// Ask oboe whether AAudio is the recommended backend on this device.
bool LiveEffectEngine::isAAudioRecommended() {
return oboe::AudioStreamBuilder::isAAudioRecommended();
}
// Switch between AAudio and OpenSL ES. Refused (returns false) while the
// effect is running, because open streams already use the previous API.
bool LiveEffectEngine::setAudioApi(oboe::AudioApi api) {
if (mIsEffectOn) return false;
mAudioApi = api;
return true;
}
// Turn the full-duplex effect on or off. Opening the streams can fail, so
// mIsEffectOn is only flipped to true after openStreams() succeeds.
// Returns true on success; a no-op (isOn == current state) also returns true.
bool LiveEffectEngine::setEffectOn(bool isOn) {
bool success = true;
if (isOn != mIsEffectOn) {
if (isOn) {
success = openStreams() == oboe::Result::OK;
if (success) {
mIsEffectOn = isOn;
}
} else {
closeStreams();
mIsEffectOn = isOn;
}
}
return success;
}
// Stop the duplex processor, then close playback before recording
// (ordering rationale below), and finally release the processor.
void LiveEffectEngine::closeStreams() {
/*
* Note: The order of events is important here.
* The playback stream must be closed before the recording stream. If the
* recording stream were to be closed first the playback stream's
* callback may attempt to read from the recording stream
* which would cause the app to crash since the recording stream would be
* null.
*/
mDuplexStream->stop();
closeStream(mPlayStream);
closeStream(mRecordingStream);
mDuplexStream.reset();
}
// Open the playback stream, then a recording stream matched to its sample
// rate, and wire both into a FullDuplexPass. On any failure, previously
// opened streams are closed and the error result is returned.
oboe::Result LiveEffectEngine::openStreams() {
// Note: The order of stream creation is important. We create the playback
// stream first, then use properties from the playback stream
// (e.g. sample rate) to create the recording stream. By matching the
// properties we should get the lowest latency path
oboe::AudioStreamBuilder inBuilder, outBuilder;
setupPlaybackStreamParameters(&outBuilder);
oboe::Result result = outBuilder.openStream(mPlayStream);
if (result != oboe::Result::OK) {
LOGE("Failed to open output stream. Error %s", oboe::convertToText(result));
// Reset the cached rate so a stale value is not reused on a later attempt.
mSampleRate = oboe::kUnspecified;
return result;
} else {
// The input stream needs to run at the same sample rate as the output.
mSampleRate = mPlayStream->getSampleRate();
}
warnIfNotLowLatency(mPlayStream);
setupRecordingStreamParameters(&inBuilder, mSampleRate);
result = inBuilder.openStream(mRecordingStream);
if (result != oboe::Result::OK) {
LOGE("Failed to open input stream. Error %s", oboe::convertToText(result));
// Don't leak the already-open playback stream on partial failure.
closeStream(mPlayStream);
return result;
}
warnIfNotLowLatency(mRecordingStream);
mDuplexStream = std::make_unique<FullDuplexPass>();
mDuplexStream->setSharedInputStream(mRecordingStream);
mDuplexStream->setSharedOutputStream(mPlayStream);
mDuplexStream->start();
return result;
}
/**
* Sets the stream parameters which are specific to recording,
* including the sample rate which is determined from the
* playback stream.
*
* @param builder The recording stream builder
* @param sampleRate The desired sample rate of the recording stream
* @return the same builder, with common parameters also applied
*/
oboe::AudioStreamBuilder *LiveEffectEngine::setupRecordingStreamParameters(
oboe::AudioStreamBuilder *builder, int32_t sampleRate) {
// This sample uses blocking read() because we don't specify a callback
builder->setDeviceId(mRecordingDeviceId)
->setDirection(oboe::Direction::Input)
->setSampleRate(sampleRate)
->setChannelCount(mInputChannelCount);
return setupCommonStreamParameters(builder);
}
/**
* Sets the stream parameters which are specific to playback, including device
* id and the dataCallback function, which must be set for low latency
* playback.
* @param builder The playback stream builder
* @return the same builder, with common parameters also applied
*/
oboe::AudioStreamBuilder *LiveEffectEngine::setupPlaybackStreamParameters(
oboe::AudioStreamBuilder *builder) {
// This engine is both the data callback and the error callback target.
builder->setDataCallback(this)
->setErrorCallback(this)
->setDeviceId(mPlaybackDeviceId)
->setDirection(oboe::Direction::Output)
->setChannelCount(mOutputChannelCount);
return setupCommonStreamParameters(builder);
}
/**
* Set the stream parameters which are common to both recording and playback
* streams.
* @param builder The playback or recording stream builder
* @return the same builder, for chaining
*/
oboe::AudioStreamBuilder *LiveEffectEngine::setupCommonStreamParameters(
oboe::AudioStreamBuilder *builder) {
// We request EXCLUSIVE mode since this will give us the lowest possible
// latency.
// If EXCLUSIVE mode isn't available the builder will fall back to SHARED
// mode.
builder->setAudioApi(mAudioApi)
->setFormat(mFormat)
->setFormatConversionAllowed(true)
->setSharingMode(oboe::SharingMode::Exclusive)
->setPerformanceMode(oboe::PerformanceMode::LowLatency);
return builder;
}
/**
* Close the stream. AudioStream::close() is a blocking call so
* the application does not need to add synchronization between
* onAudioReady() function and the thread calling close().
* [the closing thread is the UI thread in this sample].
* @param stream the stream to close
*/
void LiveEffectEngine::closeStream(std::shared_ptr<oboe::AudioStream> &stream) {
if (stream) {
// Stop first; errors here are logged but do not prevent the close.
oboe::Result result = stream->stop();
if (result != oboe::Result::OK) {
LOGW("Error stopping stream: %s", oboe::convertToText(result));
}
result = stream->close();
if (result != oboe::Result::OK) {
LOGE("Error closing stream: %s", oboe::convertToText(result));
} else {
LOGW("Successfully closed streams");
}
// Drop our reference so the shared_ptr can release the stream.
stream.reset();
}
}
/**
* Warn in logcat if non-low latency stream is created
* @param stream: newly created stream
*
*/
void LiveEffectEngine::warnIfNotLowLatency(std::shared_ptr<oboe::AudioStream> &stream) {
if (stream->getPerformanceMode() != oboe::PerformanceMode::LowLatency) {
LOGW(
"Stream is NOT low latency."
"Check your requested format, sample rate and channel count");
}
}
/**
* Handles playback stream's audio request. In this sample, we simply block-read
* from the record stream for the required samples.
*
* @param oboeStream: the playback stream that requesting additional samples
* @param audioData: the buffer to load audio samples for playback stream
* @param numFrames: number of frames to load to audioData buffer
* @return: DataCallbackResult::Continue.
*/
oboe::DataCallbackResult LiveEffectEngine::onAudioReady(
oboe::AudioStream *oboeStream, void *audioData, int32_t numFrames) {
// Delegate to the duplex helper, which reads input and fills audioData.
return mDuplexStream->onAudioReady(oboeStream, audioData, numFrames);
}
/**
* Oboe notifies the application for "about to close the stream".
*
* @param oboeStream: the stream to close
* @param error: oboe's reason for closing the stream
*/
void LiveEffectEngine::onErrorBeforeClose(oboe::AudioStream *oboeStream,
oboe::Result error) {
LOGE("%s stream Error before close: %s",
oboe::convertToText(oboeStream->getDirection()),
oboe::convertToText(error));
}
/**
* Oboe notifies application that "the stream is closed"
*
* @param oboeStream
* @param error
*/
void LiveEffectEngine::onErrorAfterClose(oboe::AudioStream *oboeStream,
oboe::Result error) {
LOGE("%s stream Error after close: %s",
oboe::convertToText(oboeStream->getDirection()),
oboe::convertToText(error));
// Tear down both streams; they are rebuilt below if this was a disconnect.
closeStreams();
// Restart the stream if the error is a disconnect.
if (error == oboe::Result::ErrorDisconnected) {
LOGI("Restarting AudioStream");
openStreams();
}
}

View file

@ -0,0 +1,83 @@
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef OBOE_LIVEEFFECTENGINE_H
#define OBOE_LIVEEFFECTENGINE_H
#include <jni.h>
#include <oboe/Oboe.h>
#include <string>
#include <thread>
#include "FullDuplexPass.h"
// Native engine for the LiveEffect sample: owns one recording and one
// playback stream and loops audio between them via a FullDuplexPass.
// Driven from Java through jni_bridge.cpp.
class LiveEffectEngine : public oboe::AudioStreamCallback {
public:
LiveEffectEngine();
// Device selections take effect the next time streams are opened.
void setRecordingDeviceId(int32_t deviceId);
void setPlaybackDeviceId(int32_t deviceId);
/**
* Turn the duplex effect on or off (opens/closes both streams).
* @param isOn
* @return true if it succeeds
*/
bool setEffectOn(bool isOn);
/*
* oboe::AudioStreamDataCallback interface implementation
*/
oboe::DataCallbackResult onAudioReady(oboe::AudioStream *oboeStream,
void *audioData, int32_t numFrames) override;
/*
* oboe::AudioStreamErrorCallback interface implementation
*/
void onErrorBeforeClose(oboe::AudioStream *oboeStream, oboe::Result error) override;
void onErrorAfterClose(oboe::AudioStream *oboeStream, oboe::Result error) override;
// Select AAudio or OpenSL ES; refused while the effect is running.
bool setAudioApi(oboe::AudioApi);
bool isAAudioRecommended(void);
private:
bool mIsEffectOn = false;
int32_t mRecordingDeviceId = oboe::kUnspecified;
int32_t mPlaybackDeviceId = oboe::kUnspecified;
const oboe::AudioFormat mFormat = oboe::AudioFormat::Float; // for easier processing
oboe::AudioApi mAudioApi = oboe::AudioApi::AAudio;
// Sample rate of the playback stream; the recording stream is matched to it.
int32_t mSampleRate = oboe::kUnspecified;
const int32_t mInputChannelCount = oboe::ChannelCount::Stereo;
const int32_t mOutputChannelCount = oboe::ChannelCount::Stereo;
std::unique_ptr<FullDuplexPass> mDuplexStream;
std::shared_ptr<oboe::AudioStream> mRecordingStream;
std::shared_ptr<oboe::AudioStream> mPlayStream;
oboe::Result openStreams();
void closeStreams();
void closeStream(std::shared_ptr<oboe::AudioStream> &stream);
oboe::AudioStreamBuilder *setupCommonStreamParameters(
oboe::AudioStreamBuilder *builder);
oboe::AudioStreamBuilder *setupRecordingStreamParameters(
oboe::AudioStreamBuilder *builder, int32_t sampleRate);
oboe::AudioStreamBuilder *setupPlaybackStreamParameters(
oboe::AudioStreamBuilder *builder);
void warnIfNotLowLatency(std::shared_ptr<oboe::AudioStream> &stream);
};

View file

@ -0,0 +1,134 @@
/**
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <jni.h>
#include <logging_macros.h>
#include "LiveEffectEngine.h"
// API selector values; must stay in sync with the Java-side constants
// passed to setAPI().
static const int kOboeApiAAudio = 0;
static const int kOboeApiOpenSLES = 1;
// Single process-wide engine instance shared by all JNI entry points.
static LiveEffectEngine *engine = nullptr;
extern "C" {
// Create the engine if it does not exist yet. Idempotent.
JNIEXPORT jboolean JNICALL
Java_com_google_oboe_samples_liveEffect_LiveEffectEngine_create(JNIEnv *env,
jclass) {
if (engine == nullptr) {
engine = new LiveEffectEngine();
}
return (engine != nullptr) ? JNI_TRUE : JNI_FALSE;
}
// Stop the effect (closing any open streams) and destroy the engine.
JNIEXPORT void JNICALL
Java_com_google_oboe_samples_liveEffect_LiveEffectEngine_delete(JNIEnv *env,
jclass) {
if (engine) {
engine->setEffectOn(false);
delete engine;
engine = nullptr;
}
}
// Toggle the duplex effect; returns JNI_FALSE if the engine is missing or
// the streams failed to open.
JNIEXPORT jboolean JNICALL
Java_com_google_oboe_samples_liveEffect_LiveEffectEngine_setEffectOn(
JNIEnv *env, jclass, jboolean isEffectOn) {
if (engine == nullptr) {
LOGE(
"Engine is null, you must call createEngine before calling this "
"method");
return JNI_FALSE;
}
return engine->setEffectOn(isEffectOn) ? JNI_TRUE : JNI_FALSE;
}
// Forward the chosen recording device id to the engine.
JNIEXPORT void JNICALL
Java_com_google_oboe_samples_liveEffect_LiveEffectEngine_setRecordingDeviceId(
JNIEnv *env, jclass, jint deviceId) {
if (engine == nullptr) {
LOGE(
"Engine is null, you must call createEngine before calling this "
"method");
return;
}
engine->setRecordingDeviceId(deviceId);
}
// Forward the chosen playback device id to the engine.
JNIEXPORT void JNICALL
Java_com_google_oboe_samples_liveEffect_LiveEffectEngine_setPlaybackDeviceId(
JNIEnv *env, jclass, jint deviceId) {
if (engine == nullptr) {
LOGE(
"Engine is null, you must call createEngine before calling this "
"method");
return;
}
engine->setPlaybackDeviceId(deviceId);
}
// Map the Java-side selector to an oboe::AudioApi and apply it.
// Returns JNI_FALSE for unknown selectors or if the effect is running.
JNIEXPORT jboolean JNICALL
Java_com_google_oboe_samples_liveEffect_LiveEffectEngine_setAPI(JNIEnv *env,
jclass type,
jint apiType) {
if (engine == nullptr) {
LOGE(
"Engine is null, you must call createEngine "
"before calling this method");
return JNI_FALSE;
}
oboe::AudioApi audioApi;
switch (apiType) {
case kOboeApiAAudio:
audioApi = oboe::AudioApi::AAudio;
break;
case kOboeApiOpenSLES:
audioApi = oboe::AudioApi::OpenSLES;
break;
default:
LOGE("Unknown API selection to setAPI() %d", apiType);
return JNI_FALSE;
}
return engine->setAudioApi(audioApi) ? JNI_TRUE : JNI_FALSE;
}
// Query whether AAudio is recommended on this device.
JNIEXPORT jboolean JNICALL
Java_com_google_oboe_samples_liveEffect_LiveEffectEngine_isAAudioRecommended(
JNIEnv *env, jclass type) {
if (engine == nullptr) {
LOGE(
"Engine is null, you must call createEngine "
"before calling this method");
return JNI_FALSE;
}
return engine->isAAudioRecommended() ? JNI_TRUE : JNI_FALSE;
}
// Record the device's preferred sample rate / burst size so oboe can open
// low-latency streams. Safe to call before the engine exists.
JNIEXPORT void JNICALL
Java_com_google_oboe_samples_liveEffect_LiveEffectEngine_native_1setDefaultStreamValues(JNIEnv *env,
jclass type,
jint sampleRate,
jint framesPerBurst) {
oboe::DefaultStreamValues::SampleRate = (int32_t) sampleRate;
oboe::DefaultStreamValues::FramesPerBurst = (int32_t) framesPerBurst;
}
} // extern "C"

View file

@ -0,0 +1,40 @@
# Copy shared STL files to Android Studio output directory so they can be
# packaged in the APK.
# Usage:
#
#   find_package(ndk-stl REQUIRED)
#
# or
#
#   find_package(ndk-stl REQUIRED PATHS ".")

# Quote the expansion: the original unquoted form
# `if(NOT ${ANDROID_STL} MATCHES "_shared")` expands to a malformed if()
# and aborts the configure step whenever ANDROID_STL is unset or empty.
if(NOT "${ANDROID_STL}" MATCHES "_shared")
  return()
endif()

# Copies the runtime .so for the current ABI into the library output
# directory so Gradle packages it into the APK.
#   lib_path: subdirectory under the NDK's sources/cxx-stl tree
#   so_base:  library name without the "lib" prefix and ".so" suffix
function(configure_shared_stl lib_path so_base)
  message("Configuring STL ${so_base} for ${ANDROID_ABI}")
  configure_file(
    "${ANDROID_NDK}/sources/cxx-stl/${lib_path}/libs/${ANDROID_ABI}/lib${so_base}.so"
    "${CMAKE_LIBRARY_OUTPUT_DIRECTORY}/lib${so_base}.so"
    COPYONLY)
endfunction()

if("${ANDROID_STL}" STREQUAL "libstdc++")
  # The default minimal system C++ runtime library — nothing to copy.
elseif("${ANDROID_STL}" STREQUAL "gabi++_shared")
  # The GAbi++ runtime (shared).
  message(FATAL_ERROR "gabi++_shared was not configured by ndk-stl package")
elseif("${ANDROID_STL}" STREQUAL "stlport_shared")
  # The STLport runtime (shared).
  configure_shared_stl("stlport" "stlport_shared")
elseif("${ANDROID_STL}" STREQUAL "gnustl_shared")
  # The GNU STL (shared).
  configure_shared_stl("gnu-libstdc++/4.9" "gnustl_shared")
elseif("${ANDROID_STL}" STREQUAL "c++_shared")
  # The LLVM libc++ runtime (shared).
  configure_shared_stl("llvm-libc++" "c++_shared")
else()
  message(FATAL_ERROR "STL configuration ANDROID_STL=${ANDROID_STL} is not supported")
endif()

View file

@ -0,0 +1,92 @@
/*
* Copyright 2024 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.oboe.samples.liveEffect;
import android.app.ForegroundServiceStartNotAllowedException;
import android.app.Notification;
import android.app.NotificationChannel;
import android.app.NotificationManager;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.pm.ServiceInfo;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.IBinder;
import android.os.Looper;
import android.os.Message;
import android.util.Log;
import android.widget.Toast;
import androidx.core.app.NotificationCompat;
import androidx.core.app.ServiceCompat;
import androidx.core.content.ContextCompat;
// Foreground service that keeps the duplex audio path alive while the app is
// backgrounded. Controlled via ACTION_START / ACTION_STOP intents sent by
// MainActivity; does no audio work itself.
public class DuplexStreamForegroundService extends Service {
private static final String TAG = "DuplexStreamFS";
public static final String ACTION_START = "ACTION_START";
public static final String ACTION_STOP = "ACTION_STOP";
@Override
public IBinder onBind(Intent intent) {
// We don't provide binding, so return null
return null;
}
// Builds the persistent notification required by startForeground().
// Returns null below API 26 (O); callers only use it on API 30+ paths.
private Notification buildNotification() {
NotificationManager manager = (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
manager.createNotificationChannel(new NotificationChannel(
"all",
"All Notifications",
NotificationManager.IMPORTANCE_NONE));
return new Notification.Builder(this, "all")
.setContentTitle("Playing/recording audio")
.setContentText("playing/recording...")
.setSmallIcon(R.mipmap.ic_launcher)
.build();
}
return null;
}
// Promotes/demotes the service based on the intent action. The foreground
// types must match the manifest's foregroundServiceType declaration.
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
Log.i(TAG, "Receive onStartCommand" + intent);
switch (intent.getAction()) {
case ACTION_START:
Log.i(TAG, "Receive ACTION_START" + intent.getExtras());
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {
startForeground(1, buildNotification(),
ServiceInfo.FOREGROUND_SERVICE_TYPE_MEDIA_PLAYBACK
| ServiceInfo.FOREGROUND_SERVICE_TYPE_MICROPHONE);
}
break;
case ACTION_STOP:
Log.i(TAG, "Receive ACTION_STOP" + intent.getExtras());
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {
stopForeground(STOP_FOREGROUND_REMOVE);
}
break;
}
// Don't auto-restart with a null intent if the process is killed.
return START_NOT_STICKY;
}
}

View file

@ -0,0 +1,52 @@
package com.google.oboe.samples.liveEffect;
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import android.content.Context;
import android.media.AudioManager;
import android.os.Build;
/**
 * Java facade over the native liveEffect library (jni_bridge.cpp).
 * Implemented as a single-element enum so the native library is loaded
 * exactly once per process.
 */
public enum LiveEffectEngine {
    INSTANCE;

    // Load native library
    static {
        System.loadLibrary("liveEffect");
    }

    // Native methods — implemented in jni_bridge.cpp.
    static native boolean create();
    static native boolean isAAudioRecommended();
    static native boolean setAPI(int apiType);
    static native boolean setEffectOn(boolean isEffectOn);
    static native void setRecordingDeviceId(int deviceId);
    static native void setPlaybackDeviceId(int deviceId);
    static native void delete();
    static native void native_setDefaultStreamValues(int defaultSampleRate, int defaultFramesPerBurst);

    /**
     * Reads the device's preferred output sample rate and burst size and
     * forwards them to the native layer so oboe can open low-latency streams.
     * No-op below API 17, where these AudioManager properties don't exist.
     *
     * @param context any Context; used only to reach the AudioManager
     */
    static void setDefaultStreamValues(Context context) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1){
            AudioManager myAudioMgr = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
            // AudioManager.getProperty() is documented to return null when the
            // property is unsupported; the original code then crashed in
            // Integer.parseInt(). Fall back to common defaults instead.
            String sampleRateStr = myAudioMgr.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
            int defaultSampleRate = (sampleRateStr != null) ? Integer.parseInt(sampleRateStr) : 48000;
            String framesPerBurstStr = myAudioMgr.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
            int defaultFramesPerBurst = (framesPerBurstStr != null) ? Integer.parseInt(framesPerBurstStr) : 256;
            native_setDefaultStreamValues(defaultSampleRate, defaultFramesPerBurst);
        }
    }
}

View file

@ -0,0 +1,299 @@
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.oboe.samples.liveEffect;
import static com.google.oboe.samples.liveEffect.DuplexStreamForegroundService.ACTION_START;
import static com.google.oboe.samples.liveEffect.DuplexStreamForegroundService.ACTION_STOP;
import android.Manifest;
import android.app.Activity;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.media.AudioManager;
import android.os.Build;
import android.os.Bundle;
import androidx.annotation.NonNull;
import androidx.core.app.ActivityCompat;
import android.util.Log;
import android.view.View;
import android.widget.AdapterView;
import android.widget.Button;
import android.widget.RadioButton;
import android.widget.RadioGroup;
import android.widget.TextView;
import android.widget.Toast;
import com.google.oboe.samples.audio_device.AudioDeviceListEntry;
import com.google.oboe.samples.audio_device.AudioDeviceSpinner;
/**
* TODO: Update README.md and go through and comment sample
*/
public class MainActivity extends Activity
implements ActivityCompat.OnRequestPermissionsResultCallback {
private static final String TAG = MainActivity.class.getName();
private static final int AUDIO_EFFECT_REQUEST = 0;
private static final int OBOE_API_AAUDIO = 0;
private static final int OBOE_API_OPENSL_ES=1;
private TextView statusText;
private Button toggleEffectButton;
private AudioDeviceSpinner recordingDeviceSpinner;
private AudioDeviceSpinner playbackDeviceSpinner;
private boolean isPlaying = false;
private int apiSelection = OBOE_API_AAUDIO;
private boolean mAAudioRecommended = true;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
statusText = findViewById(R.id.status_view_text);
toggleEffectButton = findViewById(R.id.button_toggle_effect);
toggleEffectButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
toggleEffect();
}
});
toggleEffectButton.setText(getString(R.string.start_effect));
recordingDeviceSpinner = findViewById(R.id.recording_devices_spinner);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
recordingDeviceSpinner.setDirectionType(AudioManager.GET_DEVICES_INPUTS);
recordingDeviceSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> adapterView, View view, int i, long l) {
LiveEffectEngine.setRecordingDeviceId(getRecordingDeviceId());
}
@Override
public void onNothingSelected(AdapterView<?> adapterView) {
// Do nothing
}
});
}
playbackDeviceSpinner = findViewById(R.id.playback_devices_spinner);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
playbackDeviceSpinner.setDirectionType(AudioManager.GET_DEVICES_OUTPUTS);
playbackDeviceSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> adapterView, View view, int i, long l) {
LiveEffectEngine.setPlaybackDeviceId(getPlaybackDeviceId());
}
@Override
public void onNothingSelected(AdapterView<?> adapterView) {
// Do nothing
}
});
}
((RadioGroup)findViewById(R.id.apiSelectionGroup)).check(R.id.aaudioButton);
findViewById(R.id.aaudioButton).setOnClickListener(new RadioButton.OnClickListener(){
@Override
public void onClick(View v) {
if (((RadioButton)v).isChecked()) {
apiSelection = OBOE_API_AAUDIO;
setSpinnersEnabled(true);
}
}
});
findViewById(R.id.slesButton).setOnClickListener(new RadioButton.OnClickListener(){
@Override
public void onClick(View v) {
if (((RadioButton)v).isChecked()) {
apiSelection = OBOE_API_OPENSL_ES;
setSpinnersEnabled(false);
}
}
});
LiveEffectEngine.setDefaultStreamValues(this);
setVolumeControlStream(AudioManager.STREAM_MUSIC);
if (!isRecordPermissionGranted()){
requestRecordPermission();
} else {
startForegroundService();
}
onStartTest();
}
// Enables or disables the audio-API radio buttons and device spinners.
// If AAudio is not recommended on this device, the selection is forced to
// OpenSL ES and the AAudio button stays permanently disabled.
private void EnableAudioApiUI(boolean enable) {
if(apiSelection == OBOE_API_AAUDIO && !mAAudioRecommended)
{
apiSelection = OBOE_API_OPENSL_ES;
}
findViewById(R.id.slesButton).setEnabled(enable);
if(!mAAudioRecommended) {
findViewById(R.id.aaudioButton).setEnabled(false);
} else {
findViewById(R.id.aaudioButton).setEnabled(enable);
}
// Reflect the (possibly forced) selection in the radio group.
((RadioGroup)findViewById(R.id.apiSelectionGroup))
.check(apiSelection == OBOE_API_AAUDIO ? R.id.aaudioButton : R.id.slesButton);
setSpinnersEnabled(enable);
}
@Override
protected void onStart() {
// No extra work on start; the engine lifecycle is driven by
// onStartTest()/onStopTest(). Override kept explicit for clarity.
super.onStart();
}
@Override
protected void onResume() {
// Intentionally delegates to super only; streams keep running across
// resume/pause so the effect is not interrupted.
super.onResume();
}
@Override
protected void onPause() {
// Intentionally delegates to super only; see onDestroy() for teardown.
super.onPause();
}
@Override
protected void onDestroy() {
// Tear down the native engine first so no audio callbacks outlive the UI.
onStopTest();
// Ask the companion foreground service (Android R+) to stop as well.
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {
Intent serviceIntent = new Intent(ACTION_STOP, null, this,
DuplexStreamForegroundService.class);
startForegroundService(serviceIntent);
}
super.onDestroy();
}
// Creates the native engine, queries whether AAudio is recommended on this
// device, refreshes the UI accordingly, and applies the current API choice.
private void onStartTest() {
LiveEffectEngine.create();
mAAudioRecommended = LiveEffectEngine.isAAudioRecommended();
EnableAudioApiUI(true);
LiveEffectEngine.setAPI(apiSelection);
}
// Stops any running effect and releases the native engine.
private void onStopTest() {
stopEffect();
LiveEffectEngine.delete();
}
/** Flips the live effect between playing and stopped. */
public void toggleEffect() {
    if (isPlaying) {
        stopEffect();
        return;
    }
    // Make sure the engine uses the currently selected API before starting.
    LiveEffectEngine.setAPI(apiSelection);
    startEffect();
}
// Asks the native engine to open and start the duplex streams, then updates
// the status text and button to reflect success or failure.
private void startEffect() {
Log.d(TAG, "Attempting to start");
boolean success = LiveEffectEngine.setEffectOn(true);
if (success) {
statusText.setText(R.string.status_playing);
toggleEffectButton.setText(R.string.stop_effect);
isPlaying = true;
// Lock API/device selection while the streams are running.
EnableAudioApiUI(false);
} else {
statusText.setText(R.string.status_open_failed);
isPlaying = false;
}
}
// Stops the running effect and restores the idle UI state.
private void stopEffect() {
Log.d(TAG, "Playing, attempting to stop");
LiveEffectEngine.setEffectOn(false);
resetStatusView();
toggleEffectButton.setText(R.string.start_effect);
isPlaying = false;
// Unlock the API/device selection again.
EnableAudioApiUI(true);
}
// Enables or disables the device-selection spinners. When OpenSL ES is the
// selected API the spinners are reset to the default entry and always
// disabled, since OpenSL ES does not allow choosing a specific device.
private void setSpinnersEnabled(boolean isEnabled) {
    boolean slesSelected = ((RadioButton) findViewById(R.id.slesButton)).isChecked();
    if (slesSelected) {
        playbackDeviceSpinner.setSelection(0);
        recordingDeviceSpinner.setSelection(0);
    }
    boolean enableSpinners = isEnabled && !slesSelected;
    recordingDeviceSpinner.setEnabled(enableSpinners);
    playbackDeviceSpinner.setEnabled(enableSpinners);
}
// Returns the id of the audio device currently selected in the recording spinner.
private int getRecordingDeviceId(){
return ((AudioDeviceListEntry)recordingDeviceSpinner.getSelectedItem()).getId();
}
// Returns the id of the audio device currently selected in the playback spinner.
private int getPlaybackDeviceId(){
return ((AudioDeviceListEntry)playbackDeviceSpinner.getSelectedItem()).getId();
}
// True when the RECORD_AUDIO runtime permission has already been granted.
private boolean isRecordPermissionGranted() {
return (ActivityCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO) ==
PackageManager.PERMISSION_GRANTED);
}
// Requests the RECORD_AUDIO permission from the user; the outcome is
// delivered to onRequestPermissionsResult() tagged with AUDIO_EFFECT_REQUEST.
private void requestRecordPermission(){
ActivityCompat.requestPermissions(
this,
new String[]{Manifest.permission.RECORD_AUDIO},
AUDIO_EFFECT_REQUEST);
}
// Restores the default status text (the feedback-loop warning).
private void resetStatusView() {
statusText.setText(R.string.status_warning);
}
// Starts the companion DuplexStreamForegroundService on Android R and above;
// no-op on earlier releases.
private void startForegroundService() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {
Intent serviceIntent = new Intent(ACTION_START, null, this,
DuplexStreamForegroundService.class);
startForegroundService(serviceIntent);
}
}
// Handles the result of the RECORD_AUDIO request issued by
// requestRecordPermission(). On denial the UI is locked down; on grant the
// foreground service is started.
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions,
@NonNull int[] grantResults) {
// Forward any request that is not ours to the framework.
if (AUDIO_EFFECT_REQUEST != requestCode) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
return;
}
if (grantResults.length != 1 ||
grantResults[0] != PackageManager.PERMISSION_GRANTED) {
// User denied the permission, without this we cannot record audio
// Show a toast and update the status accordingly
statusText.setText(R.string.status_record_audio_denied);
Toast.makeText(getApplicationContext(),
getString(R.string.need_record_audio_permission),
Toast.LENGTH_SHORT)
.show();
EnableAudioApiUI(false);
toggleEffectButton.setEnabled(false);
} else {
// Permission was granted, start foreground service.
startForegroundService();
}
}
}

View file

@ -0,0 +1,128 @@
<?xml version="1.0" encoding="utf-8"?>
<!--
~ Copyright (C) 2018 The Android Open Source Project
~
~ Licensed under the Apache License, Version 2.0 (the "License");
~ you may not use this file except in compliance with the License.
~ You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context="com.google.oboe.samples.liveEffect.MainActivity"
tools:layout_editor_absoluteY="81dp">
<RadioGroup xmlns:android="http://schemas.android.com/apk/res/android"
android:id="@+id/apiSelectionGroup"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:layout_marginStart="@dimen/activity_horizontal_margin"
android:layout_marginLeft="@dimen/activity_horizontal_margin"
android:layout_marginTop="@dimen/activity_vertical_margin"
android:orientation="horizontal"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintTop_toTopOf="parent">
<TextView
android:id="@+id/apiTextView"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/apiSelection" />
<RadioButton
android:id="@+id/aaudioButton"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginStart="16dp"
android:layout_marginLeft="16dp"
android:text="@string/aaudio" />
<RadioButton
android:id="@+id/slesButton"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginStart="16dp"
android:layout_marginLeft="16dp"
android:text="@string/sles" />
</RadioGroup>
<TextView
android:id="@+id/recDeviceLabel"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginStart="@dimen/activity_horizontal_margin"
android:layout_marginLeft="@dimen/activity_horizontal_margin"
android:layout_marginTop="@dimen/activity_vertical_margin"
android:text="@string/recording_device"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintTop_toBottomOf="@+id/apiSelectionGroup"/>
<com.google.oboe.samples.audio_device.AudioDeviceSpinner
android:id="@+id/recording_devices_spinner"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginStart="@dimen/activity_horizontal_margin"
android:layout_marginLeft="@dimen/activity_horizontal_margin"
android:layout_marginTop="0dp"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintTop_toBottomOf="@+id/recDeviceLabel" />
<TextView
android:id="@+id/playDeviceLabel"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginStart="@dimen/activity_horizontal_margin"
android:layout_marginLeft="@dimen/activity_horizontal_margin"
android:layout_marginTop="@dimen/activity_vertical_margin"
android:text="@string/playback_device"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintTop_toBottomOf="@+id/recording_devices_spinner" />
<com.google.oboe.samples.audio_device.AudioDeviceSpinner
android:id="@+id/playback_devices_spinner"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginStart="@dimen/activity_horizontal_margin"
android:layout_marginLeft="@dimen/activity_horizontal_margin"
android:layout_marginTop="0dp"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintTop_toBottomOf="@+id/playDeviceLabel" />
<Button
android:id="@+id/button_toggle_effect"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="72dp"
android:gravity="center"
android:text="@string/start_effect"
android:textAllCaps="false"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintHorizontal_bias="0.53"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toBottomOf="@+id/playback_devices_spinner" />
<TextView
android:id="@+id/status_view_text"
android:layout_width="0dp"
android:layout_height="60dp"
android:layout_marginStart="@dimen/activity_horizontal_margin"
android:layout_marginEnd="@dimen/activity_horizontal_margin"
android:layout_marginTop="@dimen/activity_vertical_group_margin"
android:lines="6"
android:text="@string/status_warning"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintTop_toBottomOf="@+id/button_toggle_effect"
app:layout_constraintBottom_toBottomOf="parent" />
</androidx.constraintlayout.widget.ConstraintLayout>

View file

@ -0,0 +1,126 @@
<?xml version="1.0" encoding="utf-8"?>
<!--
~ Copyright (C) 2018 The Android Open Source Project
~
~ Licensed under the Apache License, Version 2.0 (the "License");
~ you may not use this file except in compliance with the License.
~ You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context="com.google.oboe.samples.liveEffect.MainActivity"
tools:layout_editor_absoluteY="81dp">
<RadioGroup xmlns:android="http://schemas.android.com/apk/res/android"
android:id="@+id/apiSelectionGroup"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:layout_marginStart="@dimen/activity_horizontal_margin"
android:layout_marginLeft="@dimen/activity_horizontal_margin"
android:layout_marginTop="@dimen/activity_vertical_margin"
android:orientation="horizontal"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintTop_toTopOf="parent">
<TextView
android:id="@+id/apiTextView"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/apiSelection" />
<RadioButton
android:id="@+id/aaudioButton"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginStart="16dp"
android:layout_marginLeft="16dp"
android:text="@string/aaudio" />
<RadioButton
android:id="@+id/slesButton"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginStart="16dp"
android:layout_marginLeft="16dp"
android:text="@string/sles" />
</RadioGroup>
<TextView
android:id="@+id/recDeviceLabel"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginStart="@dimen/activity_horizontal_margin"
android:layout_marginLeft="@dimen/activity_horizontal_margin"
android:layout_marginTop="@dimen/activity_vertical_margin"
android:text="@string/recording_device"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintTop_toBottomOf="@+id/apiSelectionGroup"/>
<com.google.oboe.samples.audio_device.AudioDeviceSpinner
android:id="@+id/recording_devices_spinner"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginStart="@dimen/activity_horizontal_margin"
android:layout_marginLeft="@dimen/activity_horizontal_margin"
android:layout_marginTop="0dp"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintTop_toBottomOf="@+id/recDeviceLabel" />
<TextView
android:id="@+id/playDeviceLabel"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginStart="@dimen/activity_horizontal_margin"
android:layout_marginLeft="@dimen/activity_horizontal_margin"
android:layout_marginTop="@dimen/activity_vertical_margin"
android:text="@string/playback_device"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintTop_toBottomOf="@+id/recording_devices_spinner" />
<com.google.oboe.samples.audio_device.AudioDeviceSpinner
android:id="@+id/playback_devices_spinner"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginStart="@dimen/activity_horizontal_margin"
android:layout_marginLeft="@dimen/activity_horizontal_margin"
android:layout_marginTop="0dp"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintTop_toBottomOf="@+id/playDeviceLabel" />
<Button
android:id="@+id/button_toggle_effect"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:gravity="center"
android:layout_marginTop="@dimen/activity_vertical_margin"
android:textAllCaps="false"
android:text="@string/start_effect"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toBottomOf="@+id/playback_devices_spinner" />
<TextView
android:id="@+id/status_view_text"
android:layout_width="0dp"
android:layout_height="60dp"
android:layout_marginStart="@dimen/activity_horizontal_margin"
android:layout_marginEnd="@dimen/activity_horizontal_margin"
android:layout_marginTop="@dimen/activity_vertical_group_margin"
android:lines="6"
android:text="@string/status_warning"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintTop_toBottomOf="@+id/button_toggle_effect"
app:layout_constraintBottom_toBottomOf="parent" />
</androidx.constraintlayout.widget.ConstraintLayout>

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.2 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.7 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 7.5 KiB

View file

@ -0,0 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<style name="AppTheme" parent="android:Theme.Material.Light">
</style>
</resources>

View file

@ -0,0 +1,6 @@
<resources>
<!-- Example customization of dimensions originally defined in res/values/dimens.xml
(such as screen margins) for screens with more than 820dp of available width. This
would include 7" and 10" devices in landscape (~960dp and ~1280dp respectively). -->
<dimen name="activity_horizontal_margin">64dp</dimen>
</resources>

View file

@ -0,0 +1,3 @@
<resources>
<color name="colorBlue">#4444CC</color>
</resources>

View file

@ -0,0 +1,6 @@
<resources>
<!-- Default screen margins, per the Android Design guidelines. -->
<dimen name="activity_horizontal_margin">16dp</dimen>
<dimen name="activity_vertical_margin">16dp</dimen>
<dimen name="activity_vertical_group_margin">32dp</dimen>
</resources>

View file

@ -0,0 +1,19 @@
<resources>
<string name="app_name">LiveEffect</string>
<string name="action_settings">Settings</string>
<string name="start_effect">Start</string>
<string name="stop_effect">Stop</string>
<string name="need_record_audio_permission">"This sample needs RECORD_AUDIO permission"</string>
<string name="status_playing">Engine Playing ....</string>
<string name="status_open_failed">Engine Failed to Open Streams!</string>
<string name="status_record_audio_denied">Error: Permission for RECORD_AUDIO was denied</string>
<string name="status_touch_to_begin">RECORD_AUDIO permission granted, touch START to begin</string>
<string name="status_warning">Warning: If you run this sample using the built-in microphone
and speaker you may create a feedback loop which will not be pleasant to listen to.</string>
<string name="recording_device">Recording device</string>
<string name="playback_device">Playback device</string>
<string name="apiSelection">APIs</string>
<string name="aaudio">AAudio</string>
<string name="sles">OpenSL ES</string>
</resources>

View file

@ -0,0 +1,8 @@
<resources>
<!-- Base application theme. -->
<style name="AppTheme" parent="android:Theme.Holo.Light.DarkActionBar">
<!-- Customize your theme here. -->
</style>
</resources>

View file

@ -0,0 +1 @@
/build

View file

@ -0,0 +1,45 @@
Mega Drone
==========
Ever wondered what 100 square waves sound like when played together? Well now you can find out!
Mega Drone is an app which creates 100 oscillators, combines their output in a mixer and plays the resulting sound.
This sample demonstrates how to obtain the lowest latency and optimal computational throughput by:
1) Leaving Oboe to choose the best default stream properties for the current device
2) Setting performance mode to LowLatency
3) Setting sharing mode to Exclusive
4) Setting the buffer size to 2 bursts
5) Using the `-Ofast` compiler optimization flag, even when building the `Debug` variant
6) Using [`getExclusiveCores`](https://developer.android.com/reference/android/os/Process#getExclusiveCores()) (API 24+) and thread affinity to bind the audio thread to the best available CPU core(s)
This code was presented at [AES Milan](http://www.aes.org/events/144/) and [Droidcon Berlin](https://www.de.droidcon.com/) as part of a talk on Oboe.
The [following article explaining how to debug CPU performance problems](https://medium.com/@donturner/debugging-audio-glitches-on-android-ed10782f9c64) may also be useful when looking at this code.
Implementation details
---
The stream properties are left for Oboe to choose; as such, the app must output audio data in a format which matches that of the stream.
Four different formats are supported:
|Channel count|Format|
|-------------|------|
|1 - Mono|16-bit int|
|2 - Stereo|16-bit int|
|1 - Mono|Float|
|2 - Stereo|Float|
The signal chain for mono streams is:
Oscillators->Mixer
For stereo chains a mono to stereo converter is added to the end of the chain:
Oscillators->Mixer->MonoToStereo
The compiler optimization flag `-Ofast` can be found in [CMakeLists.txt](CMakeLists.txt).
Screenshots
-----------
![megadrone-screenshot](megadrone-screenshot.png)

View file

@ -0,0 +1,53 @@
// Gradle build script for the MegaDrone sample application.
apply plugin: 'com.android.application'
apply plugin: 'kotlin-android'
android {
defaultConfig {
applicationId "com.google.oboe.samples.megadrone"
minSdkVersion 21
targetSdkVersion 35
compileSdkVersion 35
versionCode 1
versionName "1.0"
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
externalNativeBuild {
cmake {
// Build the native library with C++17 for all common ABIs.
cppFlags "-std=c++17"
abiFilters 'armeabi-v7a', 'arm64-v8a', 'x86', 'x86_64'
}
}
}
signingConfigs {
release {
// NOTE(review): release builds are signed with the shared *debug*
// keystore. Acceptable for a sample; never do this in production.
storeFile new File("${System.properties['user.home']}/.android/debug.keystore")
storePassword 'android'
storeType "jks"
keyAlias 'androiddebugkey'
keyPassword 'android'
}
}
buildTypes {
release {
signingConfig signingConfigs.release
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
debuggable false
}
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_18
targetCompatibility JavaVersion.VERSION_18
}
externalNativeBuild {
cmake {
// Native build is driven by the CMake project below.
path "src/main/cpp/CMakeLists.txt"
}
}
namespace 'com.google.oboe.samples.megadrone'
}
dependencies {
implementation fileTree(dir: 'libs', include: ['*.jar'])
implementation 'androidx.appcompat:appcompat:1.7.0'
implementation 'androidx.constraintlayout:constraintlayout:2.2.1'
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 19 KiB

View file

@ -0,0 +1,21 @@
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile

View file

@ -0,0 +1,22 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android">
<application
android:allowBackup="true"
android:icon="@mipmap/ic_launcher"
android:label="@string/app_name"
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true"
android:theme="@style/AppTheme">
<activity android:name="com.google.oboe.samples.megadrone.MainActivity"
android:screenOrientation="portrait"
android:exported="true">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>

View file

@ -0,0 +1,29 @@
# CMake project for the MegaDrone sample's native library (libmegadrone.so).
# target_link_options() below requires CMake 3.13, so declare that as the
# real minimum (3.4.1 would fail to configure on such a command).
cmake_minimum_required(VERSION 3.13)
### INCLUDE OBOE LIBRARY ###
# Set the path to the Oboe library directory
set (OBOE_DIR ../../../../..)
# Add the Oboe library as a subproject. Since Oboe is an out-of-tree source library we must also
# specify a binary directory
add_subdirectory(${OBOE_DIR} ./oboe-bin)
# Include the Oboe headers
include_directories(${OBOE_DIR}/include ${OBOE_DIR}/samples/shared ${OBOE_DIR}/samples/debug-utils)
### END OBOE INCLUDE SECTION ###
add_library(megadrone SHARED
        native-lib.cpp
        MegaDroneEngine.cpp
)
# PRIVATE: these libraries are implementation details of the shared module and
# must not propagate to any consumer.
target_link_libraries(megadrone PRIVATE log oboe)
# Keep ELF segments 16 KB-aligned for devices with 16 KB pages.
target_link_options(megadrone PRIVATE "-Wl,-z,max-page-size=16384")
# Enable optimization flags: if having problems with source level debugging,
# disable -Ofast ( and debug ), re-enable it after done debugging.
target_compile_options(megadrone PRIVATE -Wall -Werror -Ofast)

View file

@ -0,0 +1,126 @@
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <memory>
#include "MegaDroneEngine.h"
/**
* Main audio engine for the MegaDrone sample. It is responsible for:
*
* - Creating the callback object which will be supplied when constructing the audio stream
* - Setting the CPU core IDs to which the callback thread should bind to
* - Creating the playback stream, including setting the callback object
* - Creating `Synth` which will render the audio inside the callback
* - Starting the playback stream
* - Restarting the playback stream when `restart()` is called by the callback object
*
* @param cpuIds
*/
// Constructor: builds the data/error callbacks and binds the audio callback
// thread to the given CPU cores. The stream itself is created later, in start().
MegaDroneEngine::MegaDroneEngine(std::vector<int> cpuIds) {
createCallback(cpuIds);
}
// Destructor. Callers are expected to have invoked stop() already; if the
// stream is still alive, log an error and stop it here to avoid leaking it.
MegaDroneEngine::~MegaDroneEngine() {
if (mStream) {
LOGE("MegaDroneEngine destructor was called without calling stop()."
"Please call stop() to ensure stream resources are not leaked.");
stop();
}
}
// Forwards a touch event (down/up) to the synth audio source.
// Guard against mAudioSource being null: it is only created inside
// attemptStart(), so a tap arriving before start() succeeds (or after a
// failed start) would previously dereference a null shared_ptr and crash.
void MegaDroneEngine::tap(bool isDown) {
    if (mAudioSource) {
        mAudioSource->tap(isDown);
    }
}
// From IRestartable: invoked by the error callback when the stream is
// disconnected. Tears the stream down and opens a fresh one.
void MegaDroneEngine::restart() {
stop();
start();
}
// Create the playback stream.
// Requests an exclusive, low-latency float stream and attaches the
// previously created data/error callbacks. Sample rate and channel count are
// left to Oboe's device defaults.
oboe::Result MegaDroneEngine::createPlaybackStream() {
oboe::AudioStreamBuilder builder;
return builder.setSharingMode(oboe::SharingMode::Exclusive)
->setPerformanceMode(oboe::PerformanceMode::LowLatency)
->setFormat(oboe::AudioFormat::Float)
->setDataCallback(mDataCallback)
->setErrorCallback(mErrorCallback)
->openStream(mStream);
}
// Create the callback objects and set the data callback's thread affinity to
// the supplied CPU core IDs.
void MegaDroneEngine::createCallback(std::vector<int> cpuIds){
mDataCallback = std::make_shared<DefaultDataCallback>();
// Create the error callback, we supply ourselves as the parent so that we can restart the stream
// when it's disconnected
mErrorCallback = std::make_shared<DefaultErrorCallback>(*this);
// Bind the audio callback to specific CPU cores as this can help avoid underruns caused by
// core migrations
mDataCallback->setCpuIds(cpuIds);
mDataCallback->setThreadAffinityEnabled(true);
}
// Opens and starts the playback stream, retrying on transient failures.
// A stream's device can become disconnected during open, or between open and
// start, so up to four attempts are made, sleeping briefly before each retry
// to give the system time to settle. Returns true once an attempt succeeds.
bool MegaDroneEngine::start() {
    constexpr int kMaxStartAttempts = 4; // matches the original do/while bound
    for (int attempt = 0; attempt < kMaxStartAttempts; ++attempt) {
        if (attempt > 0) {
            usleep(20 * 1000); // Sleep between tries to give the system time to settle.
        }
        if (attemptStart()) {
            return true;
        }
    }
    LOGE("Failed at starting the stream");
    return false;
}
// Makes one attempt to open and start the playback stream.
// On success the Synth source is (re)created to match the stream's actual
// sample rate and channel count. Returns true on success; on failure logs a
// warning and returns false so start() can retry.
bool MegaDroneEngine::attemptStart() {
auto result = createPlaybackStream();
if (result == Result::OK) {
// Create our synthesizer audio source using the properties of the stream
mAudioSource = std::make_shared<Synth>(mStream->getSampleRate(), mStream->getChannelCount());
mDataCallback->reset();
mDataCallback->setSource(std::dynamic_pointer_cast<IRenderableAudio>(mAudioSource));
result = mStream->start();
if (result == Result::OK) {
return true;
} else {
LOGW("Failed attempt at starting the playback stream. Error: %s", convertToText(result));
return false;
}
} else {
LOGW("Failed attempt at creating the playback stream. Error: %s", convertToText(result));
return false;
}
}
// Stops and closes the stream if it is still open, then drops our reference.
// Always returns true.
bool MegaDroneEngine::stop() {
if(mStream && mStream->getState() != oboe::StreamState::Closed) {
mStream->stop();
mStream->close();
}
mStream.reset();
return true;
}

View file

@ -0,0 +1,59 @@
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MEGADRONE_ENGINE_H
#define MEGADRONE_ENGINE_H
#include <oboe/Oboe.h>
#include <vector>
#include "Synth.h"
#include <DefaultDataCallback.h>
#include <TappableAudioSource.h>
#include <IRestartable.h>
#include <DefaultErrorCallback.h>
using namespace oboe;
/**
 * Audio engine for the MegaDrone sample: owns the playback stream, the Synth
 * audio source, and the data/error callbacks. Implements IRestartable so the
 * error callback can rebuild the stream after a disconnect.
 */
class MegaDroneEngine : public IRestartable {
public:
// @param cpuIds CPU core IDs the audio callback thread should bind to.
MegaDroneEngine(std::vector<int> cpuIds);
virtual ~MegaDroneEngine();
// Forward a touch event (down/up) to the audio source.
void tap(bool isDown);
// from IRestartable
virtual void restart() override;
// Open and start the playback stream; returns false if all attempts fail.
bool start();
// Stop and close the stream; always returns true.
bool stop();
private:
std::shared_ptr<AudioStream> mStream;
std::shared_ptr<TappableAudioSource> mAudioSource;
std::shared_ptr<DefaultDataCallback> mDataCallback;
std::shared_ptr<DefaultErrorCallback> mErrorCallback;
// Single open/start attempt; used by start() for its retry loop.
bool attemptStart();
oboe::Result createPlaybackStream();
void createCallback(std::vector<int> cpuIds);
};
#endif //MEGADRONE_ENGINE_H

View file

@ -0,0 +1,71 @@
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MEGADRONE_SYNTH_H
#define MEGADRONE_SYNTH_H
#include <array>
#include <TappableAudioSource.h>
#include <Oscillator.h>
#include <Mixer.h>
#include <MonoToStereo.h>
constexpr int kNumOscillators = 100;
constexpr float kOscBaseFrequency = 116.0;
constexpr float kOscDivisor = 33;
constexpr float kOscAmplitude = 0.009;
/**
 * Renders kNumOscillators oscillators through a mixer; for stereo streams a
 * mono-to-stereo converter is appended as the final output stage.
 */
class Synth : public TappableAudioSource {
public:
Synth(int32_t sampleRate, int32_t channelCount) :
TappableAudioSource(sampleRate, channelCount) {
// Detune each oscillator slightly so the combined output "drones".
for (int i = 0; i < kNumOscillators; ++i) {
mOscs[i].setSampleRate(mSampleRate);
mOscs[i].setFrequency(kOscBaseFrequency + (static_cast<float>(i) / kOscDivisor));
mOscs[i].setAmplitude(kOscAmplitude);
mMixer.addTrack(&mOscs[i]);
}
// Stereo output needs the extra mono->stereo stage after the mixer.
if (mChannelCount == oboe::ChannelCount::Stereo) {
mOutputStage = &mConverter;
} else {
mOutputStage = &mMixer;
}
}
// Turn every oscillator on (touch down) or off (touch up).
void tap(bool isOn) override {
for (auto &osc : mOscs) osc.setWaveOn(isOn);
};
// From IRenderableAudio
void renderAudio(float *audioData, int32_t numFrames) override {
mOutputStage->renderAudio(audioData, numFrames);
};
virtual ~Synth() {
}
private:
// Rendering objects
std::array<Oscillator, kNumOscillators> mOscs;
Mixer mMixer;
MonoToStereo mConverter = MonoToStereo(&mMixer);
IRenderableAudio *mOutputStage; // This will point to either the mixer or converter, so it needs to be raw
};
#endif //MEGADRONE_SYNTH_H

View file

@ -0,0 +1,96 @@
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <jni.h>
#include <string>
#include <vector>
#include "MegaDroneEngine.h"
/**
 * Copies the contents of a Java int[] into a std::vector<int>.
 * Returns an empty vector when the array has no elements.
 */
std::vector<int> convertJavaArrayToVector(JNIEnv *env, jintArray intArray) {
    std::vector<int> result;
    const jsize count = env->GetArrayLength(intArray);
    if (count > 0) {
        jint *pinned = env->GetIntArrayElements(intArray, nullptr);
        result.assign(pinned, pinned + count);
        // Release mode 0: copy back (a no-op here) and unpin/free the buffer.
        env->ReleaseIntArrayElements(intArray, pinned, 0);
    }
    return result;
}
extern "C" {
/**
* Start the audio engine
*
* @param env
* @param instance
* @param jCpuIds - CPU core IDs which the audio process should affine to
* @return a pointer to the audio engine. This should be passed to other methods
*/
JNIEXPORT jlong JNICALL
Java_com_google_oboe_samples_megadrone_MainActivity_startEngine(JNIEnv *env, jobject /*unused*/,
jintArray jCpuIds) {
std::vector<int> cpuIds = convertJavaArrayToVector(env, jCpuIds);
LOGD("cpu ids size: %d", static_cast<int>(cpuIds.size()));
MegaDroneEngine *engine = new MegaDroneEngine(std::move(cpuIds));
// If the stream fails to start, free the engine and hand 0 (null) back to Java.
if (!engine->start()) {
LOGE("Failed to start MegaDrone Engine");
delete engine;
engine = nullptr;
} else {
LOGD("Engine Started");
}
return reinterpret_cast<jlong>(engine);
}
// Stops and deletes the engine previously returned by startEngine().
// A zero/invalid handle is logged and ignored.
JNIEXPORT void JNICALL
Java_com_google_oboe_samples_megadrone_MainActivity_stopEngine(JNIEnv *env, jobject instance,
jlong jEngineHandle) {
auto engine = reinterpret_cast<MegaDroneEngine*>(jEngineHandle);
if (engine) {
engine->stop();
delete engine;
} else {
LOGD("Engine invalid, call startEngine() to create");
}
}
// Forwards a touch-down/up event to the engine identified by the handle.
JNIEXPORT void JNICALL
Java_com_google_oboe_samples_megadrone_MainActivity_tap(JNIEnv *env, jobject instance,
                                                        jlong jEngineHandle, jboolean isDown) {
    auto *engine = reinterpret_cast<MegaDroneEngine*>(jEngineHandle);
    if (engine) {
        engine->tap(isDown);
    } else {
        // Fixed message: there is no createEngine() entry point; the engine is
        // created by startEngine() (consistent with stopEngine()'s message).
        LOGE("Engine handle is invalid, call startEngine() to create a new one");
    }
}
// Stores the device's preferred sample rate and frames-per-burst so that
// streams opened afterwards can default to these low-latency values.
JNIEXPORT void JNICALL
Java_com_google_oboe_samples_megadrone_MainActivity_native_1setDefaultStreamValues(JNIEnv *env,
jclass type,
jint sampleRate,
jint framesPerBurst) {
oboe::DefaultStreamValues::SampleRate = (int32_t) sampleRate;
oboe::DefaultStreamValues::FramesPerBurst = (int32_t) framesPerBurst;
}
} // extern "C"

View file

@ -0,0 +1,106 @@
package com.google.oboe.samples.megadrone;
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import androidx.appcompat.app.AppCompatActivity;
import android.content.Context;
import android.media.AudioManager;
import android.os.Build;
import android.os.Bundle;
import android.util.Log;
import android.view.MotionEvent;
public class MainActivity extends AppCompatActivity {
private final String TAG = MainActivity.class.toString();
private static long mEngineHandle = 0;
private native long startEngine(int[] cpuIds);
private native void stopEngine(long engineHandle);
private native void tap(long engineHandle, boolean isDown);
private static native void native_setDefaultStreamValues(int sampleRate, int framesPerBurst);
// Used to load the 'native-lib' library on application startup.
static {
System.loadLibrary("megadrone");
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
setDefaultStreamValues(this);
}
@Override
protected void onResume(){
super.onResume();
mEngineHandle = startEngine(getExclusiveCores());
}
@Override
protected void onPause(){
stopEngine(mEngineHandle);
super.onPause();
}
@Override
public boolean onTouchEvent(MotionEvent event) {
if (event.getAction() == MotionEvent.ACTION_DOWN){
tap(mEngineHandle, true);
} else if (event.getAction() == MotionEvent.ACTION_UP){
tap(mEngineHandle, false);
}
return super.onTouchEvent(event);
}
// Obtain CPU cores which are reserved for the foreground app. The audio thread can be
// bound to these cores to avoids the risk of it being migrated to slower or more contended
// core(s).
private int[] getExclusiveCores(){
int[] exclusiveCores = {};
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.N) {
Log.w(TAG, "getExclusiveCores() not supported. Only available on API " +
Build.VERSION_CODES.N + "+");
} else {
try {
exclusiveCores = android.os.Process.getExclusiveCores();
} catch (RuntimeException e){
Log.w(TAG, "getExclusiveCores() is not supported on this device.");
}
}
return exclusiveCores;
}
static void setDefaultStreamValues(Context context) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1){
AudioManager myAudioMgr = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
String sampleRateStr = myAudioMgr.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
int defaultSampleRate = Integer.parseInt(sampleRateStr);
String framesPerBurstStr = myAudioMgr.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
int defaultFramesPerBurst = Integer.parseInt(framesPerBurstStr);
native_setDefaultStreamValues(defaultSampleRate, defaultFramesPerBurst);
}
}
}

View file

@ -0,0 +1,34 @@
<vector xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:aapt="http://schemas.android.com/aapt"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path
android:fillType="evenOdd"
android:pathData="M32,64C32,64 38.39,52.99 44.13,50.95C51.37,48.37 70.14,49.57 70.14,49.57L108.26,87.69L108,109.01L75.97,107.97L32,64Z"
android:strokeWidth="1"
android:strokeColor="#00000000">
<aapt:attr name="android:fillColor">
<gradient
android:endX="78.5885"
android:endY="90.9159"
android:startX="48.7653"
android:startY="61.0927"
android:type="linear">
<item
android:color="#44000000"
android:offset="0.0" />
<item
android:color="#00000000"
android:offset="1.0" />
</gradient>
</aapt:attr>
</path>
<path
android:fillColor="#FFFFFF"
android:fillType="nonZero"
android:pathData="M66.94,46.02L66.94,46.02C72.44,50.07 76,56.61 76,64L32,64C32,56.61 35.56,50.11 40.98,46.06L36.18,41.19C35.45,40.45 35.45,39.3 36.18,38.56C36.91,37.81 38.05,37.81 38.78,38.56L44.25,44.05C47.18,42.57 50.48,41.71 54,41.71C57.48,41.71 60.78,42.57 63.68,44.05L69.11,38.56C69.84,37.81 70.98,37.81 71.71,38.56C72.44,39.3 72.44,40.45 71.71,41.19L66.94,46.02ZM62.94,56.92C64.08,56.92 65,56.01 65,54.88C65,53.76 64.08,52.85 62.94,52.85C61.8,52.85 60.88,53.76 60.88,54.88C60.88,56.01 61.8,56.92 62.94,56.92ZM45.06,56.92C46.2,56.92 47.13,56.01 47.13,54.88C47.13,53.76 46.2,52.85 45.06,52.85C43.92,52.85 43,53.76 43,54.88C43,56.01 43.92,56.92 45.06,56.92Z"
android:strokeWidth="1"
android:strokeColor="#00000000" />
</vector>

View file

@ -0,0 +1,170 @@
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path
android:fillColor="#008577"
android:pathData="M0,0h108v108h-108z" />
<path
android:fillColor="#00000000"
android:pathData="M9,0L9,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,0L19,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M29,0L29,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M39,0L39,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M49,0L49,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M59,0L59,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M69,0L69,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M79,0L79,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M89,0L89,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M99,0L99,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,9L108,9"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,19L108,19"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,29L108,29"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,39L108,39"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,49L108,49"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,59L108,59"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,69L108,69"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,79L108,79"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,89L108,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,99L108,99"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,29L89,29"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,39L89,39"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,49L89,49"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,59L89,59"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,69L89,69"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,79L89,79"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M29,19L29,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M39,19L39,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M49,19L49,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M59,19L59,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M69,19L69,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M79,19L79,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
</vector>

View file

@ -0,0 +1,19 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".MainActivity">
<TextView
android:id="@+id/sample_text"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="Tap anywhere to play"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent"
app:layout_constraintTop_toTopOf="parent" />
</androidx.constraintlayout.widget.ConstraintLayout>

View file

@ -0,0 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@drawable/ic_launcher_foreground" />
</adaptive-icon>

View file

@ -0,0 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@drawable/ic_launcher_foreground" />
</adaptive-icon>

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.9 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.8 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.7 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 6.7 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 6.2 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 10 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 8.9 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 15 KiB

View file

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<color name="colorPrimary">#008577</color>
<color name="colorPrimaryDark">#00574B</color>
<color name="colorAccent">#D81B60</color>
</resources>

View file

@ -0,0 +1,3 @@
<resources>
<string name="app_name">Mega Drone</string>
</resources>

View file

@ -0,0 +1,11 @@
<resources>
<!-- Base application theme. -->
<style name="AppTheme" parent="Theme.AppCompat.Light.DarkActionBar">
<!-- Customize your theme here. -->
<item name="colorPrimary">@color/colorPrimary</item>
<item name="colorPrimaryDark">@color/colorPrimaryDark</item>
<item name="colorAccent">@color/colorAccent</item>
</style>
</resources>

61
externals/oboe/samples/README.md vendored Normal file
View file

@ -0,0 +1,61 @@
Oboe Samples
==============
These samples demonstrate how to use the Oboe library:
1. [MinimalOboe](minimaloboe): Just create an Oboe stream and play white noise. Restart stream when disconnected. (Kotlin/Compose)
1. [hello-oboe](hello-oboe): Creates an output (playback) stream and plays a
sine wave when you tap the screen. (Java)
1. [RhythmGame](RhythmGame): A simple rhythm game where you copy the clap patterns you hear by tapping on the screen.
There is an associated codelab to follow along with. (Java)
1. [MegaDrone](MegaDrone): A one hundred oscillator synthesizer, demonstrates low latency and CPU performance. (Java)
1. [DrumThumper](drumthumper): A drum pad that plays sounds from loaded WAV files. (Kotlin)
1. [LiveEffect](LiveEffect): Loops audio from input stream to output stream to demonstrate duplex capability. (Java)
1. [SoundBoard](SoundBoard): A 25 to 40 note dynamic synthesizer, demonstrating combining signals. The stream restarts
when the display rotates. (Kotlin)
Pre-requisites
-------------
* Android device or emulator running API 16 (Jelly Bean) or above
* [Android SDK 26](https://developer.android.com/about/versions/oreo/android-8.0-migration.html#ptb)
* [NDK r17](https://developer.android.com/ndk/downloads/index.html) or above
* [Android Studio 2.3.0+](https://developer.android.com/studio/index.html)
Getting Started
---------------
1. [Install Android Studio](https://developer.android.com/studio/index.html)
1. Import the sample project into Android Studio
- File -> New -> Import Project
- Browse to oboe/samples/build.gradle
- Click "OK"
1. Click Run, click on the sample you wish to run
Support
-------
If you've found an error in these samples, please [file an issue](https://github.com/google/oboe/issues/new).
Patches are encouraged, and may be submitted by [forking this project](https://github.com/google/oboe/fork) and
submitting a pull request through GitHub. Please see [CONTRIBUTING.md](../CONTRIBUTING.md) for more details.
- [Stack Overflow](http://stackoverflow.com/questions/tagged/android-ndk)
- [Google+ Community](https://plus.google.com/communities/105153134372062985968)
- [Android Tools Feedback](http://tools.android.com/feedback)
License
-------
Copyright 2017 Google, Inc.
Licensed to the Apache Software Foundation (ASF) under one or more contributor
license agreements. See the NOTICE file distributed with this work for
additional information regarding copyright ownership. The ASF licenses this
file to you under the Apache License, Version 2.0 (the "License"); you may not
use this file except in compliance with the License. You may obtain a copy of
the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under
the License.

View file

@ -0,0 +1,78 @@
# Build script for the RhythmGame sample's native library.
# NOTE: target_link_options() (used below) requires CMake 3.13+; the previous
# minimum of 3.4.1 understated the real requirement.
cmake_minimum_required(VERSION 3.13)

add_library( native-lib
        SHARED

        # main game files
        src/main/cpp/native-lib.cpp
        src/main/cpp/Game.cpp

        # audio engine
        src/main/cpp/audio/AAssetDataSource.cpp
        src/main/cpp/audio/Player.cpp

        # UI engine
        src/main/cpp/ui/OpenGLFunctions.cpp

        # utility functions
        src/main/cpp/utils/logging.h
        src/main/cpp/utils/UtilityFunctions.cpp
        )

# Target-scoped include paths (replaces directory-scoped include_directories(),
# which leaked into every target declared below, including the oboe subproject).
target_include_directories(native-lib PRIVATE third_party src/main/cpp/)

set (TARGET_LIBS log android oboe GLESv2)

# if(${USE_FFMPEG}) double-dereferenced the variable; the plain name is correct.
if(USE_FFMPEG)
    MESSAGE(STATUS "Using FFmpeg extractor")
    target_compile_definitions(native-lib PRIVATE USE_FFMPEG=1)
    target_sources( native-lib PRIVATE src/main/cpp/audio/FFMpegExtractor.cpp )

    # Add the local path to FFmpeg, you can use the ${ANDROID_ABI} variable to specify the ABI name
    # e.g. /Users/donturner/Code/ffmpeg/build/${ANDROID_ABI}
    set(FFMPEG_DIR "/path/to/ffmpeg")

    # BUG FIX: include_directories() takes no target argument — the old call
    # `include_directories(native-lib ${FFMPEG_DIR}/include)` silently added a
    # bogus "native-lib" entry to the include path. Use the target-scoped form.
    target_include_directories(native-lib PRIVATE ${FFMPEG_DIR}/include)

    # Wrap the prebuilt FFmpeg shared objects as imported targets.
    add_library( avformat SHARED IMPORTED)
    set_target_properties(avformat PROPERTIES IMPORTED_LOCATION
            ${FFMPEG_DIR}/lib/libavformat.so)
    add_library( avutil SHARED IMPORTED)
    set_target_properties(avutil PROPERTIES IMPORTED_LOCATION
            ${FFMPEG_DIR}/lib/libavutil.so)
    add_library( avcodec SHARED IMPORTED)
    set_target_properties(avcodec PROPERTIES IMPORTED_LOCATION
            ${FFMPEG_DIR}/lib/libavcodec.so)
    add_library( swresample SHARED IMPORTED)
    set_target_properties(swresample PROPERTIES IMPORTED_LOCATION
            ${FFMPEG_DIR}/lib/libswresample.so)

    set (TARGET_LIBS ${TARGET_LIBS} avformat avutil avcodec swresample)
else()
    MESSAGE(STATUS "Using NDK media extractor")
    target_compile_definitions(native-lib PRIVATE USE_FFMPEG=0)
    target_sources( native-lib PRIVATE src/main/cpp/audio/NDKExtractor.cpp )
    set (TARGET_LIBS ${TARGET_LIBS} mediandk)
endif()

target_link_libraries(native-lib PRIVATE ${TARGET_LIBS})
target_link_options(native-lib PRIVATE "-Wl,-z,max-page-size=16384")

# Set the path to the Oboe directory.
set (OBOE_DIR ../..)

# Add the Oboe library as a subdirectory in your project.
add_subdirectory (${OBOE_DIR} ./oboe-bin)

# Specify the path to the Oboe header files (target-scoped).
target_include_directories (native-lib PRIVATE ${OBOE_DIR}/include ${OBOE_DIR}/samples)

# Enable optimization flags: if having problems with source level debugging,
# disable -Ofast ( and debug ), re-enable after done debugging.
target_compile_options(native-lib
        PRIVATE -std=c++17 -Wall -Werror "$<$<CONFIG:RELEASE>:-Ofast>")

View file

@ -0,0 +1,92 @@
Rhythm Game sample
==================
This sample demonstrates how to build a simple musical game. The objective of the game is to clap in time to a song by copying what you hear. You do this by listening to the clap sounds, then tapping on the screen to copy those claps.
For a step-by-step guide on how this game works and how to build it check out this codelab: [Build a Musical Game using Oboe](https://developer.android.com/codelabs/musicalgame-using-oboe).
Screenshots
-----------
The UI is deliberately very simple - just tap anywhere in the grey area after hearing the claps. The UI will change color to indicate the game state. The colors are:
- Yellow: Game is loading (assets are being decompressed)
- Grey: Game is being played
- Orange: You tapped too early
- Green: You tapped on time
- Purple: You tapped too late
- Red: There was a problem loading the game (check logcat output)
![RhythmGame Screenshot](images/RhythmGame-screenshot.png)
### Audio timeline
![Game timeline](images/1-timeline.png "Game timeline")
The game plays the clap sounds on the first 3 beats of the bar. These are played in time with the backing track.
When the user taps on the screen, a clap sound is played and the game checks whether the tap occurred within an acceptable time window.
### Architecture
![Game architecture](images/2-architecture.png "Game architecture")
Oboe provides the [`AudioStream`](https://github.com/google/oboe/blob/main/include/oboe/AudioStream.h) class and associated objects to allow the sample to output audio data to the audio device. All other objects are provided by the sample.
Each time the `AudioStream` needs more audio data it calls [`AudioDataCallback::onAudioReady`](https://github.com/google/oboe/blob/main/include/oboe/AudioStreamCallback.h). This passes a container array named `audioData` to the `Game` object which must then fill the array with `numFrames` of audio frames.
![onAudioReady signature](images/3-audioData.png "onAudioReady signature")
### Latency optimizations
The sample uses the following optimizations to obtain a low latency audio stream:
- Performance mode set to [Low Latency](https://github.com/google/oboe/blob/main/FullGuide.md#setting-performance-mode)
- Sharing mode set to [Exclusive](https://github.com/google/oboe/blob/main/FullGuide.md#sharing-mode)
- Buffer size set to twice the number of frames in a burst (double buffering)
### Audio rendering
The `IRenderableAudio` interface (abstract class) represents objects which can produce frames of audio data. The `Player` and `Mixer` objects both implement this interface.
Both the clap sound and backing tracks are represented by `Player` objects which are then mixed together using a `Mixer`.
![Audio rendering](images/4-audio-rendering.png "Audio rendering")
### Sharing objects with the audio thread
It is very important that the audio thread (which calls the `onAudioReady` method) is never blocked. Blocking can cause underruns and audio glitches. To avoid blocking we use a `LockFreeQueue` to share information between the audio thread and other threads. The following diagram shows how claps are enqueued by pushing the clap times (in milliseconds) onto the queue, then dequeuing the clap time when the clap is played.
![Lock free queue](images/5-lockfreequeue.png "Lock free queue")
We also use [atomics](http://en.cppreference.com/w/cpp/atomic/atomic) to ensure that threads see a consistent view of any shared primitives.
### Keeping UI events and audio in sync
When a tap event arrives on the UI thread it only contains the time (milliseconds since boot) that the event occurred. We need to figure out what the song position was when the tap occurred.
To do this we keep track of the song position and the time it was last updated. These values are updated each time the `onAudioReady` method is called. This enables us to keep the UI in sync with the audio timeline.
![Audio/UI synchronization](images/6-audio-ui-sync.png "Audio/UI synchronization")
### Calculating whether a tap was successful
Once we know when the user tapped in the song, we can calculate whether that tap was successful, i.e. whether it fell within an acceptable time range. This range is known as the "tap window".
![Tap window calculation](images/7-tap-window.png "Tap window calculation")
Once we know the result of the tap the UI is updated with a color to give the user visual feedback. This is done in `getTapResult`.
Note that once a tap has been received the tap window is removed from the queue - the user only gets one chance to get their tap right!
### Use of compressed audio assets
In order to reduce APK size this game uses MP3 files for its audio assets. These are extracted on game startup in `AAssetDataSource::newFromCompressedAsset`. A yellow screen will be shown during this process.
By default the game uses `NDKExtractor` for asset extraction and decoding. Under the hood this uses the [NDK Media APIs](https://developer.android.com/ndk/reference/group/media).
There are some limitations with this approach:
- Only available on API 21 and above
- No resampling: The extracted output format will match the input format of the MP3. In this case a sample rate of 48000. If your audio stream's sample rate doesn't match, the assets will not be extracted and an error will be displayed in logcat.
- 16-bit output only.
A faster, more versatile solution is to use [FFmpeg](https://www.ffmpeg.org/). To do this follow [the instructions here](https://medium.com/@donturner/using-ffmpeg-for-faster-audio-decoding-967894e94e71) and use the `ffmpegExtractor` build variant found in `app.gradle`. The extraction will then be done by `FFmpegExtractor`.

View file

@ -0,0 +1,77 @@
// Gradle build script for the RhythmGame sample app.
apply plugin: 'com.android.application'
apply plugin: 'kotlin-android'
android {
    defaultConfig {
        applicationId "com.google.oboe.samples.rhythmgame"
        targetSdkVersion 35
        // NOTE(review): compileSdkVersion conventionally lives at the android{}
        // level rather than inside defaultConfig — verify AGP accepts this placement.
        compileSdkVersion 35
        versionCode 1
        versionName "1.0"
        testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
        externalNativeBuild {
            cmake {
                // Build the native library with C++17 for all four ABIs.
                cppFlags "-std=c++17"
                abiFilters 'armeabi-v7a', 'arm64-v8a', 'x86', 'x86_64'
            }
        }
    }
    buildTypes {
        release {
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
        }
    }
    compileOptions {
        sourceCompatibility JavaVersion.VERSION_18
        targetCompatibility JavaVersion.VERSION_18
    }
    externalNativeBuild {
        cmake {
            // Native build is driven by the CMakeLists.txt next to this file.
            path "CMakeLists.txt"
        }
    }
    // Product flavors select how compressed audio assets are extracted:
    // the NDK media APIs (default) or a locally-built FFmpeg (commented out).
    flavorDimensions "extractorLibrary"
    productFlavors {
        ndkExtractor {
            dimension "extractorLibrary"
            // Oboe has a minimum API of 16, but AMediaExtractor (used to extract the MP3 assets)
            // is only available from API 21.
            // For further backward compatibility consider using FFmpeg (see below)
            minSdkVersion 21
            externalNativeBuild {
                cmake {
                    // Passed through to CMake to choose the extractor backend.
                    arguments "-DUSE_FFMPEG=0"
                }
            }
        }
        /**
         * To use FFmpeg for asset extraction do the following:
         * - Uncomment this block
         * - Change the build variant to ffmpegExtractor
         * - Update the FFMPEG_DIR variable in CMakeLists.txt to the local FFmpeg path
         */
        /*
        ffmpegExtractor {
            dimension "extractorLibrary"
            minSdkVersion 16
            externalNativeBuild {
                cmake {
                    arguments "-DUSE_FFMPEG=1"
                }
            }
        }
        */
    }
    namespace 'com.google.oboe.samples.rhythmgame'
    buildFeatures {
        buildConfig true
    }
}
dependencies {
    implementation fileTree(dir: 'libs', include: ['*.jar'])
    implementation 'androidx.appcompat:appcompat:1.7.0'
    implementation 'androidx.constraintlayout:constraintlayout:2.2.1'
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 48 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 112 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 48 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 30 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 36 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 33 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 12 KiB

View file

@ -0,0 +1,21 @@
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile

View file

@ -0,0 +1,22 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android">
<application
android:allowBackup="true"
android:icon="@mipmap/ic_launcher"
android:label="@string/app_name"
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true"
android:theme="@style/AppTheme">
<activity android:name=".MainActivity"
android:screenOrientation="portrait"
android:exported="true">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>

Binary file not shown.

Binary file not shown.

View file

@ -0,0 +1,241 @@
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <utils/logging.h>
#include <thread>
#include <cinttypes>
#include "Game.h"
// Keep a reference to the Android asset manager; used later by
// setupAudioSources() to load the compressed audio assets.
Game::Game(AAssetManager &assetManager): mAssetManager(assetManager) {
}
/**
 * Load the game: open the audio stream, decode the audio assets, schedule the
 * song's clap events, then start playback. Sets mGameState to FailedToLoad on
 * any error, or to Playing on success. Runs on a background thread — see
 * Game::start().
 */
void Game::load() {
    // The stream must be opened first: the audio sources are created to match
    // its channel count and sample rate.
    if (!openStream()) {
        mGameState = GameState::FailedToLoad;
        return;
    }
    if (!setupAudioSources()) {
        mGameState = GameState::FailedToLoad;
        return;
    }
    scheduleSongEvents();
    Result result = mAudioStream->requestStart();
    if (result != Result::OK){
        LOGE("Failed to start stream. Error: %s", convertToText(result));
        mGameState = GameState::FailedToLoad;
        return;
    }
    mGameState = GameState::Playing;
}
/**
 * Kick off Game::load() asynchronously so asset decoding does not block the
 * calling (UI) thread.
 */
void Game::start() {
    // async returns a future, we must store this future to avoid blocking. It's not sufficient
    // to store this in a local variable as its destructor will block until Game::load completes.
    mLoadingResult = std::async(&Game::load, this);
}
/**
 * Stop playback and release the audio stream. Safe to call when no stream is
 * open (no-op in that case).
 */
void Game::stop(){
    if (!mAudioStream) return;
    mAudioStream->stop();
    mAudioStream->close();
    mAudioStream.reset();
}
void Game::tap(int64_t eventTimeAsUptime) {
if (mGameState != GameState::Playing){
LOGW("Game not in playing state, ignoring tap event");
} else {
mClap->setPlaying(true);
int64_t nextClapWindowTimeMs;
if (mClapWindows.pop(nextClapWindowTimeMs)){
// Convert the tap time to a song position
int64_t tapTimeInSongMs = mSongPositionMs + (eventTimeAsUptime - mLastUpdateTime);
TapResult result = getTapResult(tapTimeInSongMs, nextClapWindowTimeMs);
mUiEvents.push(result);
}
}
}
void Game::tick(){
switch (mGameState){
case GameState::Playing:
TapResult r;
if (mUiEvents.pop(r)) {
renderEvent(r);
} else {
SetGLScreenColor(kPlayingColor);
}
break;
case GameState::Loading:
SetGLScreenColor(kLoadingColor);
break;
case GameState::FailedToLoad:
SetGLScreenColor(kLoadingFailedColor);
break;
}
}
// GL surface lifecycle hooks. Only creation needs work here: show the loading
// colour until Game::load() flips the state. Resize/destroy are intentionally
// empty.
void Game::onSurfaceCreated() {
    SetGLScreenColor(kLoadingColor);
}
void Game::onSurfaceChanged(int widthInPixels, int heightInPixels) {
}
void Game::onSurfaceDestroyed() {
}
/**
 * Realtime audio callback: renders numFrames frames of mixed audio into
 * audioData, advancing the song position and starting any clap whose
 * scheduled time has been reached. Runs on the audio thread — must not block.
 *
 * @return DataCallbackResult::Continue to keep the stream running
 */
DataCallbackResult Game::onAudioReady(AudioStream *oboeStream, void *audioData, int32_t numFrames) {
    auto *outputBuffer = static_cast<float *>(audioData);
    int64_t nextClapEventMs;
    for (int i = 0; i < numFrames; ++i) {
        // Song position derived from the frame counter, not wall-clock time.
        mSongPositionMs = convertFramesToMillis(
                mCurrentFrame,
                mAudioStream->getSampleRate());
        // Trigger a scheduled clap once the song reaches its event time.
        if (mClapEvents.peek(nextClapEventMs) && mSongPositionMs >= nextClapEventMs){
            mClap->setPlaying(true);
            mClapEvents.pop(nextClapEventMs);
        }
        // Render one frame (all channels) at the interleaved offset for frame i.
        mMixer.renderAudio(outputBuffer+(oboeStream->getChannelCount()*i), 1);
        mCurrentFrame++;
    }
    // Record when mSongPositionMs was last refreshed so Game::tap() can map UI
    // tap times onto the song timeline.
    mLastUpdateTime = nowUptimeMillis();
    return DataCallbackResult::Continue;
}
/**
 * Stream error callback (stream already closed). On disconnection the game
 * resets all playback state and restarts itself; any other error is logged.
 */
void Game::onErrorAfterClose(AudioStream *audioStream, Result error) {
    if (error == Result::ErrorDisconnected){
        mGameState = GameState::Loading;
        mAudioStream.reset();
        mMixer.removeAllTracks();
        // Rewind the song timeline before reloading.
        mCurrentFrame = 0;
        mSongPositionMs = 0;
        mLastUpdateTime = 0;
        start();
    } else {
        LOGE("Stream error: %s", convertToText(error));
    }
}
/**
 * Grade a tap against the centre of its "tap window": taps within
 * kWindowCenterOffsetMs of the window centre succeed, earlier taps are Early,
 * later taps are Late.
 *
 * @param tapTimeInMillis - The time the tap occurred in milliseconds
 * @param tapWindowInMillis - The time at the middle of the "tap window" in milliseconds
 * @return TapResult::Early, TapResult::Late or TapResult::Success
 */
TapResult Game::getTapResult(int64_t tapTimeInMillis, int64_t tapWindowInMillis){
    LOGD("Tap time %" PRId64 ", tap window time: %" PRId64, tapTimeInMillis, tapWindowInMillis);
    if (tapTimeInMillis > tapWindowInMillis + kWindowCenterOffsetMs) {
        return TapResult::Late;
    }
    if (tapTimeInMillis < tapWindowInMillis - kWindowCenterOffsetMs) {
        return TapResult::Early;
    }
    return TapResult::Success;
}
/**
 * Open a low-latency, exclusive, stereo float output stream at 48kHz, with
 * this object registered for both data and error callbacks. On success the
 * mixer's channel count is matched to the stream's.
 *
 * @return true if the stream opened, false otherwise (error is logged)
 */
bool Game::openStream() {
    // Create an audio stream
    AudioStreamBuilder builder;
    // Float samples; conversion is allowed in case the device needs another format.
    builder.setFormat(AudioFormat::Float);
    builder.setFormatConversionAllowed(true);
    builder.setPerformanceMode(PerformanceMode::LowLatency);
    builder.setSharingMode(SharingMode::Exclusive);
    builder.setSampleRate(48000);
    builder.setSampleRateConversionQuality(
            SampleRateConversionQuality::Medium);
    builder.setChannelCount(2);
    builder.setDataCallback(this);
    builder.setErrorCallback(this);
    Result result = builder.openStream(mAudioStream);
    if (result != Result::OK){
        LOGE("Failed to open stream. Error: %s", convertToText(result));
        return false;
    }
    // Keep the mixer's output layout in sync with what the device gave us.
    mMixer.setChannelCount(mAudioStream->getChannelCount());
    return true;
}
/**
 * Decode the clap and backing-track assets into players matched to the open
 * stream's channel count and sample rate, and add both to the mixer. The
 * backing track loops continuously; the clap is triggered on demand.
 *
 * @return true when both assets decoded successfully, false otherwise
 */
bool Game::setupAudioSources() {
    // Set the properties of our audio source(s) to match that of our audio stream
    AudioProperties targetProperties {
            .channelCount = mAudioStream->getChannelCount(),
            .sampleRate = mAudioStream->getSampleRate()
    };
    // Create a data source and player for the clap sound.
    // (Renamed from `mClapSource`: the m-prefix wrongly suggested a member
    // variable; this is a local, consistent with `backingTrackSource` below.)
    std::shared_ptr<AAssetDataSource> clapSource {
            AAssetDataSource::newFromCompressedAsset(mAssetManager, kClapFilename, targetProperties)
    };
    if (clapSource == nullptr){
        LOGE("Could not load source data for clap sound");
        return false;
    }
    mClap = std::make_unique<Player>(clapSource);
    // Create a data source and player for our backing track
    std::shared_ptr<AAssetDataSource> backingTrackSource {
            AAssetDataSource::newFromCompressedAsset(mAssetManager, kBackingTrackFilename, targetProperties)
    };
    if (backingTrackSource == nullptr){
        LOGE("Could not load source data for backing track");
        return false;
    }
    mBackingTrack = std::make_unique<Player>(backingTrackSource);
    mBackingTrack->setPlaying(true);
    mBackingTrack->setLooping(true);
    // Add both players to a mixer
    mMixer.addTrack(mClap.get());
    mMixer.addTrack(mBackingTrack.get());
    return true;
}
// Copy the compile-time clap and tap-window schedules (see GameConstants.h)
// into the lock-free queues consumed during playback.
void Game::scheduleSongEvents() {
    for (const auto eventTime : kClapEvents) {
        mClapEvents.push(eventTime);
    }
    for (const auto windowTime : kClapWindows) {
        mClapWindows.push(windowTime);
    }
}

View file

@ -0,0 +1,85 @@
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef RHYTHMGAME_GAME_H
#define RHYTHMGAME_GAME_H
#include <future>
#include <android/asset_manager.h>
#include <oboe/Oboe.h>
#include "shared/Mixer.h"
#include "audio/Player.h"
#include "audio/AAssetDataSource.h"
#include "ui/OpenGLFunctions.h"
#include "utils/LockFreeQueue.h"
#include "utils/UtilityFunctions.h"
#include "GameConstants.h"
using namespace oboe;
// Overall lifecycle state of the game. Held in a std::atomic<GameState>
// inside Game so it can be read while loading happens asynchronously.
enum class GameState {
    Loading,        // assets are still being decoded (see Game::load)
    Playing,        // loading succeeded and the game is running
    FailedToLoad    // loading failed; the game cannot start
};
/**
 * Core game object: owns the output audio stream, the mixer and the two
 * sound players, drives per-frame updates (tick) and scores user taps
 * against the scheduled tap windows. Serves as both the oboe data callback
 * and error callback for its stream.
 */
class Game : public AudioStreamDataCallback, AudioStreamErrorCallback {
public:
    explicit Game(AAssetManager&);

    void start();
    void stop();
    void onSurfaceCreated();
    void onSurfaceDestroyed();
    void onSurfaceChanged(int widthInPixels, int heightInPixels);
    void tick();
    // Register a screen tap that occurred at the given uptime (milliseconds).
    void tap(int64_t eventTimeAsUptime);

    // Inherited from oboe::AudioStreamDataCallback.
    DataCallbackResult
    onAudioReady(AudioStream *oboeStream, void *audioData, int32_t numFrames) override;

    // Inherited from oboe::AudioStreamErrorCallback.
    void onErrorAfterClose(AudioStream *oboeStream, Result error) override;

private:
    AAssetManager& mAssetManager;                   // source of the audio assets
    std::shared_ptr<AudioStream> mAudioStream;      // output stream (see openStream)
    std::unique_ptr<Player> mClap;                  // one-shot clap sound
    std::unique_ptr<Player> mBackingTrack;          // looping backing track
    Mixer mMixer;                                   // mixes both players into the stream
    LockFreeQueue<int64_t, kMaxQueueItems> mClapEvents;     // times (ms) at which claps auto-play
    std::atomic<int64_t> mCurrentFrame { 0 };
    std::atomic<int64_t> mSongPositionMs { 0 };             // current playback position
    LockFreeQueue<int64_t, kMaxQueueItems> mClapWindows;    // centres (ms) of valid tap windows
    LockFreeQueue<TapResult, kMaxQueueItems> mUiEvents;     // tap results pending display
    std::atomic<int64_t> mLastUpdateTime { 0 };
    std::atomic<GameState> mGameState { GameState::Loading };
    std::future<void> mLoadingResult;                       // handle for the async load()

    void load();                        // asynchronous asset decode + stream setup
    TapResult getTapResult(int64_t tapTimeInMillis, int64_t tapWindowInMillis);
    bool openStream();
    bool setupAudioSources();
    void scheduleSongEvents();
};
#endif //RHYTHMGAME_GAME_H

View file

@ -0,0 +1,60 @@
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef SAMPLES_GAMECONSTANTS_H
#define SAMPLES_GAMECONSTANTS_H
#include "ui/OpenGLFunctions.h"
constexpr int kBufferSizeInBursts = 2; // Use 2 bursts as the buffer size (double buffer)

// Must be power of 2 — presumably a LockFreeQueue requirement; confirm in its header.
constexpr int kMaxQueueItems = 4;

// Colors for game states and visual feedback for taps
constexpr ScreenColor kPlayingColor = GREY;
constexpr ScreenColor kLoadingColor = YELLOW;
constexpr ScreenColor kLoadingFailedColor = RED;
constexpr ScreenColor kTapSuccessColor = GREEN;
constexpr ScreenColor kTapEarlyColor = ORANGE;
constexpr ScreenColor kTapLateColor = PURPLE;

// This defines the size of the tap window in milliseconds. For example, if defined at 100ms the
// player will have 100ms before and after the centre of the tap window to tap on the screen and
// be successful
constexpr int kWindowCenterOffsetMs = 100;

// Filename for clap sound asset (in assets folder)
constexpr char kClapFilename[] { "CLAP.mp3" };

// Filename for the backing track asset (in assets folder)
constexpr char kBackingTrackFilename[] { "FUNKY_HOUSE.mp3" };

// The game will first demonstrate the pattern which the user should copy. It does this by
// "clapping" (playing a clap sound) at certain times during the song. We can specify these times
// here in milliseconds. Our backing track has a tempo of 120 beats per minute, which is 2 beats per
// second. This means a pattern of 3 claps starting on the first beat of the first bar would mean
// playing claps at 0ms, 500ms and 1000ms
constexpr int64_t kClapEvents[] { 0, 500, 1000 };

// We then want the user to tap on the screen exactly 4 beats after the first clap so we add clap
// windows at 2000ms, 2500ms and 3000ms (or 2, 2.5 and 3 seconds). @see getTapResult for more info.
constexpr int64_t kClapWindows[] { 2000, 2500, 3000 };

// Properties every decoded audio source must match so it can be mixed
// directly into the output stream.
struct AudioProperties {
    int32_t channelCount;
    int32_t sampleRate;
};
#endif //SAMPLES_GAMECONSTANTS_H

View file

@ -0,0 +1,86 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <utils/logging.h>
#include <oboe/Oboe.h>
#include "AAssetDataSource.h"
#if !defined(USE_FFMPEG)
#error USE_FFMPEG should be defined in app.gradle
#endif
#if USE_FFMPEG==1
#include "FFMpegExtractor.h"
#else
#include "NDKExtractor.h"
#endif
// Assumed worst-case ratio between compressed asset size and decoded PCM size;
// used to size the decode buffer before the true decoded size is known.
constexpr int kMaxCompressionRatio { 12 };

/**
 * Decode a compressed audio asset into a new AAssetDataSource holding
 * interleaved float samples at the target channel count / sample rate.
 *
 * @param assetManager     manager used to open the asset
 * @param filename         asset file name (in the APK assets folder)
 * @param targetProperties desired output channel count and sample rate
 * @return the new data source, or nullptr if the asset could not be opened
 *         or decoded
 */
AAssetDataSource* AAssetDataSource::newFromCompressedAsset(
        AAssetManager &assetManager,
        const char *filename,
        const AudioProperties targetProperties) {

    AAsset *asset = AAssetManager_open(&assetManager, filename, AASSET_MODE_UNKNOWN);
    if (!asset) {
        LOGE("Failed to open asset %s", filename);
        return nullptr;
    }

    off_t assetSize = AAsset_getLength(asset);
    LOGD("Opened %s, size %ld", filename, assetSize);

    // Allocate memory to store the decompressed audio. We don't know the exact
    // size of the decoded data until after decoding so we make an assumption about the
    // maximum compression ratio and the decoded sample format (float for FFmpeg, int16 for NDK).
    // Fix: this branch previously tested `USE_FFMPEG==true`, inconsistent with the
    // `USE_FFMPEG==1` tests used everywhere else in this file.
#if USE_FFMPEG==1
    const long maximumDataSizeInBytes = kMaxCompressionRatio * assetSize * sizeof(float);
    auto decodedData = new uint8_t[maximumDataSizeInBytes];

    int64_t bytesDecoded = FFMpegExtractor::decode(asset, decodedData, targetProperties);
    auto numSamples = bytesDecoded / sizeof(float);
#else
    const long maximumDataSizeInBytes = kMaxCompressionRatio * assetSize * sizeof(int16_t);
    auto decodedData = new uint8_t[maximumDataSizeInBytes];

    int64_t bytesDecoded = NDKExtractor::decode(asset, decodedData, targetProperties);
    auto numSamples = bytesDecoded / sizeof(int16_t);
#endif

    // Fix: FFMpegExtractor::decode returns -1 on error; the original cast that
    // negative value to size_t in the memcpy below, copying ~SIZE_MAX bytes.
    if (bytesDecoded < 0) {
        LOGE("Failed to decode %s", filename);
        delete[] decodedData;
        AAsset_close(asset);
        return nullptr;
    }

    // Now we know the exact number of samples we can create a float array to hold the audio data
    auto outputBuffer = std::make_unique<float[]>(numSamples);

#if USE_FFMPEG==1
    // FFmpeg already decoded to float; copy straight across.
    memcpy(outputBuffer.get(), decodedData, (size_t)bytesDecoded);
#else
    // The NDK decoder can only decode to int16, we need to convert to floats
    oboe::convertPcm16ToFloat(
            reinterpret_cast<int16_t*>(decodedData),
            outputBuffer.get(),
            bytesDecoded / sizeof(int16_t));
#endif

    delete[] decodedData;
    AAsset_close(asset);

    return new AAssetDataSource(std::move(outputBuffer),
                                numSamples,
                                targetProperties);
}

View file

@ -0,0 +1,50 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef RHYTHMGAME_AASSETDATASOURCE_H
#define RHYTHMGAME_AASSETDATASOURCE_H
#include <android/asset_manager.h>
#include <GameConstants.h>
#include "DataSource.h"
/**
 * A DataSource backed by float PCM decoded from an Android asset.
 * Instances are created via newFromCompressedAsset() and are immutable.
 */
class AAssetDataSource : public DataSource {

public:
    int64_t getSize() const override { return mBufferSize; }    // size in samples
    AudioProperties getProperties() const override { return mProperties; }
    const float* getData() const override { return mBuffer.get(); }

    /**
     * Decode the given compressed asset into a new data source.
     * @return the new AAssetDataSource, or nullptr on failure
     */
    static AAssetDataSource* newFromCompressedAsset(
            AAssetManager &assetManager,
            const char *filename,
            AudioProperties targetProperties);

private:
    // Private: construction goes through the factory above.
    AAssetDataSource(std::unique_ptr<float[]> data, size_t size,
                     const AudioProperties properties)
            : mBuffer(std::move(data))
            , mBufferSize(size)
            , mProperties(properties) {
    }

    const std::unique_ptr<float[]> mBuffer;     // decoded interleaved float samples
    const int64_t mBufferSize;                  // number of samples in mBuffer
    const AudioProperties mProperties;          // channel count and sample rate
};
#endif //RHYTHMGAME_AASSETDATASOURCE_H

View file

@ -0,0 +1,32 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef RHYTHMGAME_AUDIOSOURCE_H
#define RHYTHMGAME_AUDIOSOURCE_H
#include <cstdint>
#include <GameConstants.h>
/**
 * Interface for a read-only source of interleaved float audio samples,
 * consumed by Player.
 */
class DataSource {
public:
    virtual ~DataSource(){};
    virtual int64_t getSize() const = 0;                // total number of samples
    virtual AudioProperties getProperties() const = 0;  // channel count & sample rate
    virtual const float* getData() const = 0;           // pointer to the sample data
};
#endif //RHYTHMGAME_AUDIOSOURCE_H

View file

@ -0,0 +1,318 @@
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <memory>
#include <oboe/Definitions.h>
#include "FFMpegExtractor.h"
#include "utils/logging.h"
constexpr int kInternalBufferSize = 1152; // Use MP3 block size. https://wiki.hydrogenaud.io/index.php?title=MP3
int read(void *opaque, uint8_t *buf, int buf_size) {
auto asset = (AAsset *) opaque;
int bytesRead = AAsset_read(asset, buf, (size_t)buf_size);
return bytesRead;
}
int64_t seek(void *opaque, int64_t offset, int whence){
auto asset = (AAsset*)opaque;
// See https://www.ffmpeg.org/doxygen/3.0/avio_8h.html#a427ff2a881637b47ee7d7f9e368be63f
if (whence == AVSEEK_SIZE) return AAsset_getLength(asset);
if (AAsset_seek(asset, offset, whence) == -1){
return -1;
} else {
return 0;
}
}
/**
 * Create an AVIOContext that reads and seeks the given asset through the
 * custom read/seek callbacks defined above.
 *
 * @param asset       passed to the callbacks as the opaque pointer
 * @param buffer      av_malloc'd internal I/O buffer; ownership passes to the
 *                    context on success (the caller's deleter frees it)
 * @param bufferSize  internal buffer size
 * @param avioContext receives the new context on success
 * @return true on success, false if allocation failed
 */
bool FFMpegExtractor::createAVIOContext(AAsset *asset, uint8_t *buffer, uint32_t bufferSize,
                                        AVIOContext **avioContext) {

    constexpr int isBufferWriteable = 0;

    *avioContext = avio_alloc_context(
            buffer, // internal buffer for FFmpeg to use
            bufferSize, // For optimal decoding speed this should be the protocol block size
            isBufferWriteable,
            asset, // Will be passed to our callback functions as a (void *)
            read, // Read callback function
            nullptr, // Write callback function (not used)
            seek); // Seek callback function

    if (*avioContext == nullptr){
        LOGE("Failed to create AVIO context");
        return false;
    } else {
        return true;
    }
}
/**
 * Allocate an AVFormatContext and attach the custom AVIO context so all I/O
 * goes through our callbacks.
 *
 * Fix: the original assigned `(*avFormatContext)->pb` BEFORE the null check,
 * which would dereference nullptr if avformat_alloc_context() failed.
 *
 * @param avioContext     the custom I/O context to attach
 * @param avFormatContext receives the new context on success
 * @return true on success, false if allocation failed
 */
bool
FFMpegExtractor::createAVFormatContext(AVIOContext *avioContext, AVFormatContext **avFormatContext) {
    *avFormatContext = avformat_alloc_context();

    if (*avFormatContext == nullptr){
        LOGE("Failed to create AVFormatContext");
        return false;
    }

    (*avFormatContext)->pb = avioContext;
    return true;
}
// Open the input for demuxing. The URL is empty because all I/O goes through
// the custom AVIO callbacks attached to the context. Returns true on success.
bool FFMpegExtractor::openAVFormatContext(AVFormatContext *avFormatContext) {
    int result = avformat_open_input(&avFormatContext,
                                     "", /* URL is left empty because we're providing our own I/O */
                                     nullptr /* AVInputFormat *fmt */,
                                     nullptr /* AVDictionary **options */
    );

    if (result != 0) {
        LOGE("Failed to open file. Error code %s", av_err2str(result));
        return false;
    }
    return true;
}
// Probe the container to fill in stream properties (codec, sample rate, etc.).
// Returns true on success.
bool FFMpegExtractor::getStreamInfo(AVFormatContext *avFormatContext) {
    int result = avformat_find_stream_info(avFormatContext, nullptr);
    if (result != 0 ){
        LOGE("Failed to find stream info. Error code %s", av_err2str(result));
        return false;
    }
    return true;
}
// Ask FFmpeg to pick the most suitable audio stream in the container.
// Returns the stream, or nullptr if none was found.
AVStream *FFMpegExtractor::getBestAudioStream(AVFormatContext *avFormatContext) {
    int streamIndex = av_find_best_stream(avFormatContext, AVMEDIA_TYPE_AUDIO, -1, -1, nullptr, 0);
    if (streamIndex >= 0) {
        return avFormatContext->streams[streamIndex];
    }
    LOGE("Could not find stream");
    return nullptr;
}
/**
 * Decode a compressed audio asset into interleaved float PCM using FFmpeg,
 * resampling to the requested channel count and sample rate.
 *
 * @param asset            source asset, read via the custom AVIO callbacks above
 * @param targetData       destination buffer; the caller must size it generously
 *                         (see kMaxCompressionRatio at the call site) — the
 *                         memcpy writes below are not bounds-checked
 * @param targetProperties desired output channel count and sample rate
 * @return number of bytes written to targetData, or -1 on error
 */
int64_t FFMpegExtractor::decode(
        AAsset *asset,
        uint8_t *targetData,
        AudioProperties targetProperties) {

    LOGI("Decoder: FFMpeg");

    int returnValue = -1; // -1 indicates error

    // Create a buffer for FFmpeg to use for decoding (freed in the custom deleter below)
    auto buffer = reinterpret_cast<uint8_t*>(av_malloc(kInternalBufferSize));

    // Create an AVIOContext with a custom deleter
    std::unique_ptr<AVIOContext, void(*)(AVIOContext *)> ioContext {
            nullptr,
            [](AVIOContext *c) {
                av_free(c->buffer);
                avio_context_free(&c);
            }
    };
    {
        AVIOContext *tmp = nullptr;
        if (!createAVIOContext(asset, buffer, kInternalBufferSize, &tmp)){
            LOGE("Could not create an AVIOContext");
            // NOTE(review): `buffer` leaks on this path — the deleter that frees
            // it only runs once the AVIOContext owns it. Confirm and free here.
            return returnValue;
        }
        ioContext.reset(tmp);
    }

    // Create an AVFormatContext using the avformat_free_context as the deleter function
    std::unique_ptr<AVFormatContext, decltype(&avformat_free_context)> formatContext {
            nullptr,
            &avformat_free_context
    };
    {
        AVFormatContext *tmp;
        if (!createAVFormatContext(ioContext.get(), &tmp)) return returnValue;
        formatContext.reset(tmp);
    }

    if (!openAVFormatContext(formatContext.get())) return returnValue;

    if (!getStreamInfo(formatContext.get())) return returnValue;

    // Obtain the best audio stream to decode
    AVStream *stream = getBestAudioStream(formatContext.get());
    if (stream == nullptr || stream->codecpar == nullptr){
        LOGE("Could not find a suitable audio stream to decode");
        return returnValue;
    }

    printCodecParameters(stream->codecpar);

    // Find the codec to decode this stream
    AVCodec *codec = avcodec_find_decoder(stream->codecpar->codec_id);
    if (!codec){
        LOGE("Could not find codec with ID: %d", stream->codecpar->codec_id);
        return returnValue;
    }

    // Create the codec context, specifying the deleter function
    std::unique_ptr<AVCodecContext, void(*)(AVCodecContext *)> codecContext {
            nullptr,
            [](AVCodecContext *c) { avcodec_free_context(&c); }
    };
    {
        AVCodecContext *tmp = avcodec_alloc_context3(codec);
        if (!tmp){
            LOGE("Failed to allocate codec context");
            return returnValue;
        }
        codecContext.reset(tmp);
    }

    // Copy the codec parameters into the context
    if (avcodec_parameters_to_context(codecContext.get(), stream->codecpar) < 0){
        LOGE("Failed to copy codec parameters to codec context");
        return returnValue;
    }

    // Open the codec
    if (avcodec_open2(codecContext.get(), codec, nullptr) < 0){
        LOGE("Could not open codec");
        return returnValue;
    }

    // prepare resampler
    // NOTE(review): this builds a channel-layout mask with the lowest
    // `channelCount` bits set — correct for the default mono/stereo layouts
    // only; confirm for higher channel counts.
    int32_t outChannelLayout = (1 << targetProperties.channelCount) - 1;
    LOGD("Channel layout %d", outChannelLayout);

    SwrContext *swr = swr_alloc();
    // NOTE(review): `swr` is never swr_free'd on any path below — leak.
    av_opt_set_int(swr, "in_channel_count", stream->codecpar->channels, 0);
    av_opt_set_int(swr, "out_channel_count", targetProperties.channelCount, 0);
    av_opt_set_int(swr, "in_channel_layout", stream->codecpar->channel_layout, 0);
    av_opt_set_int(swr, "out_channel_layout", outChannelLayout, 0);
    av_opt_set_int(swr, "in_sample_rate", stream->codecpar->sample_rate, 0);
    av_opt_set_int(swr, "out_sample_rate", targetProperties.sampleRate, 0);
    av_opt_set_int(swr, "in_sample_fmt", stream->codecpar->format, 0);
    av_opt_set_sample_fmt(swr, "out_sample_fmt", AV_SAMPLE_FMT_FLT, 0);
    av_opt_set_int(swr, "force_resampling", 1, 0);

    // Check that resampler has been inited
    int result = swr_init(swr);
    if (result != 0){
        LOGE("swr_init failed. Error: %s", av_err2str(result));
        return returnValue;
    };
    if (!swr_is_initialized(swr)) {
        LOGE("swr_is_initialized is false\n");
        return returnValue;
    }

    // Prepare to read data
    int bytesWritten = 0;
    AVPacket avPacket; // Stores compressed audio data
    av_init_packet(&avPacket);
    AVFrame *decodedFrame = av_frame_alloc(); // Stores raw audio data
    int bytesPerSample = av_get_bytes_per_sample((AVSampleFormat)stream->codecpar->format);

    LOGD("Bytes per sample %d", bytesPerSample);

    LOGD("DECODE START");

    // While there is more data to read, read it into the avPacket
    // NOTE(review): packets belonging to other streams are never
    // av_packet_unref'd (the unref only happens inside the if below).
    while (av_read_frame(formatContext.get(), &avPacket) == 0){
        if (avPacket.stream_index == stream->index && avPacket.size > 0) {

            // Pass our compressed data into the codec
            result = avcodec_send_packet(codecContext.get(), &avPacket);
            if (result != 0) {
                LOGE("avcodec_send_packet error: %s", av_err2str(result));
                // NOTE(review): the goto paths skip av_frame_free(&decodedFrame)
                // below — the frame leaks on decode errors.
                goto cleanup;
            }

            // Retrieve our raw data from the codec
            result = avcodec_receive_frame(codecContext.get(), decodedFrame);
            if (result == AVERROR(EAGAIN)) {
                // The codec needs more data before it can decode
                LOGI("avcodec_receive_frame returned EAGAIN");
                av_packet_unref(&avPacket);
                continue;
            } else if (result != 0) {
                LOGE("avcodec_receive_frame error: %s", av_err2str(result));
                goto cleanup;
            }

            // DO RESAMPLING
            // Upper bound on output frames, including samples still buffered
            // inside the resampler (swr_get_delay).
            auto dst_nb_samples = (int32_t) av_rescale_rnd(
                    swr_get_delay(swr, decodedFrame->sample_rate) + decodedFrame->nb_samples,
                    targetProperties.sampleRate,
                    decodedFrame->sample_rate,
                    AV_ROUND_UP);

            short *buffer1;
            av_samples_alloc(
                    (uint8_t **) &buffer1,
                    nullptr,
                    targetProperties.channelCount,
                    dst_nb_samples,
                    AV_SAMPLE_FMT_FLT,
                    0);
            int frame_count = swr_convert(
                    swr,
                    (uint8_t **) &buffer1,
                    dst_nb_samples,
                    (const uint8_t **) decodedFrame->data,
                    decodedFrame->nb_samples);

            int64_t bytesToWrite = frame_count * sizeof(float) * targetProperties.channelCount;
            // No bounds check: relies on the caller's kMaxCompressionRatio sizing.
            memcpy(targetData + bytesWritten, buffer1, (size_t)bytesToWrite);
            bytesWritten += bytesToWrite;
            av_freep(&buffer1);

            av_packet_unref(&avPacket);
        }
    }

    av_frame_free(&decodedFrame);

    LOGD("DECODE END");

    returnValue = bytesWritten;

    cleanup:
    return returnValue;
}
// Log the salient properties of the stream being decoded (debug aid only).
void FFMpegExtractor::printCodecParameters(AVCodecParameters *params) {
    LOGD("Stream properties");
    LOGD("Channels: %d", params->channels);
    LOGD("Channel layout: %" PRId64, params->channel_layout);
    LOGD("Sample rate: %d", params->sample_rate);
    LOGD("Format: %s", av_get_sample_fmt_name((AVSampleFormat)params->format));
    LOGD("Frame size: %d", params->frame_size);
}

View file

@ -0,0 +1,55 @@
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef FFMPEG_FFMPEGEXTRACTOR_H
#define FFMPEG_FFMPEGEXTRACTOR_H
extern "C" {
#include <libavformat/avformat.h>
#include <libswresample/swresample.h>
#include <libavutil/opt.h>
}
#include <cstdint>
#include <android/asset_manager.h>
#include <GameConstants.h>
/**
 * Decodes compressed audio assets to interleaved float PCM using FFmpeg,
 * resampling to the target properties. All methods are static; see the
 * implementation file for the decode pipeline.
 *
 * Fix: removed the private declarations `cleanup(AVIOContext*, AVFormatContext*)`
 * and `findCodec(AVCodecID)` — neither is defined in the implementation, so any
 * call would have been a link error; they were dead declarations.
 */
class FFMpegExtractor {
public:
    /**
     * Decode the asset into targetData as float PCM.
     * @return number of bytes written, or -1 on error
     */
    static int64_t decode(AAsset *asset, uint8_t *targetData, AudioProperties targetProperties);

private:
    // Pipeline helpers, in call order within decode().
    static bool createAVIOContext(AAsset *asset, uint8_t *buffer, uint32_t bufferSize,
                                  AVIOContext **avioContext);

    static bool createAVFormatContext(AVIOContext *avioContext, AVFormatContext **avFormatContext);

    static bool openAVFormatContext(AVFormatContext *avFormatContext);

    static bool getStreamInfo(AVFormatContext *avFormatContext);

    static AVStream *getBestAudioStream(AVFormatContext *avFormatContext);

    static void printCodecParameters(AVCodecParameters *params);
};
#endif //FFMPEG_FFMPEGEXTRACTOR_H

View file

@ -0,0 +1,199 @@
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <sys/types.h>
#include <cstring>
#include <media/NdkMediaExtractor.h>
#include <utils/logging.h>
#include <cinttypes>
#include "NDKExtractor.h"
/**
 * Decode a compressed audio asset into int16 PCM using the NDK media APIs.
 * The NDK decoder cannot resample or remix: the source must already match
 * targetProperties (verified below).
 *
 * Fixes relative to the original:
 *  - a channel-count mismatch logged "does not support ..." but then fell
 *    through and decoded anyway; it now returns 0 like the sample-rate branch
 *  - the AMediaExtractor / AMediaFormat were leaked on every early return
 *
 * @param asset            source asset (opened as a file descriptor)
 * @param targetData       destination buffer for int16 samples
 * @param targetProperties required channel count and sample rate
 * @return number of bytes written to targetData, or 0 on error
 */
int32_t NDKExtractor::decode(AAsset *asset, uint8_t *targetData, AudioProperties targetProperties) {

    LOGD("Using NDK decoder");

    // open asset as file descriptor
    off_t start, length;
    int fd = AAsset_openFileDescriptor(asset, &start, &length);
    // NOTE(review): `fd` is never closed on any path — confirm ownership.

    // Extract the audio frames
    AMediaExtractor *extractor = AMediaExtractor_new();
    media_status_t amresult = AMediaExtractor_setDataSourceFd(extractor, fd,
                                                              static_cast<off64_t>(start),
                                                              static_cast<off64_t>(length));
    if (amresult != AMEDIA_OK){
        LOGE("Error setting extractor data source, err %d", amresult);
        AMediaExtractor_delete(extractor);
        return 0;
    }

    // Specify our desired output format by creating it from our source
    AMediaFormat *format = AMediaExtractor_getTrackFormat(extractor, 0);

    int32_t sampleRate;
    if (AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_SAMPLE_RATE, &sampleRate)){
        LOGD("Source sample rate %d", sampleRate);
        if (sampleRate != targetProperties.sampleRate){
            LOGE("Input (%d) and output (%d) sample rates do not match. "
                 "NDK decoder does not support resampling.",
                 sampleRate,
                 targetProperties.sampleRate);
            AMediaFormat_delete(format);
            AMediaExtractor_delete(extractor);
            return 0;
        }
    } else {
        LOGE("Failed to get sample rate");
        AMediaFormat_delete(format);
        AMediaExtractor_delete(extractor);
        return 0;
    };

    int32_t channelCount;
    if (AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_CHANNEL_COUNT, &channelCount)){
        LOGD("Got channel count %d", channelCount);
        if (channelCount != targetProperties.channelCount){
            LOGE("NDK decoder does not support different "
                 "input (%d) and output (%d) channel counts",
                 channelCount,
                 targetProperties.channelCount);
            // Fix: fail here instead of decoding data that cannot match
            // targetProperties (the original fell through after this error).
            AMediaFormat_delete(format);
            AMediaExtractor_delete(extractor);
            return 0;
        }
    } else {
        LOGE("Failed to get channel count");
        AMediaFormat_delete(format);
        AMediaExtractor_delete(extractor);
        return 0;
    }

    const char *formatStr = AMediaFormat_toString(format);
    LOGD("Output format %s", formatStr);

    const char *mimeType;
    if (AMediaFormat_getString(format, AMEDIAFORMAT_KEY_MIME, &mimeType)) {
        LOGD("Got mime type %s", mimeType);
    } else {
        LOGE("Failed to get mime type");
        AMediaFormat_delete(format);
        AMediaExtractor_delete(extractor);
        return 0;
    }

    // Obtain the correct decoder
    AMediaCodec *codec = nullptr;
    AMediaExtractor_selectTrack(extractor, 0);
    codec = AMediaCodec_createDecoderByType(mimeType);
    AMediaCodec_configure(codec, format, nullptr, nullptr, 0);
    AMediaCodec_start(codec);

    // DECODE
    bool isExtracting = true;   // still feeding compressed samples to the codec
    bool isDecoding = true;     // still draining decoded PCM from the codec
    int32_t bytesWritten = 0;

    while(isExtracting || isDecoding){

        if (isExtracting){

            // Obtain the index of the next available input buffer
            ssize_t inputIndex = AMediaCodec_dequeueInputBuffer(codec, 2000);

            // The input index acts as a status if its negative
            if (inputIndex < 0){
                if (inputIndex == AMEDIACODEC_INFO_TRY_AGAIN_LATER){
                    // No input buffer free right now; try again next iteration.
                } else {
                    LOGE("Codec.dequeueInputBuffer unknown error status");
                }
            } else {

                // Obtain the actual buffer and read the encoded data into it
                size_t inputSize;
                uint8_t *inputBuffer = AMediaCodec_getInputBuffer(codec, inputIndex, &inputSize);

                ssize_t sampleSize = AMediaExtractor_readSampleData(extractor, inputBuffer, inputSize);
                auto presentationTimeUs = AMediaExtractor_getSampleTime(extractor);

                if (sampleSize > 0){

                    // Enqueue the encoded data
                    AMediaCodec_queueInputBuffer(codec, inputIndex, 0, sampleSize,
                                                 presentationTimeUs,
                                                 0);
                    AMediaExtractor_advance(extractor);

                } else {
                    LOGD("End of extractor data stream");
                    isExtracting = false;

                    // We need to tell the codec that we've reached the end of the stream
                    AMediaCodec_queueInputBuffer(codec, inputIndex, 0, 0,
                                                 presentationTimeUs,
                                                 AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM);
                }
            }
        }

        if (isDecoding){
            // Dequeue the decoded data
            AMediaCodecBufferInfo info;
            ssize_t outputIndex = AMediaCodec_dequeueOutputBuffer(codec, &info, 0);

            if (outputIndex >= 0){

                // Check whether this is set earlier
                if (info.flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM){
                    LOGD("Reached end of decoding stream");
                    isDecoding = false;
                }

                // Valid index, acquire buffer
                size_t outputSize;
                uint8_t *outputBuffer = AMediaCodec_getOutputBuffer(codec, outputIndex, &outputSize);

                // Copy the decoded PCM into the caller's buffer.
                // No bounds check: relies on the caller's sizing assumption.
                memcpy(targetData + bytesWritten, outputBuffer, info.size);
                bytesWritten+=info.size;
                AMediaCodec_releaseOutputBuffer(codec, outputIndex, false);
            } else {

                // The outputIndex doubles as a status return if its value is < 0
                switch(outputIndex){
                    case AMEDIACODEC_INFO_TRY_AGAIN_LATER:
                        LOGD("dequeueOutputBuffer: try again later");
                        break;
                    case AMEDIACODEC_INFO_OUTPUT_BUFFERS_CHANGED:
                        LOGD("dequeueOutputBuffer: output buffers changed");
                        break;
                    case AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED:
                        // NOTE(review): reassigning `format` leaks the previous
                        // AMediaFormat; only the last one is deleted below.
                        format = AMediaCodec_getOutputFormat(codec);
                        LOGD("outputFormat changed to: %s", AMediaFormat_toString(format));
                        break;
                }
            }
        }
    }

    // Clean up
    AMediaFormat_delete(format);
    AMediaCodec_delete(codec);
    AMediaExtractor_delete(extractor);
    return bytesWritten;
}

View file

@ -0,0 +1,33 @@
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef FFMPEG_NDKMEDIAEXTRACTOR_H
#define FFMPEG_NDKMEDIAEXTRACTOR_H
#include <cstdint>
#include <android/asset_manager.h>
#include <GameConstants.h>
class NDKExtractor {
public:
static int32_t decode(AAsset *asset, uint8_t *targetData, AudioProperties targetProperties);
};
#endif //FFMPEG_NDKMEDIAEXTRACTOR_H

View file

@ -0,0 +1,59 @@
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "Player.h"
#include "utils/logging.h"
/**
 * Render numFrames frames of interleaved audio into targetData, reading from
 * the data source at the current play head. When stopped (or after a
 * non-looping source ends mid-buffer) the output is filled with silence.
 *
 * @param targetData destination buffer (numFrames * channelCount floats)
 * @param numFrames  number of frames to render
 */
void Player::renderAudio(float *targetData, int32_t numFrames){

    const AudioProperties properties = mSource->getProperties();

    if (mIsPlaying){

        int64_t framesToRenderFromData = numFrames;
        int64_t totalSourceFrames = mSource->getSize() / properties.channelCount;
        const float *data = mSource->getData();

        // Check whether we're about to reach the end of the recording
        if (!mIsLooping && mReadFrameIndex + numFrames >= totalSourceFrames){
            framesToRenderFromData = totalSourceFrames - mReadFrameIndex;
            mIsPlaying = false;
        }

        for (int i = 0; i < framesToRenderFromData; ++i) {
            for (int j = 0; j < properties.channelCount; ++j) {
                targetData[(i*properties.channelCount)+j] = data[(mReadFrameIndex*properties.channelCount)+j];
            }

            // Increment and handle wraparound
            if (++mReadFrameIndex >= totalSourceFrames) mReadFrameIndex = 0;
        }

        if (framesToRenderFromData < numFrames){
            // Fill the remainder of the buffer with silence.
            // Fix: the original passed a FRAME index as a SAMPLE index and the
            // full buffer length as the count —
            //   renderSilence(&targetData[framesToRenderFromData], numFrames * channelCount)
            // — which started at the wrong offset and wrote past the end of
            // targetData. Convert frames to samples and zero only the remainder.
            renderSilence(&targetData[framesToRenderFromData * properties.channelCount],
                          (numFrames - framesToRenderFromData) * properties.channelCount);
        }
    } else {
        renderSilence(targetData, numFrames * properties.channelCount);
    }
}
// Zero-fill numSamples floats starting at `start`.
void Player::renderSilence(float *start, int32_t numSamples){
    float *cursor = start;
    int32_t remaining = numSamples;
    while (remaining-- > 0) {
        *cursor++ = 0;
    }
}

View file

@ -0,0 +1,60 @@
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef RHYTHMGAME_SOUNDRECORDING_H
#define RHYTHMGAME_SOUNDRECORDING_H
#include <cstdint>
#include <array>
#include <chrono>
#include <memory>
#include <atomic>
#include <android/asset_manager.h>
#include "shared/IRenderableAudio.h"
#include "DataSource.h"
/**
 * Plays audio from a DataSource into caller-supplied buffers, with optional
 * looping. Multiple Players may share one DataSource.
 */
class Player : public IRenderableAudio{

public:
    /**
     * Construct a new Player from the given DataSource. Players can share the same data source.
     * For example, you could play two identical sounds concurrently by creating 2 Players with the
     * same data source.
     *
     * @param source the audio data this player reads from
     */
    Player(std::shared_ptr<DataSource> source)
        : mSource(source)
    {};

    // Render numFrames frames of audio (or silence when stopped) into targetData.
    void renderAudio(float *targetData, int32_t numFrames);
    // Rewind to the start of the source.
    void resetPlayHead() { mReadFrameIndex = 0; };
    // NOTE: always rewinds the play head, even when pausing (isPlaying == false).
    void setPlaying(bool isPlaying) { mIsPlaying = isPlaying; resetPlayHead(); };
    void setLooping(bool isLooping) { mIsLooping = isLooping; };

private:
    int32_t mReadFrameIndex = 0;            // next frame to read from the source
    std::atomic<bool> mIsPlaying { false };
    std::atomic<bool> mIsLooping { false };
    std::shared_ptr<DataSource> mSource;    // shared, read-only sample data

    // Write numSamples zeros starting at the given pointer.
    void renderSilence(float*, int32_t);
};
#endif //RHYTHMGAME_SOUNDRECORDING_H

View file

@ -0,0 +1,93 @@
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <jni.h>
#include <memory>
#include <android/asset_manager_jni.h>
#include "utils/logging.h"
#include "Game.h"
extern "C" {

// Single native Game instance, created in native_onStart. Never reset, so it
// lives until the process dies.
// NOTE(review): the surface/draw callbacks below dereference `game` without a
// null check — this assumes the Java side always calls onStart before any
// rendering callback; confirm the Activity/SurfaceView lifecycle guarantees this.
std::unique_ptr<Game> game;

// Called from MainActivity.onResume: builds the Game from the app's assets and
// starts audio.
JNIEXPORT void JNICALL
Java_com_google_oboe_samples_rhythmgame_MainActivity_native_1onStart(JNIEnv *env, jobject instance,
                                                                     jobject jAssetManager) {
    AAssetManager *assetManager = AAssetManager_fromJava(env, jAssetManager);
    if (assetManager == nullptr) {
        LOGE("Could not obtain the AAssetManager");
        return;
    }

    game = std::make_unique<Game>(*assetManager);
    game->start();
}

// GLSurfaceView.Renderer.onSurfaceCreated -> native side.
JNIEXPORT void JNICALL
Java_com_google_oboe_samples_rhythmgame_RendererWrapper_native_1onSurfaceCreated(JNIEnv *env,
                                                                                 jobject instance) {
    game->onSurfaceCreated();
}

// GLSurfaceView.Renderer.onSurfaceChanged -> native side (viewport resize).
JNIEXPORT void JNICALL
Java_com_google_oboe_samples_rhythmgame_RendererWrapper_native_1onSurfaceChanged(JNIEnv *env,
                                                                                 jclass type,
                                                                                 jint width,
                                                                                 jint height) {
    game->onSurfaceChanged(width, height);
}

// Per-frame render callback.
JNIEXPORT void JNICALL
Java_com_google_oboe_samples_rhythmgame_RendererWrapper_native_1onDrawFrame(JNIEnv *env,
                                                                            jclass type) {
    game->tick();
}

// Touch input from GameSurfaceView. Only the event time is used for tap
// scoring; event type and pixel coordinates are currently ignored.
JNIEXPORT void JNICALL
Java_com_google_oboe_samples_rhythmgame_GameSurfaceView_native_1onTouchInput(JNIEnv *env,
                                                                             jclass type,
                                                                             jint event_type,
                                                                             jlong time_since_boot_ms,
                                                                             jint pixel_x,
                                                                             jint pixel_y) {
    game->tap(time_since_boot_ms);
}

// Surface teardown (e.g. the Activity is backgrounded) -> release GL resources.
JNIEXPORT void JNICALL
Java_com_google_oboe_samples_rhythmgame_GameSurfaceView_native_1surfaceDestroyed__(JNIEnv *env,
                                                                                   jclass type) {
    game->onSurfaceDestroyed();
}

// Called from MainActivity.onPause: stops audio.
JNIEXPORT void JNICALL
Java_com_google_oboe_samples_rhythmgame_MainActivity_native_1onStop(JNIEnv *env, jobject instance) {
    game->stop();
}

// Pass the device's preferred sample rate / burst size to Oboe so it can open
// a low-latency stream.
JNIEXPORT void JNICALL
Java_com_google_oboe_samples_rhythmgame_MainActivity_native_1setDefaultStreamValues(JNIEnv *env,
                                                                                    jclass type,
                                                                                    jint sampleRate,
                                                                                    jint framesPerBurst) {
    oboe::DefaultStreamValues::SampleRate = (int32_t) sampleRate;
    oboe::DefaultStreamValues::FramesPerBurst = (int32_t) framesPerBurst;
}
} // extern "C"

View file

@ -0,0 +1,33 @@
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "OpenGLFunctions.h"
#include "utils/logging.h"
void CheckOpenGLError(const char* stmt, const char* fname, int line)
{
GLenum err = glGetError();
if (err != GL_NO_ERROR)
{
LOGW("OpenGL error %08x, at %s:%i - for %s\n", err, fname, line, stmt);
assert(false);
}
}
// Clear the whole framebuffer to the given RGBA color.
void SetGLScreenColor(ScreenColor color){
    glClearColor(color.red, color.green, color.blue, color.alpha);
    glClear(GL_COLOR_BUFFER_BIT);
}

View file

@ -0,0 +1,58 @@
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ANDROID_OGL_H
#define ANDROID_OGL_H

#include <assert.h>

// RGBA clear color; each channel is expected in [0.0, 1.0].
struct ScreenColor {
    float red;
    float green;
    float blue;
    float alpha;
};

constexpr ScreenColor RED { 1.0f, 0.0f, 0.0f, 1.0f };
constexpr ScreenColor GREEN { 0.0f, 1.0f, 0.0f, 1.0f };
constexpr ScreenColor BLUE { 0.0f, 0.0f, 1.0f, 1.0f };
constexpr ScreenColor PURPLE { 1.0f, 0.0f, 1.0f, 1.0f };
constexpr ScreenColor ORANGE { 1.0f, 0.5f, 0.0f, 1.0f };
constexpr ScreenColor GREY { 0.3f, 0.3f, 0.3f, 0.3f };
constexpr ScreenColor YELLOW { 1.0f, 1.0f, 0.0f, 1.0f };

// Select the GLES header set at compile time; defaults to GLES 2.
// Use defined() so the check is well-formed even if GL3_2 is defined empty.
#ifdef GL3
#include <GLES3/gl3.h>
#elif defined(GL3_2)
#include <GLES3/gl32.h>
#else
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#endif

// Log (and assert on) any pending GL errors for the statement at fname:line.
void CheckOpenGLError(const char* stmt, const char* fname, int line);

#ifndef NDEBUG
// Execute stmt, then check the GL error queue. Wrapped in do/while(0) so the
// macro behaves as a single statement — the previous two-statement expansion
// broke when used as the body of an unbraced if/else.
#define GL_CHECK(stmt) do { \
    stmt; \
    CheckOpenGLError(#stmt, __FILE__, __LINE__); \
} while (0)
#else
#define GL_CHECK(stmt) stmt
#endif

// Clear the whole framebuffer to the given color.
void SetGLScreenColor(ScreenColor color);

#endif //ANDROID_OGL_H

View file

@ -0,0 +1,154 @@
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef RHYTHMGAME_LOCKFREEQUEUE_H
#define RHYTHMGAME_LOCKFREEQUEUE_H
#include <cstdint>
#include <atomic>
/**
* A lock-free queue for single consumer, single producer. Not thread-safe when using multiple
* consumers or producers.
*
* Example code:
*
* LockFreeQueue<int, 1024> myQueue;
* int value = 123;
* myQueue.push(value);
* myQueue.pop(value);
*
* @tparam T - The item type
* @tparam CAPACITY - Maximum number of items which can be held in the queue. Must be a power of 2.
* Must be less than the maximum value permissible in INDEX_TYPE
* @tparam INDEX_TYPE - The internal index type, defaults to uint32_t. Changing this will affect
* the maximum capacity. Included for ease of unit testing because testing queue lengths of
* UINT32_MAX can be time consuming and is not always possible.
*/
template <typename T, uint32_t CAPACITY, typename INDEX_TYPE = uint32_t>
class LockFreeQueue {
public:
    /**
     * Implementation details:
     *
     * We have 2 counters: readCounter and writeCounter. Each will increment until it reaches
     * INDEX_TYPE_MAX, then wrap to zero. Unsigned integer overflow is defined behaviour in C++.
     *
     * Each time we need to access our data array we call mask() which gives us the index into the
     * array. This approach avoids having a "dead item" in the buffer to distinguish between full
     * and empty states. It also allows us to have a size() method which is easily calculated.
     *
     * IMPORTANT: This implementation is only thread-safe with a single reader thread and a single
     * writer thread. Have more than one of either will result in Bad Things.
     */

    // True iff n is a positive power of two. The n != 0 term matters: for
    // n == 0, (n & (n - 1)) is also 0, so without it zero would incorrectly
    // pass and the static_assert below would accept CAPACITY == 0.
    static constexpr bool isPowerOfTwo(uint32_t n) { return n != 0 && (n & (n - 1)) == 0; }
    static_assert(isPowerOfTwo(CAPACITY), "Capacity must be a power of 2");
    static_assert(std::is_unsigned<INDEX_TYPE>::value, "Index type must be unsigned");

    /**
     * Pop a value off the head of the queue
     *
     * @param val - element will be stored in this variable
     * @return true if value was popped successfully, false if the queue is empty
     */
    bool pop(T &val) {
        if (isEmpty()){
            return false;
        } else {
            val = buffer[mask(readCounter)];
            ++readCounter;
            return true;
        }
    }

    /**
     * Add an item to the back of the queue
     *
     * @param item - The item to add
     * @return true if item was added, false if the queue was full
     */
    bool push(const T& item) {
        if (isFull()){
            return false;
        } else {
            buffer[mask(writeCounter)] = item;
            ++writeCounter;
            return true;
        }
    }

    /**
     * Get the item at the front of the queue but do not remove it
     *
     * @param item - item will be stored in this variable
     * @return true if item was stored, false if the queue was empty
     */
    bool peek(T &item) const {
        if (isEmpty()){
            return false;
        } else {
            item = buffer[mask(readCounter)];
            return true;
        }
    }

    /**
     * Get the number of items in the queue
     *
     * @return number of items in the queue
     */
    INDEX_TYPE size() const {
        /**
         * This is worth some explanation:
         *
         * Whilst writeCounter is greater than readCounter the result of (write - read) will always
         * be positive. Simple.
         *
         * But when writeCounter is equal to INDEX_TYPE_MAX (e.g. UINT32_MAX) the next push will
         * wrap it around to zero, the start of the buffer, making writeCounter less than
         * readCounter so the result of (write - read) will be negative.
         *
         * But because we're returning an unsigned type return value will be as follows:
         *
         * returnValue = INDEX_TYPE_MAX - (write - read)
         *
         * e.g. if write is 0, read is 150 and the INDEX_TYPE is uint8_t where the max value is
         * 255 the return value will be (255 - (0 - 150)) = 105.
         *
         */
        return writeCounter - readCounter;
    };

private:
    // Queue is empty exactly when both counters are equal.
    bool isEmpty() const { return readCounter == writeCounter; }
    // Full when the counters are CAPACITY apart (counters never run further
    // apart than CAPACITY because push refuses when full).
    bool isFull() const { return size() == CAPACITY; }
    // Map a monotonically increasing counter to a buffer index; valid because
    // CAPACITY is a power of two.
    INDEX_TYPE mask(INDEX_TYPE n) const { return static_cast<INDEX_TYPE>(n & (CAPACITY - 1)); }

    T buffer[CAPACITY];

    // Default (seq_cst) atomic ordering keeps the single-producer /
    // single-consumer accesses to buffer correctly ordered.
    std::atomic<INDEX_TYPE> writeCounter { 0 };
    std::atomic<INDEX_TYPE> readCounter { 0 };
};
#endif //RHYTHMGAME_LOCKFREEQUEUE_H

View file

@ -0,0 +1,40 @@
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <chrono>
#include <ui/OpenGLFunctions.h>
#include <GameConstants.h>
#include "UtilityFunctions.h"
#include "logging.h"
// Milliseconds elapsed on the monotonic (steady) clock. The epoch is
// unspecified by the C++ standard (commonly boot time on Linux/Android), so
// values are only meaningful relative to each other, never as wall-clock time.
int64_t nowUptimeMillis() {
    const auto sinceEpoch = std::chrono::steady_clock::now().time_since_epoch();
    return std::chrono::duration_cast<std::chrono::milliseconds>(sinceEpoch).count();
}
// Flash the screen with the color mapped to the tap result (the kTap*Color
// constants come from GameConstants.h). All TapResult values are covered, so
// the switch has no default.
void renderEvent(TapResult r){
    switch (r) {
        case TapResult::Success:
            SetGLScreenColor(kTapSuccessColor);
            break;
        case TapResult::Early:
            SetGLScreenColor(kTapEarlyColor);
            break;
        case TapResult::Late:
            SetGLScreenColor(kTapLateColor);
            break;
    }
}

View file

@ -0,0 +1,42 @@
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef RHYTHMGAME_UTILITYFUNCTIONS_H
#define RHYTHMGAME_UTILITYFUNCTIONS_H
#include <stdint.h>
constexpr int64_t kMillisecondsInSecond = 1000;
constexpr int64_t kNanosecondsInMillisecond = 1000000;
enum class TapResult {
Early,
Late,
Success
};
int64_t nowUptimeMillis();
constexpr int64_t convertFramesToMillis(const int64_t frames, const int sampleRate){
return static_cast<int64_t>((static_cast<double>(frames)/ sampleRate) * kMillisecondsInSecond);
}
TapResult getTapResult(int64_t tapTimeInMillis, int64_t tapWindowInMillis);
void renderEvent(TapResult r);
#endif //RHYTHMGAME_UTILITYFUNCTIONS_H

View file

@ -0,0 +1,32 @@
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ANDROID_LOGGING_H
#define ANDROID_LOGGING_H
#include <stdio.h>
#include <android/log.h>
#include <vector>
#define APP_NAME "RhythmGame"
#define LOGD(...) ((void)__android_log_print(ANDROID_LOG_DEBUG, APP_NAME, __VA_ARGS__))
#define LOGI(...) ((void)__android_log_print(ANDROID_LOG_INFO, APP_NAME, __VA_ARGS__))
#define LOGW(...) ((void)__android_log_print(ANDROID_LOG_WARN, APP_NAME, __VA_ARGS__))
#define LOGE(...) ((void)__android_log_print(ANDROID_LOG_ERROR, APP_NAME, __VA_ARGS__))
#endif //ANDROID_LOGGING_H

View file

@ -0,0 +1,64 @@
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.oboe.samples.rhythmgame;
import android.content.Context;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.SurfaceHolder;
// GLSurfaceView that forwards touch-down events and surface teardown to the
// native game (implemented in native-lib.cpp).
public class GameSurfaceView extends GLSurfaceView {

    // Forward a touch event to native code. eventType is currently always 0
    // (ACTION_DOWN) and the pixel coordinates are passed through unused by the
    // native tap handler.
    public static native void native_onTouchInput(int eventType, long timeSinceBootMs, int pixel_x, int pixel_y);

    // Tell native code to release GL resources before the surface goes away.
    public static native void native_surfaceDestroyed();

    private final RendererWrapper mRenderer;

    public GameSurfaceView(Context context) {
        super(context);
        // Request a GLES 2.0 context before installing the renderer.
        setEGLContextClientVersion(2);
        mRenderer = new RendererWrapper();

        // Set the Renderer for drawing on the GLSurfaceView
        setRenderer(mRenderer);
    }

    // XML-inflation constructor; mirrors the programmatic constructor above.
    public GameSurfaceView(Context context, AttributeSet attrs) {
        super(context, attrs);
        setEGLContextClientVersion(2);
        mRenderer = new RendererWrapper();

        // Set the Renderer for drawing on the GLSurfaceView
        setRenderer(mRenderer);
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        // Notify native code first so it can drop GL handles while the
        // surface still exists.
        native_surfaceDestroyed();
        super.surfaceDestroyed(holder);
    }

    @Override
    public boolean onTouchEvent(MotionEvent e) {
        // MotionEvent reports input details from the touch screen
        // and other input controls. In our case we care about DOWN events.
        switch (e.getAction()) {
            case MotionEvent.ACTION_DOWN:
                native_onTouchInput(0, e.getEventTime(), (int)e.getX(), (int)e.getY());
                break;
        }
        return true;
    }
}

View file

@ -0,0 +1,75 @@
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.oboe.samples.rhythmgame;
import android.content.Context;
import android.content.res.AssetManager;
import androidx.appcompat.app.AppCompatActivity;
import android.media.AudioManager;
import android.os.Build;
import android.os.Bundle;
import android.view.WindowManager;
/**
 * Entry Activity: loads the native libraries, keeps the screen on, and wires
 * the Activity lifecycle (onResume/onPause) to the native game engine.
 */
public class MainActivity extends AppCompatActivity {

    // Used to load the 'native-lib' library on application startup.
    static {
        // Compare Strings with equals(): '==' checks reference identity and
        // only worked here by accident of compile-time constant interning.
        if ("ffmpegExtractor".equals(BuildConfig.FLAVOR)) {
            System.loadLibrary("avutil");
            System.loadLibrary("swresample");
            System.loadLibrary("avcodec");
            System.loadLibrary("avformat");
        }
        System.loadLibrary("native-lib");
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        // Keep the screen awake while the game is in the foreground.
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        setDefaultStreamValues(this);
    }

    @Override
    protected void onResume() {
        super.onResume();
        // Start the native game/audio engine with access to bundled assets.
        native_onStart(getAssets());
    }

    @Override
    protected void onPause() {
        native_onStop();
        super.onPause();
    }

    /**
     * Query the device's preferred output sample rate and burst size and hand
     * them to the native layer so Oboe can open a low-latency stream.
     */
    static void setDefaultStreamValues(Context context) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
            AudioManager myAudioMgr = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
            // getProperty may return null on some devices; fall back to common
            // defaults instead of throwing NumberFormatException in parseInt.
            String sampleRateStr = myAudioMgr.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
            int defaultSampleRate = (sampleRateStr != null) ? Integer.parseInt(sampleRateStr) : 48000;
            String framesPerBurstStr = myAudioMgr.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
            int defaultFramesPerBurst = (framesPerBurstStr != null) ? Integer.parseInt(framesPerBurstStr) : 192;

            native_setDefaultStreamValues(defaultSampleRate, defaultFramesPerBurst);
        }
    }

    private native void native_onStart(AssetManager assetManager);
    private native void native_onStop();
    private static native void native_setDefaultStreamValues(int defaultSampleRate,
                                                             int defaultFramesPerBurst);
}

View file

@ -0,0 +1,41 @@
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.oboe.samples.rhythmgame;
import android.opengl.GLSurfaceView;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
// Thin GLSurfaceView.Renderer that forwards every callback to the native
// renderer (implemented in native-lib.cpp).
public class RendererWrapper implements GLSurfaceView.Renderer {
    public static native void native_onSurfaceCreated();
    public static native void native_onSurfaceChanged(int widthInPixels, int heightInPixels);
    public static native void native_onDrawFrame();

    @Override
    public void onSurfaceCreated(GL10 gl10, EGLConfig eglConfig) {
        // gl10/eglConfig are legacy GLES 1.0 handles and are intentionally
        // unused; native code uses its own GLES bindings.
        native_onSurfaceCreated();
    }

    @Override
    public void onSurfaceChanged(GL10 gl10, int width, int height) {
        native_onSurfaceChanged(width, height);
    }

    @Override
    public void onDrawFrame(GL10 gl10) {
        native_onDrawFrame();
    }
}

View file

@ -0,0 +1,34 @@
<vector xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:aapt="http://schemas.android.com/aapt"
android:width="108dp"
android:height="108dp"
android:viewportHeight="108"
android:viewportWidth="108">
<path
android:fillType="evenOdd"
android:pathData="M32,64C32,64 38.39,52.99 44.13,50.95C51.37,48.37 70.14,49.57 70.14,49.57L108.26,87.69L108,109.01L75.97,107.97L32,64Z"
android:strokeColor="#00000000"
android:strokeWidth="1">
<aapt:attr name="android:fillColor">
<gradient
android:endX="78.5885"
android:endY="90.9159"
android:startX="48.7653"
android:startY="61.0927"
android:type="linear">
<item
android:color="#44000000"
android:offset="0.0" />
<item
android:color="#00000000"
android:offset="1.0" />
</gradient>
</aapt:attr>
</path>
<path
android:fillColor="#FFFFFF"
android:fillType="nonZero"
android:pathData="M66.94,46.02L66.94,46.02C72.44,50.07 76,56.61 76,64L32,64C32,56.61 35.56,50.11 40.98,46.06L36.18,41.19C35.45,40.45 35.45,39.3 36.18,38.56C36.91,37.81 38.05,37.81 38.78,38.56L44.25,44.05C47.18,42.57 50.48,41.71 54,41.71C57.48,41.71 60.78,42.57 63.68,44.05L69.11,38.56C69.84,37.81 70.98,37.81 71.71,38.56C72.44,39.3 72.44,40.45 71.71,41.19L66.94,46.02ZM62.94,56.92C64.08,56.92 65,56.01 65,54.88C65,53.76 64.08,52.85 62.94,52.85C61.8,52.85 60.88,53.76 60.88,54.88C60.88,56.01 61.8,56.92 62.94,56.92ZM45.06,56.92C46.2,56.92 47.13,56.01 47.13,54.88C47.13,53.76 46.2,52.85 45.06,52.85C43.92,52.85 43,53.76 43,54.88C43,56.01 43.92,56.92 45.06,56.92Z"
android:strokeColor="#00000000"
android:strokeWidth="1" />
</vector>

View file

@ -0,0 +1,170 @@
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="108dp"
android:height="108dp"
android:viewportHeight="108"
android:viewportWidth="108">
<path
android:fillColor="#26A69A"
android:pathData="M0,0h108v108h-108z" />
<path
android:fillColor="#00000000"
android:pathData="M9,0L9,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,0L19,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M29,0L29,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M39,0L39,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M49,0L49,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M59,0L59,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M69,0L69,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M79,0L79,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M89,0L89,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M99,0L99,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,9L108,9"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,19L108,19"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,29L108,29"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,39L108,39"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,49L108,49"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,59L108,59"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,69L108,69"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,79L108,79"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,89L108,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,99L108,99"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,29L89,29"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,39L89,39"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,49L89,49"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,59L89,59"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,69L89,69"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,79L89,79"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M29,19L29,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M39,19L39,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M49,19L49,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M59,19L59,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M69,19L69,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M79,19L79,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
</vector>

View file

@ -0,0 +1,40 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context="com.google.oboe.samples.rhythmgame.MainActivity">
<com.google.oboe.samples.rhythmgame.GameSurfaceView
android:id="@+id/glSurfaceView"
android:layout_width="match_parent"
android:layout_centerHorizontal="true"
android:layout_centerVertical="true"
android:layout_marginBottom="0dp"
android:layout_marginEnd="0dp"
android:layout_marginStart="0dp"
android:layout_marginTop="0dp"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toTopOf="parent"
android:layout_height="match_parent" />
<TextView
android:id="@+id/instructionsBlock"
android:layout_width="294dp"
android:layout_height="159dp"
android:layout_marginStart="16dp"
android:layout_marginTop="16dp"
android:clickable="false"
android:ems="10"
android:focusable="false"
android:gravity="start|top"
android:inputType="textMultiLine"
android:text="See https://github.com/google/oboe/blob/main/samples/RhythmGame/README.md for instructions. There is a link to a codelab to follow along with."
android:textColor="#FFFFFF"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toTopOf="parent" />
</androidx.constraintlayout.widget.ConstraintLayout>

View file

@ -0,0 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@drawable/ic_launcher_foreground" />
</adaptive-icon>

View file

@ -0,0 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@drawable/ic_launcher_foreground" />
</adaptive-icon>

Binary file not shown.

After

Width:  |  Height:  |  Size: 3 KiB

Some files were not shown because too many files have changed in this diff Show more