Mirror of https://github.com/XProger/OpenLara.git

#15 Android: add OpenSL ES support for low-latency audio output

Author: XProger
Date:   2019-02-15 04:25:19 +03:00
Parent: 5d8e1c53e1
Commit: 60b2e89d43
4 changed files with 96 additions and 96 deletions

File: CMakeLists.txt

@@ -12,4 +12,4 @@ add_library( game SHARED
include_directories(../../../)
target_link_libraries( game GLESv2 log )
target_link_libraries( game GLESv3 OpenSLES log )

File: AndroidManifest.xml

@@ -7,6 +7,7 @@
<supports-screens android:smallScreens="true" android:largeScreens="true" android:normalScreens="true" android:xlargeScreens="true" />
<uses-feature android:glEsVersion="0x00020000" android:required="true" />
<uses-feature android:name="android.hardware.audio.low_latency" />
<!--
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />

File: Android native source (C++ / JNI layer)

@@ -5,6 +5,8 @@
#include <math.h>
#include <pthread.h>
#include <cstring>
#include <SLES/OpenSLES.h>
#include <SLES/OpenSLES_Android.h>
#include "game.h"
@@ -23,6 +25,88 @@ int osGetTime() {
return int((t.tv_sec - startTime) * 1000 + t.tv_usec / 1000);
}
// sound
#define SND_FRAMES 1176
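// two ping-pong buffers of 1176 stereo frames each (4704 bytes, ~26.7 ms at 44.1 kHz)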
Sound::Frame sndBuf[2][SND_FRAMES];
int sndBufIndex;
SLObjectItf sndEngine;
SLObjectItf sndOutput;
SLObjectItf sndPlayer;
SLBufferQueueItf sndQueue = NULL;
SLPlayItf sndPlay = NULL;
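// buffer queue callback: toggle sndBufIndex, mix the next SND_FRAMES frames into the freed buffer and enqueue it again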
void sndFill(SLBufferQueueItf bq, void *context) {
if (!sndQueue) return;
Sound::fill(sndBuf[sndBufIndex ^= 1], SND_FRAMES);
(*sndQueue)->Enqueue(sndQueue, sndBuf[sndBufIndex], SND_FRAMES * sizeof(Sound::Frame));
}
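// play/pause the OpenSL ES player; driven from the Java side through nativeSoundState()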
void sndSetState(bool active) {
if (!sndPlay) return;
(*sndPlay)->SetPlayState(sndPlay, active ? SL_PLAYSTATE_PLAYING : SL_PLAYSTATE_PAUSED);
}
void sndInit() {
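// create and realize the engine and the output mix (SL_BOOLEAN_FALSE = synchronous realize)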
slCreateEngine(&sndEngine, 0, NULL, 0, NULL, NULL);
(*sndEngine)->Realize(sndEngine, SL_BOOLEAN_FALSE);
SLEngineItf engine;
(*sndEngine)->GetInterface(sndEngine, SL_IID_ENGINE, &engine);
(*engine)->CreateOutputMix(engine, &sndOutput, 0, NULL, NULL);
(*sndOutput)->Realize(sndOutput, SL_BOOLEAN_FALSE);
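// source format: 16-bit signed stereo PCM at 44.1 kHz, fed through a two-buffer Android simple buffer queue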
SLDataFormat_PCM bufFormat;
bufFormat.formatType = SL_DATAFORMAT_PCM;
bufFormat.numChannels = 2;
bufFormat.samplesPerSec = SL_SAMPLINGRATE_44_1;
bufFormat.bitsPerSample = SL_PCMSAMPLEFORMAT_FIXED_16;
bufFormat.containerSize = SL_PCMSAMPLEFORMAT_FIXED_16;
bufFormat.channelMask = SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT ;
bufFormat.endianness = SL_BYTEORDER_LITTLEENDIAN;
SLDataLocator_AndroidSimpleBufferQueue bufLocator;
bufLocator.locatorType = SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE;
bufLocator.numBuffers = 2;
SLDataLocator_OutputMix snkLocator;
snkLocator.locatorType = SL_DATALOCATOR_OUTPUTMIX;
snkLocator.outputMix = sndOutput;
SLDataSource audioSrc;
audioSrc.pLocator = &bufLocator;
audioSrc.pFormat = &bufFormat;
SLDataSink audioSnk;
audioSnk.pLocator = &snkLocator;
audioSnk.pFormat = NULL;
SLInterfaceID audioInt[] = { SL_IID_BUFFERQUEUE, SL_IID_PLAY };
SLboolean audioReq[] = { SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE };
(*engine)->CreateAudioPlayer(engine, &sndPlayer, &audioSrc, &audioSnk, 2, audioInt, audioReq);
(*sndPlayer)->Realize(sndPlayer, SL_BOOLEAN_FALSE);
(*sndPlayer)->GetInterface(sndPlayer, SL_IID_BUFFERQUEUE, &sndQueue);
(*sndPlayer)->GetInterface(sndPlayer, SL_IID_PLAY, &sndPlay);
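// register the refill callback and prime both buffers so the queue is full before playback starts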
(*sndQueue)->RegisterCallback(sndQueue, sndFill, NULL);
sndBufIndex = 1;
sndFill(sndQueue, NULL);
sndFill(sndQueue, NULL);
}
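// tear down in reverse order of creation and clear the cached interfaces so sndFill()/sndSetState() become no-ops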
void sndFree() {
if (sndPlayer) (*sndPlayer)->Destroy(sndPlayer);
if (sndOutput) (*sndOutput)->Destroy(sndOutput);
if (sndEngine) (*sndEngine)->Destroy(sndEngine);
sndPlayer = sndOutput = sndEngine = NULL;
sndQueue = NULL;
sndPlay = NULL;
}
// joystick
bool osJoyReady(int index) {
return index == 0;
@@ -65,12 +149,15 @@ JNI_METHOD(void, nativeInit)(JNIEnv* env, jobject obj, jstring jcontentDir, jstr
strcpy(saveDir, cacheDir);
sndInit();
glGetIntegerv(GL_FRAMEBUFFER_BINDING, (GLint*)&GAPI::defaultFBO);
Game::init();
}
JNI_METHOD(void, nativeFree)(JNIEnv* env) {
Game::deinit();
sndFree();
}
JNI_METHOD(void, nativeReset)(JNIEnv* env) {
@@ -182,11 +269,8 @@ JNI_METHOD(void, nativeSetEye)(JNIEnv* env, jobject obj, jint eye, jfloatArray p
env->ReleaseFloatArrayElements(view, mView, 0);
}
JNI_METHOD(void, nativeSoundFill)(JNIEnv* env, jobject obj, jshortArray buffer) {
jshort *frames = env->GetShortArrayElements(buffer, NULL);
jsize count = env->GetArrayLength(buffer) / 2;
Sound::fill((Sound::Frame*)frames, count);
env->ReleaseShortArrayElements(buffer, frames, 0);
JNI_METHOD(void, nativeSoundState)(JNIEnv* env, jobject obj, jboolean active) {
sndSetState(active);
}
}
}
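None of the OpenSL ES calls in sndInit() check their SLresult return values, so a failure in CreateAudioPlayer (for example, an output configuration the device rejects) leaves sndQueue NULL and the game simply runs silent. A minimal sketch of a checked variant is shown below; the SL_CHECK macro and the "OpenLara" log tag are illustrative assumptions, not part of this commit, while __android_log_print comes from the log library that the CMake target already links.

#include <SLES/OpenSLES.h>
#include <android/log.h>

// hypothetical helper: log the failing call and abort initialization early
#define SL_CHECK(expr)                                                    \
    do {                                                                  \
        SLresult res = (expr);                                            \
        if (res != SL_RESULT_SUCCESS) {                                   \
            __android_log_print(ANDROID_LOG_ERROR, "OpenLara",            \
                                "%s failed (SLresult %u)", #expr,         \
                                (unsigned)res);                           \
            return;                                                       \
        }                                                                 \
    } while (0)

// possible usage inside sndInit():
//   SL_CHECK( slCreateEngine(&sndEngine, 0, NULL, 0, NULL, NULL) );
//   SL_CHECK( (*sndEngine)->Realize(sndEngine, SL_BOOLEAN_FALSE) );
//   SL_CHECK( (*engine)->CreateOutputMix(engine, &sndOutput, 0, NULL, NULL) );

Because the macro returns from the enclosing function, it only fits void initializers like sndInit(); a bool-returning variant would be needed if the caller should react to the failure.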

File: MainActivity.java

@@ -2,9 +2,6 @@ package org.xproger.openlara;
import java.util.ArrayList;
import javax.microedition.khronos.egl.EGLConfig;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
import android.os.Bundle;
import android.os.Environment;
import android.view.InputDevice;
@@ -14,16 +11,12 @@ import android.view.View;
import android.view.View.OnGenericMotionListener;
import android.view.View.OnKeyListener;
import android.view.View.OnTouchListener;
import android.view.Window;
import android.view.WindowManager;
import com.google.vr.sdk.base.AndroidCompat;
import com.google.vr.sdk.base.Eye;
import com.google.vr.sdk.base.GvrActivity;
import com.google.vr.sdk.base.GvrView;
import com.google.vr.sdk.base.HeadTransform;
import com.google.vr.sdk.base.Viewport;
import android.app.Activity;
public class MainActivity extends GvrActivity implements OnTouchListener, OnKeyListener, OnGenericMotionListener {
static GvrView gvrView;
@@ -218,79 +211,6 @@ public class MainActivity extends GvrActivity implements OnTouchListener, OnKeyL
}
}
// @TODO: use native OpenSL ES
class Sound {
private short buffer[];
private static AudioTrack audioTrack;
void start(final Wrapper wrapper) {
int rate = 44100;
int size = AudioTrack.getMinBufferSize(rate, AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT);
size /= 2; // bytes -> words
while (size % 4704 != 0) size++;
//System.out.println(String.format("sound buffer size: %d", size));
buffer = new short[size];
try {
audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, 44100, AudioFormat.CHANNEL_OUT_STEREO,
AudioFormat.ENCODING_PCM_16BIT, size * 2, AudioTrack.MODE_STREAM);
}catch (IllegalArgumentException e){
System.out.println("Error: buffer size is zero");
return;
}
try {
audioTrack.play();
}catch (NullPointerException e){
System.out.println("Error: audioTrack null pointer on start()");
return;
}
new Thread( new Runnable() {
public void run() {
while ( audioTrack.getPlayState() != AudioTrack.PLAYSTATE_STOPPED ) {
if (audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING && wrapper.ready) {
Wrapper.nativeSoundFill(buffer);
audioTrack.write(buffer, 0, buffer.length);
audioTrack.flush();
} else
try {
Thread.sleep(10);
} catch(Exception e) {
//
}
}
}
} ).start();
}
void stop() {
try {
audioTrack.flush();
audioTrack.stop();
audioTrack.release();
}catch (NullPointerException e){
System.out.println("Error: audioTrack null pointer on stop()");
}
}
void play() {
try {
audioTrack.play();
}catch (NullPointerException e){
System.out.println("Error: audioTrack null pointer on play()");
}
}
void pause() {
try {
audioTrack.pause();
}catch (NullPointerException e){
System.out.println("Error: audioTrack null pointer on pause()");
};
}
}
class Touch {
int id, state;
float x, y;
@@ -315,34 +235,29 @@ class Wrapper implements GvrView.StereoRenderer {
public static native void nativeFrameEnd();
public static native void nativeFrameRender();
public static native void nativeTouch(int id, int state, float x, float y);
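// audio mixing now stays in native code: the per-buffer nativeSoundFill() round trip is replaced by a play/pause toggle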
public static native void nativeSoundFill(short buffer[]);
public static native void nativeSoundState(boolean active);
Boolean ready = false;
Boolean toggleVR = false;
private String contentDir;
private String cacheDir;
private ArrayList<Touch> touch = new ArrayList<>();
private Sound sound;
void onCreate(String contentDir, String cacheDir) {
this.contentDir = contentDir;
this.cacheDir = cacheDir;
sound = new Sound();
sound.start(this);
}
void onDestroy() {
sound.stop();
nativeFree();
}
void onPause() {
sound.pause();
nativeSoundState(false);
}
void onResume() {
sound.play();
nativeSoundState(true);
if (ready) nativeReset();
}
@@ -361,7 +276,7 @@ class Wrapper implements GvrView.StereoRenderer {
public void onSurfaceCreated(EGLConfig config) {
if (!ready) {
nativeInit(contentDir, cacheDir);
sound.play();
nativeSoundState(true);
ready = true;
}
}