Mirror of https://github.com/libsdl-org/SDL.git (synced 2025-05-30 00:17:40 +00:00)
Removed the ancient Android audio driver
OpenSLES is well supported on Android 4.0 and later.
parent 446c05a001
commit 691cf15e75

10 changed files with 11 additions and 935 deletions
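After this change, an Android build of SDL selects its audio backend from the remaining drivers (the OpenSL ES and AAudio backends seen in the hunks below). As a side note, not part of the commit itself, an application can confirm at runtime which backend SDL actually picked; the following is a minimal sketch using the public SDL3 driver-query functions, with error handling omitted:

/* Hypothetical standalone check (not part of this commit): list the audio
 * backends compiled into SDL and report the one selected at runtime. */
#include <SDL3/SDL.h>

int main(void)
{
    SDL_Init(SDL_INIT_AUDIO);   /* error handling omitted for brevity */

    for (int i = 0; i < SDL_GetNumAudioDrivers(); i++) {
        SDL_Log("compiled-in audio driver: %s", SDL_GetAudioDriver(i));
    }
    SDL_Log("current audio driver: %s", SDL_GetCurrentAudioDriver());

    SDL_Quit();
    return 0;
}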
@@ -1246,9 +1246,6 @@ if(ANDROID)
     endif()
 
   if(SDL_AUDIO)
-    set(SDL_AUDIO_DRIVER_ANDROID 1)
-    sdl_glob_sources("${SDL3_SOURCE_DIR}/src/audio/android/*.c")
-
     set(SDL_AUDIO_DRIVER_OPENSLES 1)
     sdl_glob_sources("${SDL3_SOURCE_DIR}/src/audio/openslES/*.c")
 
@@ -3,11 +3,7 @@ package org.libsdl.app;
 import android.content.Context;
 import android.media.AudioDeviceCallback;
 import android.media.AudioDeviceInfo;
-import android.media.AudioFormat;
 import android.media.AudioManager;
-import android.media.AudioRecord;
-import android.media.AudioTrack;
-import android.media.MediaRecorder;
 import android.os.Build;
 import android.util.Log;
 
@@ -17,15 +13,11 @@ import java.util.ArrayList;
 public class SDLAudioManager {
     protected static final String TAG = "SDLAudio";
 
-    protected static AudioTrack mAudioTrack;
-    protected static AudioRecord mAudioRecord;
     protected static Context mContext;
 
     private static AudioDeviceCallback mAudioDeviceCallback;
 
     public static void initialize() {
-        mAudioTrack = null;
-        mAudioRecord = null;
         mAudioDeviceCallback = null;
 
         if(Build.VERSION.SDK_INT >= 24 /* Android 7.0 (N) */)
@@ -58,230 +50,6 @@ public class SDLAudioManager {
 
     // Audio
 
-    protected static String getAudioFormatString(int audioFormat) {
-        switch (audioFormat) {
-        case AudioFormat.ENCODING_PCM_8BIT:
-            return "8-bit";
-        case AudioFormat.ENCODING_PCM_16BIT:
-            return "16-bit";
-        case AudioFormat.ENCODING_PCM_FLOAT:
-            return "float";
-        default:
-            return Integer.toString(audioFormat);
-        }
-    }
-
-    protected static int[] open(boolean recording, int sampleRate, int audioFormat, int desiredChannels, int desiredFrames, int deviceId) {
-        int channelConfig;
-        int sampleSize;
-        int frameSize;
-
-        Log.v(TAG, "Opening " + (recording ? "recording" : "playback") + ", requested " + desiredFrames + " frames of " + desiredChannels + " channel " + getAudioFormatString(audioFormat) + " audio at " + sampleRate + " Hz");
-
-        /* On older devices let's use known good settings */
-        if (Build.VERSION.SDK_INT < 21 /* Android 5.0 (LOLLIPOP) */) {
-            if (desiredChannels > 2) {
-                desiredChannels = 2;
-            }
-        }
-
-        /* AudioTrack has sample rate limitation of 48000 (fixed in 5.0.2) */
-        if (Build.VERSION.SDK_INT < 22 /* Android 5.1 (LOLLIPOP_MR1) */) {
-            if (sampleRate < 8000) {
-                sampleRate = 8000;
-            } else if (sampleRate > 48000) {
-                sampleRate = 48000;
-            }
-        }
-
-        if (audioFormat == AudioFormat.ENCODING_PCM_FLOAT) {
-            int minSDKVersion = (recording ? 23 /* Android 6.0 (M) */ : 21 /* Android 5.0 (LOLLIPOP) */);
-            if (Build.VERSION.SDK_INT < minSDKVersion) {
-                audioFormat = AudioFormat.ENCODING_PCM_16BIT;
-            }
-        }
-        switch (audioFormat)
-        {
-        case AudioFormat.ENCODING_PCM_8BIT:
-            sampleSize = 1;
-            break;
-        case AudioFormat.ENCODING_PCM_16BIT:
-            sampleSize = 2;
-            break;
-        case AudioFormat.ENCODING_PCM_FLOAT:
-            sampleSize = 4;
-            break;
-        default:
-            Log.v(TAG, "Requested format " + audioFormat + ", getting ENCODING_PCM_16BIT");
-            audioFormat = AudioFormat.ENCODING_PCM_16BIT;
-            sampleSize = 2;
-            break;
-        }
-
-        if (recording) {
-            switch (desiredChannels) {
-            case 1:
-                channelConfig = AudioFormat.CHANNEL_IN_MONO;
-                break;
-            case 2:
-                channelConfig = AudioFormat.CHANNEL_IN_STEREO;
-                break;
-            default:
-                Log.v(TAG, "Requested " + desiredChannels + " channels, getting stereo");
-                desiredChannels = 2;
-                channelConfig = AudioFormat.CHANNEL_IN_STEREO;
-                break;
-            }
-        } else {
-            switch (desiredChannels) {
-            case 1:
-                channelConfig = AudioFormat.CHANNEL_OUT_MONO;
-                break;
-            case 2:
-                channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
-                break;
-            case 3:
-                channelConfig = AudioFormat.CHANNEL_OUT_STEREO | AudioFormat.CHANNEL_OUT_FRONT_CENTER;
-                break;
-            case 4:
-                channelConfig = AudioFormat.CHANNEL_OUT_QUAD;
-                break;
-            case 5:
-                channelConfig = AudioFormat.CHANNEL_OUT_QUAD | AudioFormat.CHANNEL_OUT_FRONT_CENTER;
-                break;
-            case 6:
-                channelConfig = AudioFormat.CHANNEL_OUT_5POINT1;
-                break;
-            case 7:
-                channelConfig = AudioFormat.CHANNEL_OUT_5POINT1 | AudioFormat.CHANNEL_OUT_BACK_CENTER;
-                break;
-            case 8:
-                if (Build.VERSION.SDK_INT >= 23 /* Android 6.0 (M) */) {
-                    channelConfig = AudioFormat.CHANNEL_OUT_7POINT1_SURROUND;
-                } else {
-                    Log.v(TAG, "Requested " + desiredChannels + " channels, getting 5.1 surround");
-                    desiredChannels = 6;
-                    channelConfig = AudioFormat.CHANNEL_OUT_5POINT1;
-                }
-                break;
-            default:
-                Log.v(TAG, "Requested " + desiredChannels + " channels, getting stereo");
-                desiredChannels = 2;
-                channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
-                break;
-            }
-
-            /*
-            Log.v(TAG, "Speaker configuration (and order of channels):");
-
-            if ((channelConfig & 0x00000004) != 0) {
-                Log.v(TAG, " CHANNEL_OUT_FRONT_LEFT");
-            }
-            if ((channelConfig & 0x00000008) != 0) {
-                Log.v(TAG, " CHANNEL_OUT_FRONT_RIGHT");
-            }
-            if ((channelConfig & 0x00000010) != 0) {
-                Log.v(TAG, " CHANNEL_OUT_FRONT_CENTER");
-            }
-            if ((channelConfig & 0x00000020) != 0) {
-                Log.v(TAG, " CHANNEL_OUT_LOW_FREQUENCY");
-            }
-            if ((channelConfig & 0x00000040) != 0) {
-                Log.v(TAG, " CHANNEL_OUT_BACK_LEFT");
-            }
-            if ((channelConfig & 0x00000080) != 0) {
-                Log.v(TAG, " CHANNEL_OUT_BACK_RIGHT");
-            }
-            if ((channelConfig & 0x00000100) != 0) {
-                Log.v(TAG, " CHANNEL_OUT_FRONT_LEFT_OF_CENTER");
-            }
-            if ((channelConfig & 0x00000200) != 0) {
-                Log.v(TAG, " CHANNEL_OUT_FRONT_RIGHT_OF_CENTER");
-            }
-            if ((channelConfig & 0x00000400) != 0) {
-                Log.v(TAG, " CHANNEL_OUT_BACK_CENTER");
-            }
-            if ((channelConfig & 0x00000800) != 0) {
-                Log.v(TAG, " CHANNEL_OUT_SIDE_LEFT");
-            }
-            if ((channelConfig & 0x00001000) != 0) {
-                Log.v(TAG, " CHANNEL_OUT_SIDE_RIGHT");
-            }
-            */
-        }
-        frameSize = (sampleSize * desiredChannels);
-
-        // Let the user pick a larger buffer if they really want -- but ye
-        // gods they probably shouldn't, the minimums are horrifyingly high
-        // latency already
-        int minBufferSize;
-        if (recording) {
-            minBufferSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat);
-        } else {
-            minBufferSize = AudioTrack.getMinBufferSize(sampleRate, channelConfig, audioFormat);
-        }
-        desiredFrames = Math.max(desiredFrames, (minBufferSize + frameSize - 1) / frameSize);
-
-        int[] results = new int[4];
-
-        if (recording) {
-            if (mAudioRecord == null) {
-                mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.DEFAULT, sampleRate,
-                        channelConfig, audioFormat, desiredFrames * frameSize);
-
-                // see notes about AudioTrack state in audioOpen(), above. Probably also applies here.
-                if (mAudioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
-                    Log.e(TAG, "Failed during initialization of AudioRecord");
-                    mAudioRecord.release();
-                    mAudioRecord = null;
-                    return null;
-                }
-
-                if (Build.VERSION.SDK_INT >= 24 /* Android 7.0 (N) */ && deviceId != 0) {
-                    mAudioRecord.setPreferredDevice(getPlaybackAudioDeviceInfo(deviceId));
-                }
-
-                mAudioRecord.startRecording();
-            }
-
-            results[0] = mAudioRecord.getSampleRate();
-            results[1] = mAudioRecord.getAudioFormat();
-            results[2] = mAudioRecord.getChannelCount();
-
-        } else {
-            if (mAudioTrack == null) {
-                mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, channelConfig, audioFormat, desiredFrames * frameSize, AudioTrack.MODE_STREAM);
-
-                // Instantiating AudioTrack can "succeed" without an exception and the track may still be invalid
-                // Ref: https://android.googlesource.com/platform/frameworks/base/+/refs/heads/master/media/java/android/media/AudioTrack.java
-                // Ref: http://developer.android.com/reference/android/media/AudioTrack.html#getState()
-                if (mAudioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
-                    /* Try again, with safer values */
-
-                    Log.e(TAG, "Failed during initialization of Audio Track");
-                    mAudioTrack.release();
-                    mAudioTrack = null;
-                    return null;
-                }
-
-                if (Build.VERSION.SDK_INT >= 24 /* Android 7.0 (N) */ && deviceId != 0) {
-                    mAudioTrack.setPreferredDevice(getInputAudioDeviceInfo(deviceId));
-                }
-
-                mAudioTrack.play();
-            }
-
-            results[0] = mAudioTrack.getSampleRate();
-            results[1] = mAudioTrack.getAudioFormat();
-            results[2] = mAudioTrack.getChannelCount();
-        }
-        results[3] = desiredFrames;
-
-        Log.v(TAG, "Opening " + (recording ? "recording" : "playback") + ", got " + results[3] + " frames of " + results[2] + " channel " + getAudioFormatString(results[1]) + " audio at " + results[0] + " Hz");
-
-        return results;
-    }
-
     private static AudioDeviceInfo getInputAudioDeviceInfo(int deviceId) {
         if (Build.VERSION.SDK_INT >= 24 /* Android 7.0 (N) */) {
             AudioManager audioManager = (AudioManager) mContext.getSystemService(Context.AUDIO_SERVICE);
@@ -330,148 +98,6 @@ public class SDLAudioManager {
         }
     }
 
-    /**
-     * This method is called by SDL using JNI.
-     */
-    public static int[] audioOpen(int sampleRate, int audioFormat, int desiredChannels, int desiredFrames, int deviceId) {
-        return open(false, sampleRate, audioFormat, desiredChannels, desiredFrames, deviceId);
-    }
-
-    /**
-     * This method is called by SDL using JNI.
-     */
-    public static void audioWriteFloatBuffer(float[] buffer) {
-        if (mAudioTrack == null) {
-            Log.e(TAG, "Attempted to make audio call with uninitialized audio!");
-            return;
-        }
-
-        if (android.os.Build.VERSION.SDK_INT < 21 /* Android 5.0 (LOLLIPOP) */) {
-            Log.e(TAG, "Attempted to make an incompatible audio call with uninitialized audio! (floating-point output is supported since Android 5.0 Lollipop)");
-            return;
-        }
-
-        for (int i = 0; i < buffer.length;) {
-            int result = mAudioTrack.write(buffer, i, buffer.length - i, AudioTrack.WRITE_BLOCKING);
-            if (result > 0) {
-                i += result;
-            } else if (result == 0) {
-                try {
-                    Thread.sleep(1);
-                } catch(InterruptedException e) {
-                    // Nom nom
-                }
-            } else {
-                Log.w(TAG, "SDL audio: error return from write(float)");
-                return;
-            }
-        }
-    }
-
-    /**
-     * This method is called by SDL using JNI.
-     */
-    public static void audioWriteShortBuffer(short[] buffer) {
-        if (mAudioTrack == null) {
-            Log.e(TAG, "Attempted to make audio call with uninitialized audio!");
-            return;
-        }
-
-        for (int i = 0; i < buffer.length;) {
-            int result = mAudioTrack.write(buffer, i, buffer.length - i);
-            if (result > 0) {
-                i += result;
-            } else if (result == 0) {
-                try {
-                    Thread.sleep(1);
-                } catch(InterruptedException e) {
-                    // Nom nom
-                }
-            } else {
-                Log.w(TAG, "SDL audio: error return from write(short)");
-                return;
-            }
-        }
-    }
-
-    /**
-     * This method is called by SDL using JNI.
-     */
-    public static void audioWriteByteBuffer(byte[] buffer) {
-        if (mAudioTrack == null) {
-            Log.e(TAG, "Attempted to make audio call with uninitialized audio!");
-            return;
-        }
-
-        for (int i = 0; i < buffer.length; ) {
-            int result = mAudioTrack.write(buffer, i, buffer.length - i);
-            if (result > 0) {
-                i += result;
-            } else if (result == 0) {
-                try {
-                    Thread.sleep(1);
-                } catch(InterruptedException e) {
-                    // Nom nom
-                }
-            } else {
-                Log.w(TAG, "SDL audio: error return from write(byte)");
-                return;
-            }
-        }
-    }
-
-    /**
-     * This method is called by SDL using JNI.
-     */
-    public static int[] recordingOpen(int sampleRate, int audioFormat, int desiredChannels, int desiredFrames, int deviceId) {
-        return open(true, sampleRate, audioFormat, desiredChannels, desiredFrames, deviceId);
-    }
-
-    /** This method is called by SDL using JNI. */
-    public static int recordingReadFloatBuffer(float[] buffer, boolean blocking) {
-        if (Build.VERSION.SDK_INT < 23 /* Android 6.0 (M) */) {
-            return 0;
-        } else {
-            return mAudioRecord.read(buffer, 0, buffer.length, blocking ? AudioRecord.READ_BLOCKING : AudioRecord.READ_NON_BLOCKING);
-        }
-    }
-
-    /** This method is called by SDL using JNI. */
-    public static int recordingReadShortBuffer(short[] buffer, boolean blocking) {
-        if (Build.VERSION.SDK_INT < 23 /* Android 6.0 (M) */) {
-            return mAudioRecord.read(buffer, 0, buffer.length);
-        } else {
-            return mAudioRecord.read(buffer, 0, buffer.length, blocking ? AudioRecord.READ_BLOCKING : AudioRecord.READ_NON_BLOCKING);
-        }
-    }
-
-    /** This method is called by SDL using JNI. */
-    public static int recordingReadByteBuffer(byte[] buffer, boolean blocking) {
-        if (Build.VERSION.SDK_INT < 23 /* Android 6.0 (M) */) {
-            return mAudioRecord.read(buffer, 0, buffer.length);
-        } else {
-            return mAudioRecord.read(buffer, 0, buffer.length, blocking ? AudioRecord.READ_BLOCKING : AudioRecord.READ_NON_BLOCKING);
-        }
-    }
-
-    /** This method is called by SDL using JNI. */
-    public static void audioClose() {
-        if (mAudioTrack != null) {
-            mAudioTrack.stop();
-            mAudioTrack.release();
-            mAudioTrack = null;
-        }
-    }
-
-    /** This method is called by SDL using JNI. */
-    public static void recordingClose() {
-        if (mAudioRecord != null) {
-            mAudioRecord.stop();
-            mAudioRecord.release();
-            mAudioRecord = null;
-        }
-    }
-
     /** This method is called by SDL using JNI. */
     public static void audioSetThreadPriority(boolean recording, int device_id) {
         try {
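The removed Java open() method sized the AudioTrack/AudioRecord buffer by rounding the requested frame count up to the platform minimum reported by getMinBufferSize(). For reference, a brief restatement of that calculation in C, with illustrative names rather than SDL API:

/* Illustrative restatement (not SDL code) of the buffer sizing done by the
 * removed Java open(): frameSize = sampleSize * desiredChannels, and the
 * requested frame count is rounded up so the buffer covers the reported
 * minimum buffer size in bytes. */
static int frames_for_min_buffer(int desired_frames, int min_buffer_bytes,
                                 int bytes_per_sample, int channels)
{
    const int frame_size = bytes_per_sample * channels;
    const int min_frames = (min_buffer_bytes + frame_size - 1) / frame_size; /* ceiling division */
    return (desired_frames > min_frames) ? desired_frames : min_frames;
}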
@@ -266,7 +266,6 @@
 /* Enable various audio drivers */
 #cmakedefine SDL_AUDIO_DRIVER_ALSA @SDL_AUDIO_DRIVER_ALSA@
 #cmakedefine SDL_AUDIO_DRIVER_ALSA_DYNAMIC @SDL_AUDIO_DRIVER_ALSA_DYNAMIC@
-#cmakedefine SDL_AUDIO_DRIVER_ANDROID @SDL_AUDIO_DRIVER_ANDROID@
 #cmakedefine SDL_AUDIO_DRIVER_OPENSLES @SDL_AUDIO_DRIVER_OPENSLES@
 #cmakedefine SDL_AUDIO_DRIVER_AAUDIO @SDL_AUDIO_DRIVER_AAUDIO@
 #cmakedefine SDL_AUDIO_DRIVER_COREAUDIO @SDL_AUDIO_DRIVER_COREAUDIO@
@@ -144,7 +144,6 @@
 
 /* Enable various audio drivers */
 #ifndef SDL_AUDIO_DISABLED
-#define SDL_AUDIO_DRIVER_ANDROID 1
 #define SDL_AUDIO_DRIVER_OPENSLES 1
 #define SDL_AUDIO_DRIVER_AAUDIO 1
 #endif /* SDL_AUDIO_DISABLED */
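The two hunks above are the compile-time side of the removal: the #cmakedefine template line and the hardcoded Android config both lose SDL_AUDIO_DRIVER_ANDROID, so the macro is never defined and the #ifdef-guarded driver sources drop out of the build. A minimal sketch of that gating pattern, using an illustrative function name rather than SDL code:

/* Minimal sketch of compile-time driver gating; example_opensles_compiled_in
 * is illustrative, not SDL code. In a real build the macro comes from the
 * generated build-config header rather than being defined inline. */
#define SDL_AUDIO_DRIVER_OPENSLES 1

int example_opensles_compiled_in(void)
{
#ifdef SDL_AUDIO_DRIVER_OPENSLES
    return 1;   /* backend sources were compiled in */
#else
    return 0;   /* backend left out at configure time */
#endif
}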
@@ -62,9 +62,6 @@ static const AudioBootStrap *const bootstrap[] = {
 #ifdef SDL_AUDIO_DRIVER_OPENSLES
     &OPENSLES_bootstrap,
 #endif
-#ifdef SDL_AUDIO_DRIVER_ANDROID
-    &ANDROIDAUDIO_bootstrap,
-#endif
 #ifdef SDL_AUDIO_DRIVER_PS2
     &PS2AUDIO_bootstrap,
 #endif
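bootstrap[] is the table SDL walks at startup to pick an audio backend; the hunk above simply drops the ANDROIDAUDIO entry from it. As a rough, hypothetical sketch (illustrative types and names, not SDL internals), consuming such a table looks like this:

/* Hypothetical sketch of walking a bootstrap table: try each compiled-in
 * backend in order and keep the first one whose init succeeds. */
#include <stdbool.h>
#include <stdio.h>

typedef struct ExampleBootstrap {
    const char *name;
    bool (*init)(void);
} ExampleBootstrap;

static bool example_opensles_init(void) { return true; }  /* stand-in backend */

static const ExampleBootstrap example_opensles = { "openslES", example_opensles_init };

static const ExampleBootstrap *const example_table[] = { &example_opensles, NULL };

int main(void)
{
    for (int i = 0; example_table[i] != NULL; i++) {
        if (example_table[i]->init()) {
            printf("using audio backend: %s\n", example_table[i]->name);
            break;
        }
    }
    return 0;
}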
@@ -372,7 +372,6 @@ extern AudioBootStrap DISKAUDIO_bootstrap;
 extern AudioBootStrap DUMMYAUDIO_bootstrap;
 extern AudioBootStrap AAUDIO_bootstrap;
 extern AudioBootStrap OPENSLES_bootstrap;
-extern AudioBootStrap ANDROIDAUDIO_bootstrap;
 extern AudioBootStrap PS2AUDIO_bootstrap;
 extern AudioBootStrap PSPAUDIO_bootstrap;
 extern AudioBootStrap VITAAUD_bootstrap;
@@ -1,191 +0,0 @@
-/*
-  Simple DirectMedia Layer
-  Copyright (C) 1997-2024 Sam Lantinga <slouken@libsdl.org>
-
-  This software is provided 'as-is', without any express or implied
-  warranty.  In no event will the authors be held liable for any damages
-  arising from the use of this software.
-
-  Permission is granted to anyone to use this software for any purpose,
-  including commercial applications, and to alter it and redistribute it
-  freely, subject to the following restrictions:
-
-  1. The origin of this software must not be misrepresented; you must not
-     claim that you wrote the original software. If you use this software
-     in a product, an acknowledgment in the product documentation would be
-     appreciated but is not required.
-  2. Altered source versions must be plainly marked as such, and must not be
-     misrepresented as being the original software.
-  3. This notice may not be removed or altered from any source distribution.
-*/
-#include "SDL_internal.h"
-
-#ifdef SDL_AUDIO_DRIVER_ANDROID
-
-// Output audio to Android (legacy interface)
-
-#include "../SDL_sysaudio.h"
-#include "SDL_androidaudio.h"
-
-#include "../../core/android/SDL_android.h"
-#include <android/log.h>
-
-
-struct SDL_PrivateAudioData
-{
-    int resume;  // Resume device if it was paused automatically
-};
-
-static SDL_AudioDevice *playbackDevice = NULL;
-static SDL_AudioDevice *recordingDevice = NULL;
-
-static int ANDROIDAUDIO_OpenDevice(SDL_AudioDevice *device)
-{
-    device->hidden = (struct SDL_PrivateAudioData *)SDL_calloc(1, sizeof(*device->hidden));
-    if (!device->hidden) {
-        return -1;
-    }
-
-    const SDL_bool recording = device->recording;
-
-    if (recording) {
-        if (recordingDevice) {
-            return SDL_SetError("An audio recording device is already opened");
-        }
-        recordingDevice = device;
-    } else {
-        if (playbackDevice) {
-            return SDL_SetError("An audio playback device is already opened");
-        }
-        playbackDevice = device;
-    }
-
-    SDL_AudioFormat test_format;
-    const SDL_AudioFormat *closefmts = SDL_ClosestAudioFormats(device->spec.format);
-    while ((test_format = *(closefmts++)) != 0) {
-        if ((test_format == SDL_AUDIO_U8) ||
-            (test_format == SDL_AUDIO_S16) ||
-            (test_format == SDL_AUDIO_F32)) {
-            device->spec.format = test_format;
-            break;
-        }
-    }
-
-    if (!test_format) {
-        return SDL_SetError("android: Unsupported audio format");
-    }
-
-    if (Android_JNI_OpenAudioDevice(device) < 0) {
-        return -1;
-    }
-
-    SDL_UpdatedAudioDeviceFormat(device);
-
-    return 0;
-}
-
-// !!! FIXME: this needs a WaitDevice implementation.
-
-static int ANDROIDAUDIO_PlayDevice(SDL_AudioDevice *device, const Uint8 *buffer, int buflen)
-{
-    Android_JNI_WriteAudioBuffer();
-    return 0;
-}
-
-static Uint8 *ANDROIDAUDIO_GetDeviceBuf(SDL_AudioDevice *device, int *buffer_size)
-{
-    return Android_JNI_GetAudioBuffer();
-}
-
-static int ANDROIDAUDIO_RecordDevice(SDL_AudioDevice *device, void *buffer, int buflen)
-{
-    return Android_JNI_RecordAudioBuffer(buffer, buflen);
-}
-
-static void ANDROIDAUDIO_FlushRecording(SDL_AudioDevice *device)
-{
-    Android_JNI_FlushRecordedAudio();
-}
-
-static void ANDROIDAUDIO_CloseDevice(SDL_AudioDevice *device)
-{
-    /* At this point SDL_CloseAudioDevice via close_audio_device took care of terminating the audio thread
-       so it's safe to terminate the Java side buffer and AudioTrack
-     */
-    if (device->hidden) {
-        Android_JNI_CloseAudioDevice(device->recording);
-        if (device->recording) {
-            SDL_assert(recordingDevice == device);
-            recordingDevice = NULL;
-        } else {
-            SDL_assert(playbackDevice == device);
-            playbackDevice = NULL;
-        }
-        SDL_free(device->hidden);
-        device->hidden = NULL;
-    }
-}
-
-// Pause (block) all non already paused audio devices by taking their mixer lock
-void ANDROIDAUDIO_PauseDevices(void)
-{
-    // TODO: Handle multiple devices?
-    struct SDL_PrivateAudioData *hidden;
-    if (playbackDevice && playbackDevice->hidden) {
-        hidden = (struct SDL_PrivateAudioData *)playbackDevice->hidden;
-        SDL_LockMutex(playbackDevice->lock);
-        hidden->resume = SDL_TRUE;
-    }
-
-    if (recordingDevice && recordingDevice->hidden) {
-        hidden = (struct SDL_PrivateAudioData *)recordingDevice->hidden;
-        SDL_LockMutex(recordingDevice->lock);
-        hidden->resume = SDL_TRUE;
-    }
-}
-
-// Resume (unblock) all non already paused audio devices by releasing their mixer lock
-void ANDROIDAUDIO_ResumeDevices(void)
-{
-    // TODO: Handle multiple devices?
-    struct SDL_PrivateAudioData *hidden;
-    if (playbackDevice && playbackDevice->hidden) {
-        hidden = (struct SDL_PrivateAudioData *)playbackDevice->hidden;
-        if (hidden->resume) {
-            hidden->resume = SDL_FALSE;
-            SDL_UnlockMutex(playbackDevice->lock);
-        }
-    }
-
-    if (recordingDevice && recordingDevice->hidden) {
-        hidden = (struct SDL_PrivateAudioData *)recordingDevice->hidden;
-        if (hidden->resume) {
-            hidden->resume = SDL_FALSE;
-            SDL_UnlockMutex(recordingDevice->lock);
-        }
-    }
-}
-
-static SDL_bool ANDROIDAUDIO_Init(SDL_AudioDriverImpl *impl)
-{
-    // !!! FIXME: if on Android API < 24, DetectDevices and Deinitialize should be NULL and OnlyHasDefaultPlaybackDevice and OnlyHasDefaultRecordingDevice should be SDL_TRUE, since audio device enum and hotplug appears to require Android 7.0+.
-    impl->ThreadInit = Android_AudioThreadInit;
-    impl->DetectDevices = Android_StartAudioHotplug;
-    impl->DeinitializeStart = Android_StopAudioHotplug;
-    impl->OpenDevice = ANDROIDAUDIO_OpenDevice;
-    impl->PlayDevice = ANDROIDAUDIO_PlayDevice;
-    impl->GetDeviceBuf = ANDROIDAUDIO_GetDeviceBuf;
-    impl->CloseDevice = ANDROIDAUDIO_CloseDevice;
-    impl->RecordDevice = ANDROIDAUDIO_RecordDevice;
-    impl->FlushRecording = ANDROIDAUDIO_FlushRecording;
-
-    impl->HasRecordingSupport = SDL_TRUE;
-
-    return SDL_TRUE;
-}
-
-AudioBootStrap ANDROIDAUDIO_bootstrap = {
-    "android", "SDL Android audio driver", ANDROIDAUDIO_Init, SDL_FALSE
-};
-
-#endif // SDL_AUDIO_DRIVER_ANDROID
@@ -1,38 +0,0 @@
-/*
-  Simple DirectMedia Layer
-  Copyright (C) 1997-2024 Sam Lantinga <slouken@libsdl.org>
-
-  This software is provided 'as-is', without any express or implied
-  warranty.  In no event will the authors be held liable for any damages
-  arising from the use of this software.
-
-  Permission is granted to anyone to use this software for any purpose,
-  including commercial applications, and to alter it and redistribute it
-  freely, subject to the following restrictions:
-
-  1. The origin of this software must not be misrepresented; you must not
-     claim that you wrote the original software. If you use this software
-     in a product, an acknowledgment in the product documentation would be
-     appreciated but is not required.
-  2. Altered source versions must be plainly marked as such, and must not be
-     misrepresented as being the original software.
-  3. This notice may not be removed or altered from any source distribution.
-*/
-#include "SDL_internal.h"
-
-#ifndef SDL_androidaudio_h_
-#define SDL_androidaudio_h_
-
-#ifdef SDL_AUDIO_DRIVER_ANDROID
-
-void ANDROIDAUDIO_ResumeDevices(void);
-void ANDROIDAUDIO_PauseDevices(void);
-
-#else
-
-static void ANDROIDAUDIO_ResumeDevices(void) {}
-static void ANDROIDAUDIO_PauseDevices(void) {}
-
-#endif
-
-#endif // SDL_androidaudio_h_
@@ -364,16 +364,6 @@ static jclass mAudioManagerClass;
 /* method signatures */
 static jmethodID midRegisterAudioDeviceCallback;
 static jmethodID midUnregisterAudioDeviceCallback;
-static jmethodID midAudioOpen;
-static jmethodID midAudioWriteByteBuffer;
-static jmethodID midAudioWriteShortBuffer;
-static jmethodID midAudioWriteFloatBuffer;
-static jmethodID midAudioClose;
-static jmethodID midRecordingOpen;
-static jmethodID midRecordingReadByteBuffer;
-static jmethodID midRecordingReadShortBuffer;
-static jmethodID midRecordingReadFloatBuffer;
-static jmethodID midRecordingClose;
 static jmethodID midAudioSetThreadPriority;
 
 /* controller manager */
@@ -710,34 +700,10 @@ JNIEXPORT void JNICALL SDL_JAVA_AUDIO_INTERFACE(nativeSetupJNI)(JNIEnv *env, jcl
     midUnregisterAudioDeviceCallback = (*env)->GetStaticMethodID(env, mAudioManagerClass,
                                                                  "unregisterAudioDeviceCallback",
                                                                  "()V");
-    midAudioOpen = (*env)->GetStaticMethodID(env, mAudioManagerClass,
-                                             "audioOpen", "(IIIII)[I");
-    midAudioWriteByteBuffer = (*env)->GetStaticMethodID(env, mAudioManagerClass,
-                                                        "audioWriteByteBuffer", "([B)V");
-    midAudioWriteShortBuffer = (*env)->GetStaticMethodID(env, mAudioManagerClass,
-                                                         "audioWriteShortBuffer", "([S)V");
-    midAudioWriteFloatBuffer = (*env)->GetStaticMethodID(env, mAudioManagerClass,
-                                                         "audioWriteFloatBuffer", "([F)V");
-    midAudioClose = (*env)->GetStaticMethodID(env, mAudioManagerClass,
-                                              "audioClose", "()V");
-    midRecordingOpen = (*env)->GetStaticMethodID(env, mAudioManagerClass,
-                                                 "recordingOpen", "(IIIII)[I");
-    midRecordingReadByteBuffer = (*env)->GetStaticMethodID(env, mAudioManagerClass,
-                                                           "recordingReadByteBuffer", "([BZ)I");
-    midRecordingReadShortBuffer = (*env)->GetStaticMethodID(env, mAudioManagerClass,
-                                                            "recordingReadShortBuffer", "([SZ)I");
-    midRecordingReadFloatBuffer = (*env)->GetStaticMethodID(env, mAudioManagerClass,
-                                                            "recordingReadFloatBuffer", "([FZ)I");
-    midRecordingClose = (*env)->GetStaticMethodID(env, mAudioManagerClass,
-                                                  "recordingClose", "()V");
     midAudioSetThreadPriority = (*env)->GetStaticMethodID(env, mAudioManagerClass,
                                                           "audioSetThreadPriority", "(ZI)V");
 
-    if (!midRegisterAudioDeviceCallback || !midUnregisterAudioDeviceCallback || !midAudioOpen ||
-        !midAudioWriteByteBuffer || !midAudioWriteShortBuffer || !midAudioWriteFloatBuffer ||
-        !midAudioClose ||
-        !midRecordingOpen || !midRecordingReadByteBuffer || !midRecordingReadShortBuffer ||
-        !midRecordingReadFloatBuffer || !midRecordingClose || !midAudioSetThreadPriority) {
+    if (!midRegisterAudioDeviceCallback || !midUnregisterAudioDeviceCallback || !midAudioSetThreadPriority) {
         __android_log_print(ANDROID_LOG_WARN, "SDL",
                             "Missing some Java callbacks, do you have the latest version of SDLAudioManager.java?");
     }
@@ -1642,6 +1608,16 @@ void Android_JNI_SetOrientation(int w, int h, int resizable, const char *hint)
     (*env)->DeleteLocalRef(env, jhint);
 }
 
+SDL_DisplayOrientation Android_JNI_GetDisplayNaturalOrientation(void)
+{
+    return displayNaturalOrientation;
+}
+
+SDL_DisplayOrientation Android_JNI_GetDisplayCurrentOrientation(void)
+{
+    return displayCurrentOrientation;
+}
+
 void Android_JNI_MinizeWindow(void)
 {
     JNIEnv *env = Android_JNI_GetEnv();
@@ -1673,12 +1649,6 @@ SDL_bool Android_JNI_GetAccelerometerValues(float values[3])
 /*
  * Audio support
  */
-static int audioBufferFormat = 0;
-static jobject audioBuffer = NULL;
-static void *audioBufferPinned = NULL;
-static int recordingBufferFormat = 0;
-static jobject recordingBuffer = NULL;
-
 void Android_StartAudioHotplug(SDL_AudioDevice **default_playback, SDL_AudioDevice **default_recording)
 {
     JNIEnv *env = Android_JNI_GetEnv();
@@ -1693,282 +1663,6 @@ void Android_StopAudioHotplug(void)
     (*env)->CallStaticVoidMethod(env, mAudioManagerClass, midUnregisterAudioDeviceCallback);
 }
 
-int Android_JNI_OpenAudioDevice(SDL_AudioDevice *device)
-{
-    const SDL_bool recording = device->recording;
-    SDL_AudioSpec *spec = &device->spec;
-    const int device_id = (int) ((size_t) device->handle);
-    int audioformat;
-    jobject jbufobj = NULL;
-    jobject result;
-    int *resultElements;
-    jboolean isCopy;
-
-    JNIEnv *env = Android_JNI_GetEnv();
-
-    switch (spec->format) {
-    case SDL_AUDIO_U8:
-        audioformat = ENCODING_PCM_8BIT;
-        break;
-    case SDL_AUDIO_S16:
-        audioformat = ENCODING_PCM_16BIT;
-        break;
-    case SDL_AUDIO_F32:
-        audioformat = ENCODING_PCM_FLOAT;
-        break;
-    default:
-        return SDL_SetError("Unsupported audio format: 0x%x", spec->format);
-    }
-
-    if (recording) {
-        __android_log_print(ANDROID_LOG_VERBOSE, "SDL", "SDL audio: opening device for recording");
-        result = (*env)->CallStaticObjectMethod(env, mAudioManagerClass, midRecordingOpen, spec->freq, audioformat, spec->channels, device->sample_frames, device_id);
-    } else {
-        __android_log_print(ANDROID_LOG_VERBOSE, "SDL", "SDL audio: opening device for playback");
-        result = (*env)->CallStaticObjectMethod(env, mAudioManagerClass, midAudioOpen, spec->freq, audioformat, spec->channels, device->sample_frames, device_id);
-    }
-    if (!result) {
-        /* Error during audio initialization, error printed from Java */
-        return SDL_SetError("Java-side initialization failed");
-    }
-
-    if ((*env)->GetArrayLength(env, (jintArray)result) != 4) {
-        return SDL_SetError("Unexpected results from Java, expected 4, got %d", (*env)->GetArrayLength(env, (jintArray)result));
-    }
-    isCopy = JNI_FALSE;
-    resultElements = (*env)->GetIntArrayElements(env, (jintArray)result, &isCopy);
-    spec->freq = resultElements[0];
-    audioformat = resultElements[1];
-    switch (audioformat) {
-    case ENCODING_PCM_8BIT:
-        spec->format = SDL_AUDIO_U8;
-        break;
-    case ENCODING_PCM_16BIT:
-        spec->format = SDL_AUDIO_S16;
-        break;
-    case ENCODING_PCM_FLOAT:
-        spec->format = SDL_AUDIO_F32;
-        break;
-    default:
-        return SDL_SetError("Unexpected audio format from Java: %d", audioformat);
-    }
-    spec->channels = resultElements[2];
-    device->sample_frames = resultElements[3];
-    (*env)->ReleaseIntArrayElements(env, (jintArray)result, resultElements, JNI_ABORT);
-    (*env)->DeleteLocalRef(env, result);
-
-    /* Allocating the audio buffer from the Java side and passing it as the return value for audioInit no longer works on
-     * Android >= 4.2 due to a "stale global reference" error. So now we allocate this buffer directly from this side. */
-    switch (audioformat) {
-    case ENCODING_PCM_8BIT:
-    {
-        jbyteArray audioBufferLocal = (*env)->NewByteArray(env, device->sample_frames * spec->channels);
-        if (audioBufferLocal) {
-            jbufobj = (*env)->NewGlobalRef(env, audioBufferLocal);
-            (*env)->DeleteLocalRef(env, audioBufferLocal);
-        }
-    } break;
-    case ENCODING_PCM_16BIT:
-    {
-        jshortArray audioBufferLocal = (*env)->NewShortArray(env, device->sample_frames * spec->channels);
-        if (audioBufferLocal) {
-            jbufobj = (*env)->NewGlobalRef(env, audioBufferLocal);
-            (*env)->DeleteLocalRef(env, audioBufferLocal);
-        }
-    } break;
-    case ENCODING_PCM_FLOAT:
-    {
-        jfloatArray audioBufferLocal = (*env)->NewFloatArray(env, device->sample_frames * spec->channels);
-        if (audioBufferLocal) {
-            jbufobj = (*env)->NewGlobalRef(env, audioBufferLocal);
-            (*env)->DeleteLocalRef(env, audioBufferLocal);
-        }
-    } break;
-    default:
-        return SDL_SetError("Unexpected audio format from Java: %d\n", audioformat);
-    }
-
-    if (!jbufobj) {
-        __android_log_print(ANDROID_LOG_WARN, "SDL", "SDL audio: could not allocate an audio buffer");
-        return SDL_OutOfMemory();
-    }
-
-    if (recording) {
-        recordingBufferFormat = audioformat;
-        recordingBuffer = jbufobj;
-    } else {
-        audioBufferFormat = audioformat;
-        audioBuffer = jbufobj;
-    }
-
-    if (!recording) {
-        isCopy = JNI_FALSE;
-
-        switch (audioformat) {
-        case ENCODING_PCM_8BIT:
-            audioBufferPinned = (*env)->GetByteArrayElements(env, (jbyteArray)audioBuffer, &isCopy);
-            break;
-        case ENCODING_PCM_16BIT:
-            audioBufferPinned = (*env)->GetShortArrayElements(env, (jshortArray)audioBuffer, &isCopy);
-            break;
-        case ENCODING_PCM_FLOAT:
-            audioBufferPinned = (*env)->GetFloatArrayElements(env, (jfloatArray)audioBuffer, &isCopy);
-            break;
-        default:
-            return SDL_SetError("Unexpected audio format from Java: %d\n", audioformat);
-        }
-    }
-    return 0;
-}
-
-SDL_DisplayOrientation Android_JNI_GetDisplayNaturalOrientation(void)
-{
-    return displayNaturalOrientation;
-}
-
-SDL_DisplayOrientation Android_JNI_GetDisplayCurrentOrientation(void)
-{
-    return displayCurrentOrientation;
-}
-
-void *Android_JNI_GetAudioBuffer(void)
-{
-    return audioBufferPinned;
-}
-
-void Android_JNI_WriteAudioBuffer(void)
-{
-    JNIEnv *env = Android_JNI_GetEnv();
-
-    switch (audioBufferFormat) {
-    case ENCODING_PCM_8BIT:
-        (*env)->ReleaseByteArrayElements(env, (jbyteArray)audioBuffer, (jbyte *)audioBufferPinned, JNI_COMMIT);
-        (*env)->CallStaticVoidMethod(env, mAudioManagerClass, midAudioWriteByteBuffer, (jbyteArray)audioBuffer);
-        break;
-    case ENCODING_PCM_16BIT:
-        (*env)->ReleaseShortArrayElements(env, (jshortArray)audioBuffer, (jshort *)audioBufferPinned, JNI_COMMIT);
-        (*env)->CallStaticVoidMethod(env, mAudioManagerClass, midAudioWriteShortBuffer, (jshortArray)audioBuffer);
-        break;
-    case ENCODING_PCM_FLOAT:
-        (*env)->ReleaseFloatArrayElements(env, (jfloatArray)audioBuffer, (jfloat *)audioBufferPinned, JNI_COMMIT);
-        (*env)->CallStaticVoidMethod(env, mAudioManagerClass, midAudioWriteFloatBuffer, (jfloatArray)audioBuffer);
-        break;
-    default:
-        __android_log_print(ANDROID_LOG_WARN, "SDL", "SDL audio: unhandled audio buffer format");
-        break;
-    }
-
-    /* JNI_COMMIT means the changes are committed to the VM but the buffer remains pinned */
-}
-
-int Android_JNI_RecordAudioBuffer(void *buffer, int buflen)
-{
-    JNIEnv *env = Android_JNI_GetEnv();
-    jboolean isCopy = JNI_FALSE;
-    jint br = -1;
-
-    switch (recordingBufferFormat) {
-    case ENCODING_PCM_8BIT:
-        SDL_assert((*env)->GetArrayLength(env, (jshortArray)recordingBuffer) == buflen);
-        br = (*env)->CallStaticIntMethod(env, mAudioManagerClass, midRecordingReadByteBuffer, (jbyteArray)recordingBuffer, JNI_TRUE);
-        if (br > 0) {
-            jbyte *ptr = (*env)->GetByteArrayElements(env, (jbyteArray)recordingBuffer, &isCopy);
-            SDL_memcpy(buffer, ptr, br);
-            (*env)->ReleaseByteArrayElements(env, (jbyteArray)recordingBuffer, ptr, JNI_ABORT);
-        }
-        break;
-    case ENCODING_PCM_16BIT:
-        SDL_assert((*env)->GetArrayLength(env, (jshortArray)recordingBuffer) == (buflen / sizeof(Sint16)));
-        br = (*env)->CallStaticIntMethod(env, mAudioManagerClass, midRecordingReadShortBuffer, (jshortArray)recordingBuffer, JNI_TRUE);
-        if (br > 0) {
-            jshort *ptr = (*env)->GetShortArrayElements(env, (jshortArray)recordingBuffer, &isCopy);
-            br *= sizeof(Sint16);
-            SDL_memcpy(buffer, ptr, br);
-            (*env)->ReleaseShortArrayElements(env, (jshortArray)recordingBuffer, ptr, JNI_ABORT);
-        }
-        break;
-    case ENCODING_PCM_FLOAT:
-        SDL_assert((*env)->GetArrayLength(env, (jfloatArray)recordingBuffer) == (buflen / sizeof(float)));
-        br = (*env)->CallStaticIntMethod(env, mAudioManagerClass, midRecordingReadFloatBuffer, (jfloatArray)recordingBuffer, JNI_TRUE);
-        if (br > 0) {
-            jfloat *ptr = (*env)->GetFloatArrayElements(env, (jfloatArray)recordingBuffer, &isCopy);
-            br *= sizeof(float);
-            SDL_memcpy(buffer, ptr, br);
-            (*env)->ReleaseFloatArrayElements(env, (jfloatArray)recordingBuffer, ptr, JNI_ABORT);
-        }
-        break;
-    default:
-        __android_log_print(ANDROID_LOG_WARN, "SDL", "SDL audio: unhandled recording buffer format");
-        break;
-    }
-    return br;
-}
-
-void Android_JNI_FlushRecordedAudio(void)
-{
-    JNIEnv *env = Android_JNI_GetEnv();
-#if 0  /* !!! FIXME: this needs API 23, or it'll do blocking reads and never end. */
-    switch (recordingBufferFormat) {
-    case ENCODING_PCM_8BIT:
-    {
-        const jint len = (*env)->GetArrayLength(env, (jbyteArray)recordingBuffer);
-        while ((*env)->CallStaticIntMethod(env, mActivityClass, midRecordingReadByteBuffer, (jbyteArray)recordingBuffer, JNI_FALSE) == len) { /* spin */ }
-    }
-    break;
-    case ENCODING_PCM_16BIT:
-    {
-        const jint len = (*env)->GetArrayLength(env, (jshortArray)recordingBuffer);
-        while ((*env)->CallStaticIntMethod(env, mActivityClass, midRecordingReadShortBuffer, (jshortArray)recordingBuffer, JNI_FALSE) == len) { /* spin */ }
-    }
-    break;
-    case ENCODING_PCM_FLOAT:
-    {
-        const jint len = (*env)->GetArrayLength(env, (jfloatArray)recordingBuffer);
-        while ((*env)->CallStaticIntMethod(env, mActivityClass, midRecordingReadFloatBuffer, (jfloatArray)recordingBuffer, JNI_FALSE) == len) { /* spin */ }
-    }
-    break;
-    default:
-        __android_log_print(ANDROID_LOG_WARN, "SDL", "SDL audio: flushing unhandled recording buffer format");
-        break;
-    }
-#else
-    switch (recordingBufferFormat) {
-    case ENCODING_PCM_8BIT:
-        (*env)->CallStaticIntMethod(env, mAudioManagerClass, midRecordingReadByteBuffer, (jbyteArray)recordingBuffer, JNI_FALSE);
-        break;
-    case ENCODING_PCM_16BIT:
-        (*env)->CallStaticIntMethod(env, mAudioManagerClass, midRecordingReadShortBuffer, (jshortArray)recordingBuffer, JNI_FALSE);
-        break;
-    case ENCODING_PCM_FLOAT:
-        (*env)->CallStaticIntMethod(env, mAudioManagerClass, midRecordingReadFloatBuffer, (jfloatArray)recordingBuffer, JNI_FALSE);
-        break;
-    default:
-        __android_log_print(ANDROID_LOG_WARN, "SDL", "SDL audio: flushing unhandled recording buffer format");
-        break;
-    }
-#endif
-}
-
-void Android_JNI_CloseAudioDevice(const int recording)
-{
-    JNIEnv *env = Android_JNI_GetEnv();
-
-    if (recording) {
-        (*env)->CallStaticVoidMethod(env, mAudioManagerClass, midRecordingClose);
-        if (recordingBuffer) {
-            (*env)->DeleteGlobalRef(env, recordingBuffer);
-            recordingBuffer = NULL;
-        }
-    } else {
-        (*env)->CallStaticVoidMethod(env, mAudioManagerClass, midAudioClose);
-        if (audioBuffer) {
-            (*env)->DeleteGlobalRef(env, audioBuffer);
-            audioBuffer = NULL;
-            audioBufferPinned = NULL;
-        }
-    }
-}
-
 static void Android_JNI_AudioSetThreadPriority(int recording, int device_id)
 {
     JNIEnv *env = Android_JNI_GetEnv();
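The native code removed above bridged into SDLAudioManager through jmethodIDs cached once in nativeSetupJNI and invoked later with CallStatic*Method. The general JNI pattern, shown here with a made-up static void method rather than SDL's actual bridge:

/* General JNI method-ID caching pattern; "exampleClose" and its "()V"
 * signature are hypothetical, not an SDL method. */
#include <jni.h>

static jmethodID s_midExample;   /* cached once at setup time */

void Example_CacheMethod(JNIEnv *env, jclass audio_manager_class)
{
    s_midExample = (*env)->GetStaticMethodID(env, audio_manager_class, "exampleClose", "()V");
}

void Example_CallMethod(JNIEnv *env, jclass audio_manager_class)
{
    if (s_midExample) {
        (*env)->CallStaticVoidMethod(env, audio_manager_class, s_midExample);
    }
}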
@@ -75,12 +75,6 @@ extern SDL_DisplayOrientation Android_JNI_GetDisplayCurrentOrientation(void);
 void Android_StartAudioHotplug(SDL_AudioDevice **default_playback, SDL_AudioDevice **default_recording);
 void Android_StopAudioHotplug(void);
 extern void Android_AudioThreadInit(SDL_AudioDevice *device);
-extern int Android_JNI_OpenAudioDevice(SDL_AudioDevice *device);
-extern void *Android_JNI_GetAudioBuffer(void);
-extern void Android_JNI_WriteAudioBuffer(void);
-extern int Android_JNI_RecordAudioBuffer(void *buffer, int buflen);
-extern void Android_JNI_FlushRecordedAudio(void);
-extern void Android_JNI_CloseAudioDevice(const int recording);
 
 /* Detecting device type */
 extern SDL_bool Android_IsDeXMode(void);