mirror of https://github.com/encounter/SDL.git
package org.libsdl.app;

import android.media.*;
import android.hardware.*;
import android.util.Log;

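/**
 * Java-side audio glue for SDL on Android: owns the AudioTrack used for
 * playback and the AudioRecord used for capture. Most of the methods below
 * are called from the native SDL library over JNI, as noted on each one.
 */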
public class SDLAudioManager
{
    protected static final String TAG = "SDLAudio";

    protected static AudioTrack mAudioTrack;
    protected static AudioRecord mAudioRecord;

    public static void initialize() {
        mAudioTrack = null;
        mAudioRecord = null;
    }

    // Audio

    /**
     * This method is called by SDL using JNI.
     */
    public static int audioOpen(int sampleRate, boolean is16Bit, boolean isStereo, int desiredFrames) {
        int channelConfig = isStereo ? AudioFormat.CHANNEL_CONFIGURATION_STEREO : AudioFormat.CHANNEL_CONFIGURATION_MONO;
        int audioFormat = is16Bit ? AudioFormat.ENCODING_PCM_16BIT : AudioFormat.ENCODING_PCM_8BIT;
        int frameSize = (isStereo ? 2 : 1) * (is16Bit ? 2 : 1);

        Log.v(TAG, "SDL audio: wanted " + (isStereo ? "stereo" : "mono") + " " + (is16Bit ? "16-bit" : "8-bit") + " " + (sampleRate / 1000f) + "kHz, " + desiredFrames + " frames buffer");

        // Let the user pick a larger buffer if they really want -- but ye
        // gods they probably shouldn't, the minimums are horrifyingly high
        // latency already
        desiredFrames = Math.max(desiredFrames, (AudioTrack.getMinBufferSize(sampleRate, channelConfig, audioFormat) + frameSize - 1) / frameSize);
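        // (getMinBufferSize() reports a size in bytes; the ceiling division
        // above rounds it up to whole frames.)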

        if (mAudioTrack == null) {
            mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
                    channelConfig, audioFormat, desiredFrames * frameSize, AudioTrack.MODE_STREAM);

            // Instantiating AudioTrack can "succeed" without an exception and the track may still be invalid
            // Ref: https://android.googlesource.com/platform/frameworks/base/+/refs/heads/master/media/java/android/media/AudioTrack.java
            // Ref: http://developer.android.com/reference/android/media/AudioTrack.html#getState()

            if (mAudioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
                Log.e(TAG, "Failed during initialization of Audio Track");
                mAudioTrack = null;
                return -1;
            }

            mAudioTrack.play();
        }

        Log.v(TAG, "SDL audio: got " + ((mAudioTrack.getChannelCount() >= 2) ? "stereo" : "mono") + " " + ((mAudioTrack.getAudioFormat() == AudioFormat.ENCODING_PCM_16BIT) ? "16-bit" : "8-bit") + " " + (mAudioTrack.getSampleRate() / 1000f) + "kHz, " + desiredFrames + " frames buffer");

        return 0;
    }

    /**
     * This method is called by SDL using JNI.
     */
    public static void audioWriteShortBuffer(short[] buffer) {
        if (mAudioTrack == null) {
            Log.e(TAG, "Attempted to make audio call with uninitialized audio!");
            return;
        }
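
        // write() may accept only part of the buffer on each pass, so loop
        // until every sample has been handed to the AudioTrack.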
|
||
|
|
||
|
for (int i = 0; i < buffer.length; ) {
|
||
|
int result = mAudioTrack.write(buffer, i, buffer.length - i);
|
||
|
if (result > 0) {
|
||
|
i += result;
|
||
|
} else if (result == 0) {
|
||
|
try {
|
||
|
Thread.sleep(1);
|
||
|
} catch(InterruptedException e) {
|
||
|
// Nom nom
|
||
|
}
|
||
|
} else {
|
||
|
Log.w(TAG, "SDL audio: error return from write(short)");
|
||
|
return;
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
|
||
|
/**
|
||
|
* This method is called by SDL using JNI.
|
||
|
*/
|
||
|
public static void audioWriteByteBuffer(byte[] buffer) {
|
||
|
if (mAudioTrack == null) {
|
||
|
Log.e(TAG, "Attempted to make audio call with uninitialized audio!");
|
||
|
return;
|
||
|
}
|
||
|
|
||
|
for (int i = 0; i < buffer.length; ) {
|
||
|
int result = mAudioTrack.write(buffer, i, buffer.length - i);
|
||
|
if (result > 0) {
|
||
|
i += result;
|
||
|
} else if (result == 0) {
|
||
|
try {
|
||
|
Thread.sleep(1);
|
||
|
} catch(InterruptedException e) {
|
||
|
// Nom nom
|
||
|
}
|
||
|
} else {
|
||
|
Log.w(TAG, "SDL audio: error return from write(byte)");
|
||
|
return;
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
|
||
|
/**
|
||
|
* This method is called by SDL using JNI.
|
||
|
*/
|
||
|
public static int captureOpen(int sampleRate, boolean is16Bit, boolean isStereo, int desiredFrames) {
|
||
|
int channelConfig = isStereo ? AudioFormat.CHANNEL_CONFIGURATION_STEREO : AudioFormat.CHANNEL_CONFIGURATION_MONO;
|
||
|
int audioFormat = is16Bit ? AudioFormat.ENCODING_PCM_16BIT : AudioFormat.ENCODING_PCM_8BIT;
|
||
|
int frameSize = (isStereo ? 2 : 1) * (is16Bit ? 2 : 1);
|
||
|
|
||
|
Log.v(TAG, "SDL capture: wanted " + (isStereo ? "stereo" : "mono") + " " + (is16Bit ? "16-bit" : "8-bit") + " " + (sampleRate / 1000f) + "kHz, " + desiredFrames + " frames buffer");
|
||
|
|
||
|
// Let the user pick a larger buffer if they really want -- but ye
|
||
|
// gods they probably shouldn't, the minimums are horrifyingly high
|
||
|
// latency already
|
||
|
desiredFrames = Math.max(desiredFrames, (AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat) + frameSize - 1) / frameSize);
|
||
|
|
||
|
if (mAudioRecord == null) {
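            // Recording requires the RECORD_AUDIO permission; without it the
            // AudioRecord typically never reaches STATE_INITIALIZED and the
            // check below fails.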
            mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.DEFAULT, sampleRate,
                    channelConfig, audioFormat, desiredFrames * frameSize);

            // see notes about AudioTrack state in audioOpen(), above. Probably also applies here.
            if (mAudioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
                Log.e(TAG, "Failed during initialization of AudioRecord");
                mAudioRecord.release();
                mAudioRecord = null;
                return -1;
            }

            mAudioRecord.startRecording();
        }

        Log.v(TAG, "SDL capture: got " + ((mAudioRecord.getChannelCount() >= 2) ? "stereo" : "mono") + " " + ((mAudioRecord.getAudioFormat() == AudioFormat.ENCODING_PCM_16BIT) ? "16-bit" : "8-bit") + " " + (mAudioRecord.getSampleRate() / 1000f) + "kHz, " + desiredFrames + " frames buffer");

        return 0;
    }

    /** This method is called by SDL using JNI. */
    public static int captureReadShortBuffer(short[] buffer, boolean blocking) {
        // !!! FIXME: this is available in API Level 23. Until then, we always block. :(
        //return mAudioRecord.read(buffer, 0, buffer.length, blocking ? AudioRecord.READ_BLOCKING : AudioRecord.READ_NON_BLOCKING);
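        // A possible sketch once API level 23 can be assumed (not part of the
        // original file; unverified against this project's minimum SDK):
        //if (android.os.Build.VERSION.SDK_INT >= 23) {
        //    return mAudioRecord.read(buffer, 0, buffer.length,
        //            blocking ? AudioRecord.READ_BLOCKING : AudioRecord.READ_NON_BLOCKING);
        //}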
        return mAudioRecord.read(buffer, 0, buffer.length);
    }

    /** This method is called by SDL using JNI. */
    public static int captureReadByteBuffer(byte[] buffer, boolean blocking) {
        // !!! FIXME: this is available in API Level 23. Until then, we always block. :(
        //return mAudioRecord.read(buffer, 0, buffer.length, blocking ? AudioRecord.READ_BLOCKING : AudioRecord.READ_NON_BLOCKING);
        return mAudioRecord.read(buffer, 0, buffer.length);
    }

    /** This method is called by SDL using JNI. */
    public static void audioClose() {
        if (mAudioTrack != null) {
            mAudioTrack.stop();
            mAudioTrack.release();
            mAudioTrack = null;
        }
    }

    /** This method is called by SDL using JNI. */
    public static void captureClose() {
        if (mAudioRecord != null) {
            mAudioRecord.stop();
            mAudioRecord.release();
            mAudioRecord = null;
        }
    }

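    // Implemented on the native side of SDL; presumably called during startup
    // so native code can cache the JNI references used for the calls above.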
    public static native int nativeSetupJNI();
}