Android: Refactor AndroidAudioRenderer into an interface with two implementations (the existing MediaPlayer/SoundPool renderer and a new OpenAL Soft renderer). Added an AppSetting that allows AndroidHarness to switch the audio renderer (the default is still MediaPlayer/SoundPool).

git-svn-id: https://jmonkeyengine.googlecode.com/svn/trunk@10615 75d07b2b-3a1a-0410-a2c5-0572b91ccdca
3.0
iwg..om 12 years ago
parent 96f7b179b5
commit e727928731
  1. engine/src/android/com/jme3/app/AndroidHarness.java (26 lines changed)
  2. engine/src/android/com/jme3/asset/AndroidAssetManager.java (17 lines changed)
  3. engine/src/android/com/jme3/audio/android/AndroidAudioRenderer.java (521 lines changed)
  4. engine/src/android/com/jme3/audio/android/AndroidMediaPlayerAudioRenderer.java (523 lines changed)
  5. engine/src/android/com/jme3/audio/android/AndroidOpenALSoftAudioRenderer.java (2 lines changed)
  6. engine/src/android/com/jme3/system/android/JmeAndroidSystem.java (35 lines changed)
  7. engine/src/core/com/jme3/system/AppSettings.java (19 lines changed)
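
As a usage note (not part of this commit): a game that subclasses AndroidHarness can opt into the new renderer by setting the protected audioRendererType field added below; the harness then passes it to AppSettings.setAudioRenderer() (see the AndroidHarness hunks). A minimal sketch, assuming a hypothetical MainGame application class and the usual appClass field of AndroidHarness:

    import com.jme3.app.AndroidHarness;
    import com.jme3.system.AppSettings;

    public class MainActivity extends AndroidHarness {
        public MainActivity() {
            // Fully qualified name of the jME3 Application to launch (hypothetical example class).
            appClass = "com.example.MainGame";
            // Switch from the default MediaPlayer/SoundPool renderer to OpenAL Soft (Android 2.3+ only).
            audioRendererType = AppSettings.ANDROID_OPENAL_SOFT;
        }
    }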

@@ -15,7 +15,6 @@ import android.widget.ImageView;
import android.widget.TextView;
import com.jme3.audio.AudioRenderer;
import com.jme3.audio.android.AndroidAudioRenderer;
import com.jme3.audio.android.AndroidOpenALSoftAudioRenderer;
import com.jme3.input.JoyInput;
import com.jme3.input.TouchInput;
import com.jme3.input.android.AndroidSensorJoyInput;
@@ -72,6 +71,20 @@ public class AndroidHarness extends Activity implements TouchListener, DialogInt
* set to 2, 4 to enable multisampling.
*/
protected int antiAliasingSamples = 0;
/**
* Sets the type of Audio Renderer to be used.
* <p>
* Android MediaPlayer / SoundPool is the default and can be used on all
* supported Android platform versions (2.2+)<br>
* OpenAL Soft uses an OpenSL backend and is only supported on Android
* versions 2.3+.
* <p>
* Only use the ANDROID_ static strings defined in AppSettings.
*
*/
protected String audioRendererType = AppSettings.ANDROID_MEDIAPLAYER;
/**
* If true, Android sensors are used as simulated joysticks. Users can use the
* Android sensor feedback through the RawInputListener or by registering
@@ -110,7 +123,7 @@ public class AndroidHarness extends Activity implements TouchListener, DialogInt
/**
* Set the screen window mode. If screenFullSize is true, then the
* notification bar and title bar are removed and the screen covers the
* entire display.   If screenFullSize is false, then the notification bar
* entire display. If screenFullSize is false, then the notification bar
* remains visible. If screenShowTitle is true while screenFullScreen is
* false, then the title bar is also displayed under the notification bar.
*/
@@ -200,6 +213,7 @@ public class AndroidHarness extends Activity implements TouchListener, DialogInt
settings.setSamples(antiAliasingSamples);
settings.setResolution(disp.getWidth(), disp.getHeight());
settings.put(AndroidConfigChooser.SETTINGS_CONFIG_TYPE, eglConfigType);
settings.setAudioRenderer(audioRendererType);
// Create application instance
@@ -487,10 +501,6 @@ public class AndroidHarness extends Activity implements TouchListener, DialogInt
AndroidAudioRenderer renderer = (AndroidAudioRenderer) result;
renderer.resumeAll();
}
if (result instanceof AndroidOpenALSoftAudioRenderer) {
AndroidOpenALSoftAudioRenderer renderer = (AndroidOpenALSoftAudioRenderer) result;
renderer.resumeAll();
}
}
//resume the sensors (aka joysticks)
if (app.getContext() != null) {
@@ -530,10 +540,6 @@ public class AndroidHarness extends Activity implements TouchListener, DialogInt
AndroidAudioRenderer renderer = (AndroidAudioRenderer) result;
renderer.pauseAll();
}
if (result instanceof AndroidOpenALSoftAudioRenderer) {
AndroidOpenALSoftAudioRenderer renderer = (AndroidOpenALSoftAudioRenderer) result;
renderer.pauseAll();
}
}
//pause the sensors (aka joysticks)
if (app.getContext() != null) {

@@ -33,8 +33,11 @@ package com.jme3.asset;
import com.jme3.asset.plugins.AndroidLocator;
import com.jme3.asset.plugins.ClasspathLocator;
import com.jme3.audio.android.AndroidAudioRenderer;
import com.jme3.audio.plugins.AndroidAudioLoader;
import com.jme3.texture.Texture;
import com.jme3.audio.plugins.WAVLoader;
import com.jme3.system.AppSettings;
import com.jme3.system.android.JmeAndroidSystem;
import com.jme3.texture.plugins.AndroidImageLoader;
import java.net.URL;
import java.util.logging.Level;
@@ -81,7 +84,17 @@ public class AndroidAssetManager extends DesktopAssetManager {
registerLocator("", ClasspathLocator.class);
registerLoader(AndroidImageLoader.class, "jpg", "bmp", "gif", "png", "jpeg");
registerLoader(AndroidAudioLoader.class, "ogg", "mp3", "wav");
if (JmeAndroidSystem.getAudioRendererType().equals(AppSettings.ANDROID_MEDIAPLAYER)) {
registerLoader(AndroidAudioLoader.class, "ogg", "mp3", "wav");
} else if (JmeAndroidSystem.getAudioRendererType().equals(AppSettings.ANDROID_OPENAL_SOFT)) {
registerLoader(WAVLoader.class, "wav");
// TODO jogg is not in core, need to add some other way to get around compile errors, or not.
// registerLoader(com.jme3.audio.plugins.OGGLoader.class, "ogg");
registerLoaderSafe("com.jme3.audio.plugins.OGGLoader", "ogg");
} else {
throw new IllegalStateException("No Audio Renderer Type defined!");
}
registerLoader(com.jme3.material.plugins.J3MLoader.class, "j3m");
registerLoader(com.jme3.material.plugins.J3MLoader.class, "j3md");
registerLoader(com.jme3.material.plugins.ShaderNodeDefinitionLoader.class, "j3sn");
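
Note on registerLoaderSafe (used in the hunk above): OGGLoader lives outside core (it depends on jogg), so the OGG loader is registered by class name to avoid a compile-time dependency. The helper's body is not part of this diff; a rough reflection-based sketch of what such a method could look like, offered purely as an assumption and relying on the existing registerLoader(Class, String...) and the class logger:

    // Hypothetical sketch only -- not the actual registerLoaderSafe implementation.
    @SuppressWarnings("unchecked")
    private void registerLoaderSafe(String loaderClassName, String... extensions) {
        try {
            Class<? extends AssetLoader> loaderClass =
                    (Class<? extends AssetLoader>) Class.forName(loaderClassName);
            registerLoader(loaderClass, extensions);
        } catch (Exception ex) {
            // Loader class not on the classpath (e.g. jogg missing): log and continue without it.
            logger.log(Level.WARNING, "Cannot register loader {0}", loaderClassName);
        }
    }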

@@ -1,523 +1,24 @@
/*
* Copyright (c) 2009-2012 jMonkeyEngine
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of 'jMonkeyEngine' nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.jme3.audio.android;
import android.app.Activity;
import android.content.Context;
import android.content.res.AssetFileDescriptor;
import android.content.res.AssetManager;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.media.SoundPool;
import com.jme3.asset.AssetKey;
import com.jme3.audio.*;
import com.jme3.audio.AudioSource.Status;
import com.jme3.math.FastMath;
import com.jme3.math.Vector3f;
import java.io.IOException;
import java.util.HashMap;
import java.util.logging.Level;
import java.util.logging.Logger;
import com.jme3.audio.AudioRenderer;
/**
* This class is the android implementation for {@link AudioRenderer}
* Android specific AudioRenderer interface that supports pausing and resuming
* audio files when the app is minimized or placed in the background
*
* @author larynx
* @author plan_rich
* @author iwgeric
*/
public class AndroidAudioRenderer implements AudioRenderer,
SoundPool.OnLoadCompleteListener, MediaPlayer.OnCompletionListener {
private static final Logger logger = Logger.getLogger(AndroidAudioRenderer.class.getName());
private final static int MAX_NUM_CHANNELS = 16;
private final HashMap<AudioSource, MediaPlayer> musicPlaying = new HashMap<AudioSource, MediaPlayer>();
private SoundPool soundPool = null;
private final Vector3f listenerPosition = new Vector3f();
// For temp use
private final Vector3f distanceVector = new Vector3f();
private final AssetManager assetManager;
private HashMap<Integer, AudioSource> soundpoolStillLoading = new HashMap<Integer, AudioSource>();
private Listener listener;
private boolean audioDisabled = false;
private final AudioManager manager;
public AndroidAudioRenderer(Activity context) {
manager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
context.setVolumeControlStream(AudioManager.STREAM_MUSIC);
assetManager = context.getAssets();
}
@Override
public void initialize() {
soundPool = new SoundPool(MAX_NUM_CHANNELS, AudioManager.STREAM_MUSIC,
0);
soundPool.setOnLoadCompleteListener(this);
}
@Override
public void updateSourceParam(AudioSource src, AudioParam param) {
if (audioDisabled) {
return;
}
if (src.getChannel() < 0) {
return;
}
switch (param) {
case Position:
if (!src.isPositional()) {
return;
}
Vector3f pos = src.getPosition();
break;
case Velocity:
if (!src.isPositional()) {
return;
}
Vector3f vel = src.getVelocity();
break;
case MaxDistance:
if (!src.isPositional()) {
return;
}
break;
case RefDistance:
if (!src.isPositional()) {
return;
}
break;
case ReverbFilter:
if (!src.isPositional() || !src.isReverbEnabled()) {
return;
}
break;
case ReverbEnabled:
if (!src.isPositional()) {
return;
}
if (src.isReverbEnabled()) {
updateSourceParam(src, AudioParam.ReverbFilter);
}
break;
case IsPositional:
break;
case Direction:
if (!src.isDirectional()) {
return;
}
Vector3f dir = src.getDirection();
break;
case InnerAngle:
if (!src.isDirectional()) {
return;
}
break;
case OuterAngle:
if (!src.isDirectional()) {
return;
}
break;
case IsDirectional:
if (src.isDirectional()) {
updateSourceParam(src, AudioParam.Direction);
updateSourceParam(src, AudioParam.InnerAngle);
updateSourceParam(src, AudioParam.OuterAngle);
} else {
}
break;
case DryFilter:
if (src.getDryFilter() != null) {
Filter f = src.getDryFilter();
if (f.isUpdateNeeded()) {
// updateFilter(f);
}
}
break;
case Looping:
if (src.isLooping()) {
}
break;
case Volume:
MediaPlayer mp = musicPlaying.get(src);
if (mp != null) {
mp.setVolume(src.getVolume(), src.getVolume());
} else {
soundPool.setVolume(src.getChannel(), src.getVolume(),
src.getVolume());
}
break;
case Pitch:
break;
}
}
@Override
public void updateListenerParam(Listener listener, ListenerParam param) {
if (audioDisabled) {
return;
}
switch (param) {
case Position:
listenerPosition.set(listener.getLocation());
break;
case Rotation:
Vector3f dir = listener.getDirection();
Vector3f up = listener.getUp();
break;
case Velocity:
Vector3f vel = listener.getVelocity();
break;
case Volume:
// alListenerf(AL_GAIN, listener.getVolume());
break;
}
}
@Override
public void update(float tpf) {
float distance;
float volume;
// Loop over all mediaplayers
for (AudioSource src : musicPlaying.keySet()) {
MediaPlayer mp = musicPlaying.get(src);
// Calc the distance to the listener
distanceVector.set(listenerPosition);
distanceVector.subtractLocal(src.getPosition());
distance = FastMath.abs(distanceVector.length());
if (distance < src.getRefDistance()) {
distance = src.getRefDistance();
}
if (distance > src.getMaxDistance()) {
distance = src.getMaxDistance();
}
volume = src.getRefDistance() / distance;
AndroidAudioData audioData = (AndroidAudioData) src.getAudioData();
if (FastMath.abs(audioData.getCurrentVolume() - volume) > FastMath.FLT_EPSILON) {
// Left / Right channel get the same volume by now, only
// positional
mp.setVolume(volume, volume);
audioData.setCurrentVolume(volume);
}
}
}
public void setListener(Listener listener) {
if (audioDisabled) {
return;
}
if (this.listener != null) {
// previous listener no longer associated with current
// renderer
this.listener.setRenderer(null);
}
this.listener = listener;
this.listener.setRenderer(this);
}
@Override
public void cleanup() {
// Cleanup sound pool
if (soundPool != null) {
soundPool.release();
soundPool = null;
}
// Cleanup media player
for (AudioSource src : musicPlaying.keySet()) {
MediaPlayer mp = musicPlaying.get(src);
{
mp.stop();
mp.release();
src.setStatus(Status.Stopped);
}
}
musicPlaying.clear();
}
@Override
public void onCompletion(MediaPlayer mp) {
if (mp.isPlaying()) {
mp.seekTo(0);
mp.stop();
}
// XXX: This has bad performance -> maybe change overall structure of
// mediaplayer in this audiorenderer?
for (AudioSource src : musicPlaying.keySet()) {
if (musicPlaying.get(src) == mp) {
src.setStatus(Status.Stopped);
break;
}
}
}
public interface AndroidAudioRenderer extends AudioRenderer {
/**
* Plays using the {@link SoundPool} of Android. Due to a hard limitation of
* the SoundPool: after playing multiple instances of a sound, you only have
* the channel of the last played instance.
*
* It is not possible to query the SoundPool for the state of a specific
* streamID, so removing is not possible -> no one knows when the sound
* finished.
* Pauses all Playing audio. To be used when the app is placed in the
* background.
*/
public void playSourceInstance(AudioSource src) {
if (audioDisabled) {
return;
}
AndroidAudioData audioData = (AndroidAudioData) src.getAudioData();
if (!(audioData.getAssetKey() instanceof AudioKey)) {
throw new IllegalArgumentException("Asset is not a AudioKey");
}
AudioKey assetKey = (AudioKey) audioData.getAssetKey();
try {
if (audioData.getId() < 0) { // found something to load
int soundId = soundPool.load(
assetManager.openFd(assetKey.getName()), 1);
audioData.setId(soundId);
}
int channel = soundPool.play(audioData.getId(), 1f, 1f, 1, 0, 1f);
if (channel == 0) {
soundpoolStillLoading.put(audioData.getId(), src);
} else {
if (src.getStatus() != Status.Stopped) {
soundPool.stop(channel);
src.setStatus(Status.Stopped);
}
src.setChannel(channel); // receive a channel at the last
setSourceParams(src);
// playing at least
}
} catch (IOException e) {
logger.log(Level.SEVERE,
"Failed to load sound " + assetKey.getName(), e);
audioData.setId(-1);
}
}
@Override
public void onLoadComplete(SoundPool soundPool, int sampleId, int status) {
AudioSource src = soundpoolStillLoading.remove(sampleId);
if (src == null) {
logger.warning("Something went terribly wrong! onLoadComplete"
+ " had sampleId which was not in the HashMap of loading items");
return;
}
AudioData audioData = src.getAudioData();
// load was successful
if (status == 0) {
int channelIndex;
channelIndex = soundPool.play(audioData.getId(), 1f, 1f, 1, 0, 1f);
src.setChannel(channelIndex);
setSourceParams(src);
}
}
public void playSource(AudioSource src) {
if (audioDisabled) {
return;
}
AndroidAudioData audioData = (AndroidAudioData) src.getAudioData();
MediaPlayer mp = musicPlaying.get(src);
if (mp == null) {
mp = new MediaPlayer();
mp.setOnCompletionListener(this);
mp.setAudioStreamType(AudioManager.STREAM_MUSIC);
}
try {
if (src.getStatus() == Status.Stopped) {
mp.reset();
AssetKey<?> key = audioData.getAssetKey();
AssetFileDescriptor afd = assetManager.openFd(key.getName()); // assetKey.getName()
mp.setDataSource(afd.getFileDescriptor(), afd.getStartOffset(),
afd.getLength());
mp.prepare();
setSourceParams(src, mp);
src.setChannel(0);
src.setStatus(Status.Playing);
musicPlaying.put(src, mp);
mp.start();
} else {
mp.start();
}
} catch (IllegalStateException e) {
e.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
}
}
private void setSourceParams(AudioSource src, MediaPlayer mp) {
mp.setLooping(src.isLooping());
mp.setVolume(src.getVolume(), src.getVolume());
//src.getDryFilter();
}
private void setSourceParams(AudioSource src) {
soundPool.setLoop(src.getChannel(), src.isLooping() ? -1 : 0);
soundPool.setVolume(src.getChannel(), src.getVolume(), src.getVolume());
}
public void pauseAll();
/**
* Pause the current playing sounds. Both from the {@link SoundPool} and the
* active {@link MediaPlayer}s
* Resumes all Paused audio. To be used when the app is brought back to
* the foreground.
*/
public void pauseAll() {
if (soundPool != null) {
soundPool.autoPause();
for (MediaPlayer mp : musicPlaying.values()) {
if(mp.isPlaying()){
mp.pause();
}
}
}
}
/**
* Resume all paused sounds.
*/
public void resumeAll() {
if (soundPool != null) {
soundPool.autoResume();
for (MediaPlayer mp : musicPlaying.values()) {
mp.start(); //no resume -> api says call start to resume
}
}
}
public void pauseSource(AudioSource src) {
if (audioDisabled) {
return;
}
MediaPlayer mp = musicPlaying.get(src);
if (mp != null) {
mp.pause();
src.setStatus(Status.Paused);
} else {
int channel = src.getChannel();
if (channel != -1) {
soundPool.pause(channel); // is not very likely to make
} // something useful :)
}
}
public void stopSource(AudioSource src) {
if (audioDisabled) {
return;
}
// can be stream or buffer -> so try to get mediaplayer
// if there is none, try to stop the soundpool
MediaPlayer mp = musicPlaying.get(src);
if (mp != null) {
mp.stop();
mp.reset();
src.setStatus(Status.Stopped);
} else {
int channel = src.getChannel();
if (channel != -1) {
soundPool.pause(channel); // is not very likely to make
// something useful :)
}
}
}
@Override
public void deleteAudioData(AudioData ad) {
for (AudioSource src : musicPlaying.keySet()) {
if (src.getAudioData() == ad) {
MediaPlayer mp = musicPlaying.remove(src);
mp.stop();
mp.release();
src.setStatus(Status.Stopped);
src.setChannel(-1);
ad.setId(-1);
break;
}
}
if (ad.getId() > 0) {
soundPool.unload(ad.getId());
ad.setId(-1);
}
}
@Override
public void setEnvironment(Environment env) {
// not yet supported
}
@Override
public void deleteFilter(Filter filter) {
}
public void resumeAll();
}
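
The hunk above interleaves the removed MediaPlayer/SoundPool implementation with the lines added for the new interface, so the result is hard to read. Pulled together from the added lines, the new AndroidAudioRenderer interface is roughly the following (license header omitted):

    package com.jme3.audio.android;

    import com.jme3.audio.AudioRenderer;

    /**
     * Android specific AudioRenderer interface that supports pausing and resuming
     * audio files when the app is minimized or placed in the background.
     */
    public interface AndroidAudioRenderer extends AudioRenderer {

        /**
         * Pauses all playing audio. To be used when the app is placed in the
         * background.
         */
        public void pauseAll();

        /**
         * Resumes all paused audio. To be used when the app is brought back to
         * the foreground.
         */
        public void resumeAll();
    }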

@@ -0,0 +1,523 @@
/*
* Copyright (c) 2009-2012 jMonkeyEngine
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of 'jMonkeyEngine' nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.jme3.audio.android;
import android.app.Activity;
import android.content.Context;
import android.content.res.AssetFileDescriptor;
import android.content.res.AssetManager;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.media.SoundPool;
import com.jme3.asset.AssetKey;
import com.jme3.audio.*;
import com.jme3.audio.AudioSource.Status;
import com.jme3.math.FastMath;
import com.jme3.math.Vector3f;
import java.io.IOException;
import java.util.HashMap;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* This class is the Android implementation of {@link AudioRenderer}, based on MediaPlayer / SoundPool.
*
* @author larynx
* @author plan_rich
*/
public class AndroidMediaPlayerAudioRenderer implements AndroidAudioRenderer,
SoundPool.OnLoadCompleteListener, MediaPlayer.OnCompletionListener {
private static final Logger logger = Logger.getLogger(AndroidMediaPlayerAudioRenderer.class.getName());
private final static int MAX_NUM_CHANNELS = 16;
private final HashMap<AudioSource, MediaPlayer> musicPlaying = new HashMap<AudioSource, MediaPlayer>();
private SoundPool soundPool = null;
private final Vector3f listenerPosition = new Vector3f();
// For temp use
private final Vector3f distanceVector = new Vector3f();
private final AssetManager assetManager;
private HashMap<Integer, AudioSource> soundpoolStillLoading = new HashMap<Integer, AudioSource>();
private Listener listener;
private boolean audioDisabled = false;
private final AudioManager manager;
public AndroidMediaPlayerAudioRenderer(Activity context) {
manager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
context.setVolumeControlStream(AudioManager.STREAM_MUSIC);
assetManager = context.getAssets();
}
@Override
public void initialize() {
soundPool = new SoundPool(MAX_NUM_CHANNELS, AudioManager.STREAM_MUSIC,
0);
soundPool.setOnLoadCompleteListener(this);
}
@Override
public void updateSourceParam(AudioSource src, AudioParam param) {
if (audioDisabled) {
return;
}
if (src.getChannel() < 0) {
return;
}
switch (param) {
case Position:
if (!src.isPositional()) {
return;
}
Vector3f pos = src.getPosition();
break;
case Velocity:
if (!src.isPositional()) {
return;
}
Vector3f vel = src.getVelocity();
break;
case MaxDistance:
if (!src.isPositional()) {
return;
}
break;
case RefDistance:
if (!src.isPositional()) {
return;
}
break;
case ReverbFilter:
if (!src.isPositional() || !src.isReverbEnabled()) {
return;
}
break;
case ReverbEnabled:
if (!src.isPositional()) {
return;
}
if (src.isReverbEnabled()) {
updateSourceParam(src, AudioParam.ReverbFilter);
}
break;
case IsPositional:
break;
case Direction:
if (!src.isDirectional()) {
return;
}
Vector3f dir = src.getDirection();
break;
case InnerAngle:
if (!src.isDirectional()) {
return;
}
break;
case OuterAngle:
if (!src.isDirectional()) {
return;
}
break;
case IsDirectional:
if (src.isDirectional()) {
updateSourceParam(src, AudioParam.Direction);
updateSourceParam(src, AudioParam.InnerAngle);
updateSourceParam(src, AudioParam.OuterAngle);
} else {
}
break;
case DryFilter:
if (src.getDryFilter() != null) {
Filter f = src.getDryFilter();
if (f.isUpdateNeeded()) {
// updateFilter(f);
}
}
break;
case Looping:
if (src.isLooping()) {
}
break;
case Volume:
MediaPlayer mp = musicPlaying.get(src);
if (mp != null) {
mp.setVolume(src.getVolume(), src.getVolume());
} else {
soundPool.setVolume(src.getChannel(), src.getVolume(),
src.getVolume());
}
break;
case Pitch:
break;
}
}
@Override
public void updateListenerParam(Listener listener, ListenerParam param) {
if (audioDisabled) {
return;
}
switch (param) {
case Position:
listenerPosition.set(listener.getLocation());
break;
case Rotation:
Vector3f dir = listener.getDirection();
Vector3f up = listener.getUp();
break;
case Velocity:
Vector3f vel = listener.getVelocity();
break;
case Volume:
// alListenerf(AL_GAIN, listener.getVolume());
break;
}
}
@Override
public void update(float tpf) {
float distance;
float volume;
// Loop over all mediaplayers
for (AudioSource src : musicPlaying.keySet()) {
MediaPlayer mp = musicPlaying.get(src);
// Calc the distance to the listener
distanceVector.set(listenerPosition);
distanceVector.subtractLocal(src.getPosition());
distance = FastMath.abs(distanceVector.length());
if (distance < src.getRefDistance()) {
distance = src.getRefDistance();
}
if (distance > src.getMaxDistance()) {
distance = src.getMaxDistance();
}
volume = src.getRefDistance() / distance;
AndroidAudioData audioData = (AndroidAudioData) src.getAudioData();
if (FastMath.abs(audioData.getCurrentVolume() - volume) > FastMath.FLT_EPSILON) {
// Left / Right channel get the same volume by now, only
// positional
mp.setVolume(volume, volume);
audioData.setCurrentVolume(volume);
}
}
}
public void setListener(Listener listener) {
if (audioDisabled) {
return;
}
if (this.listener != null) {
// previous listener no longer associated with current
// renderer
this.listener.setRenderer(null);
}
this.listener = listener;
this.listener.setRenderer(this);
}
@Override
public void cleanup() {
// Cleanup sound pool
if (soundPool != null) {
soundPool.release();
soundPool = null;
}
// Cleanup media player
for (AudioSource src : musicPlaying.keySet()) {
MediaPlayer mp = musicPlaying.get(src);
{
mp.stop();
mp.release();
src.setStatus(Status.Stopped);
}
}
musicPlaying.clear();
}
@Override
public void onCompletion(MediaPlayer mp) {
if (mp.isPlaying()) {
mp.seekTo(0);
mp.stop();
}
// XXX: This has bad performance -> maybe change overall structure of
// mediaplayer in this audiorenderer?
for (AudioSource src : musicPlaying.keySet()) {
if (musicPlaying.get(src) == mp) {
src.setStatus(Status.Stopped);
break;
}
}
}
/**
* Plays using the {@link SoundPool} of Android. Due to a hard limitation of
* the SoundPool: after playing multiple instances of a sound, you only have
* the channel of the last played instance.
*
* It is not possible to query the SoundPool for the state of a specific
* streamID, so removing is not possible -> no one knows when the sound
* finished.
*/
public void playSourceInstance(AudioSource src) {
if (audioDisabled) {
return;
}
AndroidAudioData audioData = (AndroidAudioData) src.getAudioData();
if (!(audioData.getAssetKey() instanceof AudioKey)) {
throw new IllegalArgumentException("Asset is not a AudioKey");
}
AudioKey assetKey = (AudioKey) audioData.getAssetKey();
try {
if (audioData.getId() < 0) { // found something to load
int soundId = soundPool.load(
assetManager.openFd(assetKey.getName()), 1);
audioData.setId(soundId);
}
int channel = soundPool.play(audioData.getId(), 1f, 1f, 1, 0, 1f);
if (channel == 0) {
soundpoolStillLoading.put(audioData.getId(), src);
} else {
if (src.getStatus() != Status.Stopped) {
soundPool.stop(channel);
src.setStatus(Status.Stopped);
}
src.setChannel(channel); // receive a channel at the last
setSourceParams(src);
// playing at least
}
} catch (IOException e) {
logger.log(Level.SEVERE,
"Failed to load sound " + assetKey.getName(), e);
audioData.setId(-1);
}
}
@Override
public void onLoadComplete(SoundPool soundPool, int sampleId, int status) {
AudioSource src = soundpoolStillLoading.remove(sampleId);
if (src == null) {
logger.warning("Something went terribly wrong! onLoadComplete"
+ " had sampleId which was not in the HashMap of loading items");
return;
}
AudioData audioData = src.getAudioData();
// load was successful
if (status == 0) {
int channelIndex;
channelIndex = soundPool.play(audioData.getId(), 1f, 1f, 1, 0, 1f);
src.setChannel(channelIndex);
setSourceParams(src);
}
}
public void playSource(AudioSource src) {
if (audioDisabled) {
return;
}
AndroidAudioData audioData = (AndroidAudioData) src.getAudioData();
MediaPlayer mp = musicPlaying.get(src);
if (mp == null) {
mp = new MediaPlayer();
mp.setOnCompletionListener(this);
mp.setAudioStreamType(AudioManager.STREAM_MUSIC);
}
try {
if (src.getStatus() == Status.Stopped) {
mp.reset();
AssetKey<?> key = audioData.getAssetKey();
AssetFileDescriptor afd = assetManager.openFd(key.getName()); // assetKey.getName()
mp.setDataSource(afd.getFileDescriptor(), afd.getStartOffset(),
afd.getLength());
mp.prepare();
setSourceParams(src, mp);
src.setChannel(0);
src.setStatus(Status.Playing);
musicPlaying.put(src, mp);
mp.start();
} else {
mp.start();
}
} catch (IllegalStateException e) {
e.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
}
}
private void setSourceParams(AudioSource src, MediaPlayer mp) {
mp.setLooping(src.isLooping());
mp.setVolume(src.getVolume(), src.getVolume());
//src.getDryFilter();
}
private void setSourceParams(AudioSource src) {
soundPool.setLoop(src.getChannel(), src.isLooping() ? -1 : 0);
soundPool.setVolume(src.getChannel(), src.getVolume(), src.getVolume());
}
/**
* Pause the current playing sounds. Both from the {@link SoundPool} and the
* active {@link MediaPlayer}s
*/
public void pauseAll() {
if (soundPool != null) {
soundPool.autoPause();
for (MediaPlayer mp : musicPlaying.values()) {
if(mp.isPlaying()){
mp.pause();
}
}
}
}
/**
* Resume all paused sounds.
*/
public void resumeAll() {
if (soundPool != null) {
soundPool.autoResume();
for (MediaPlayer mp : musicPlaying.values()) {
mp.start(); //no resume -> api says call start to resume
}
}
}
public void pauseSource(AudioSource src) {
if (audioDisabled) {
return;
}
MediaPlayer mp = musicPlaying.get(src);
if (mp != null) {
mp.pause();
src.setStatus(Status.Paused);
} else {
int channel = src.getChannel();
if (channel != -1) {
soundPool.pause(channel); // is not very likely to make
} // something useful :)
}
}
public void stopSource(AudioSource src) {
if (audioDisabled) {
return;
}
// can be stream or buffer -> so try to get mediaplayer
// if there is none, try to stop the soundpool
MediaPlayer mp = musicPlaying.get(src);
if (mp != null) {
mp.stop();
mp.reset();
src.setStatus(Status.Stopped);
} else {
int channel = src.getChannel();
if (channel != -1) {
soundPool.pause(channel); // is not very likely to make
// something useful :)
}
}
}
@Override
public void deleteAudioData(AudioData ad) {
for (AudioSource src : musicPlaying.keySet()) {
if (src.getAudioData() == ad) {
MediaPlayer mp = musicPlaying.remove(src);
mp.stop();
mp.release();
src.setStatus(Status.Stopped);
src.setChannel(-1);
ad.setId(-1);
break;
}
}
if (ad.getId() > 0) {
soundPool.unload(ad.getId());
ad.setId(-1);
}
}
@Override
public void setEnvironment(Environment env) {
// not yet supported
}
@Override
public void deleteFilter(Filter filter) {
}
}

@@ -44,7 +44,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
import java.util.logging.Level;
import java.util.logging.Logger;
public class AndroidOpenALSoftAudioRenderer implements AudioRenderer, Runnable {
public class AndroidOpenALSoftAudioRenderer implements AndroidAudioRenderer, Runnable {
private static final Logger logger = Logger.getLogger(AndroidOpenALSoftAudioRenderer.class.getName());
private final NativeObjectManager objManager = new NativeObjectManager();

@@ -10,25 +10,25 @@ import com.jme3.asset.AndroidImageInfo;
import com.jme3.asset.AssetManager;
import com.jme3.audio.AudioRenderer;
import com.jme3.audio.android.AndroidAudioRenderer;
import com.jme3.audio.android.AndroidMediaPlayerAudioRenderer;
import com.jme3.audio.android.AndroidOpenALSoftAudioRenderer;
import com.jme3.system.*;
import com.jme3.system.JmeContext.Type;
import com.jme3.texture.Image;
import com.jme3.texture.image.DefaultImageRaster;
import com.jme3.texture.image.ImageRaster;
import com.jme3.util.AndroidScreenshots;
import com.jme3.util.JmeFormatter;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URL;
import java.nio.ByteBuffer;
import java.util.logging.Handler;
import java.util.logging.Level;
import java.util.logging.Logger;
public class JmeAndroidSystem extends JmeSystemDelegate {
private static Activity activity;
private static String audioRendererType = AppSettings.ANDROID_MEDIAPLAYER;
static {
try {
@@ -97,6 +97,16 @@ public class JmeAndroidSystem extends JmeSystemDelegate {
@Override
public JmeContext newContext(AppSettings settings, Type contextType) {
if (settings.getAudioRenderer().equals(AppSettings.ANDROID_MEDIAPLAYER)) {
logger.log(Level.INFO, "newContext settings set to Android MediaPlayer / SoundPool");
audioRendererType = AppSettings.ANDROID_MEDIAPLAYER;
} else if (settings.getAudioRenderer().equals(AppSettings.ANDROID_OPENAL_SOFT)) {
logger.log(Level.INFO, "newContext settings set to Android OpenAL Soft");
audioRendererType = AppSettings.ANDROID_OPENAL_SOFT;
} else {
logger.log(Level.INFO, "AudioRenderer not set. Defaulting to Android MediaPlayer / SoundPool");
audioRendererType = AppSettings.ANDROID_MEDIAPLAYER;
}
initialize(settings);
JmeContext ctx = new OGLESContext();
ctx.setSettings(settings);
@@ -105,7 +115,20 @@ public class JmeAndroidSystem extends JmeSystemDelegate {
@Override
public AudioRenderer newAudioRenderer(AppSettings settings) {
return new AndroidAudioRenderer(activity);
if (settings.getAudioRenderer().equals(AppSettings.ANDROID_MEDIAPLAYER)) {
logger.log(Level.INFO, "newAudioRenderer settings set to Android MediaPlayer / SoundPool");
audioRendererType = AppSettings.ANDROID_MEDIAPLAYER;
return new AndroidMediaPlayerAudioRenderer(activity);
} else if (settings.getAudioRenderer().equals(AppSettings.ANDROID_OPENAL_SOFT)) {
logger.log(Level.INFO, "newAudioRenderer settings set to Android OpenAL Soft");
audioRendererType = AppSettings.ANDROID_OPENAL_SOFT;
return new AndroidOpenALSoftAudioRenderer();
} else {
logger.log(Level.INFO, "AudioRenderer not set. Defaulting to Android MediaPlayer / SoundPool");
audioRendererType = AppSettings.ANDROID_MEDIAPLAYER;
return new AndroidMediaPlayerAudioRenderer(activity);
}
}
@Override
@@ -198,4 +221,8 @@ public class JmeAndroidSystem extends JmeSystemDelegate {
public static Activity getActivity() {
return activity;
}
public static String getAudioRendererType() {
return audioRendererType;
}
}

@@ -105,6 +105,25 @@ public final class AppSettings extends HashMap<String, Object> {
*/
public static final String LWJGL_OPENAL = "LWJGL";
/**
* Use the Android MediaPlayer / SoundPool based renderer for Android audio capabilities.
* <p>
* NOTE: Supports Android 2.2+ platforms. This is the current default for
* Android platforms.
*
* @see AppSettings#setAudioRenderer(java.lang.String)
*/
public static final String ANDROID_MEDIAPLAYER = "MediaPlayer";
/**
* Use the OpenAL Soft based renderer for Android audio capabilities.
* <p>
* NOTE: Only to be used on Android 2.3+ platforms because it uses an OpenSL backend.
*
* @see AppSettings#setAudioRenderer(java.lang.String)
*/
public static final String ANDROID_OPENAL_SOFT = "OpenAL_SOFT";
static {
defaults.put("Width", 640);
defaults.put("Height", 480);
