AudioSource: add method to get playback time
As requested on the forum, a way to get the playback time / position is needed to perform proper audio / video synchronization.
parent abc8ec0e5d
commit 7393f79165
@@ -525,4 +525,9 @@ public class AndroidMediaPlayerAudioRenderer implements AudioRenderer,
     @Override
     public void deleteFilter(Filter filter) {
    }
+
+    @Override
+    public float getSourcePlaybackTime(AudioSource src) {
+        throw new UnsupportedOperationException("Not supported yet.");
+    }
 }
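The Android backend only stubs the new method, so cross-platform callers may want to guard the query. A minimal defensive sketch; the helper name and the -1 fallback are illustrative choices, not part of this commit:

```java
import com.jme3.audio.AudioRenderer;
import com.jme3.audio.AudioSource;

public class PlaybackTimeQuery {

    /**
     * Returns the source's playback time in seconds, or -1 if the active
     * renderer does not support the query (e.g. the MediaPlayer-based
     * Android renderer above, which throws UnsupportedOperationException).
     */
    public static float playbackTimeOrFallback(AudioRenderer renderer, AudioSource src) {
        try {
            return renderer.getSourcePlaybackTime(src);
        } catch (UnsupportedOperationException ex) {
            return -1f;
        }
    }
}
```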
@@ -409,6 +409,14 @@ public class AudioNode extends Node implements AudioSource {
            play();
        }
    }
 
+    @Override
+    public float getPlaybackTime() {
+        if (channel >= 0)
+            return getRenderer().getSourcePlaybackTime(this);
+        else
+            return 0;
+    }
+
    public Vector3f getPosition() {
        return getWorldTranslation();
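As the commit message notes, the point of getPlaybackTime() is audio / video synchronization: the audio position can serve as the master clock that video presentation is slaved to. A minimal sketch of that pattern, assuming a hypothetical caller that knows each video frame's presentation timestamp; only AudioNode.getPlaybackTime() comes from this commit:

```java
import com.jme3.audio.AudioNode;

public class AVSyncClock {

    private final AudioNode audio;
    private float nextFramePts; // presentation timestamp of the next video frame, in seconds

    public AVSyncClock(AudioNode audio) {
        this.audio = audio;
    }

    /** True once the audio clock has reached the next frame's timestamp. */
    public boolean shouldPresentNextFrame() {
        return audio.getPlaybackTime() >= nextFramePts;
    }

    /** Advance to the following frame after presenting the current one. */
    public void framePresented(float frameDurationSeconds) {
        nextFramePts += frameDurationSeconds;
    }
}
```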
@@ -59,6 +59,7 @@ public interface AudioRenderer {
 
    public void updateSourceParam(AudioSource src, AudioParam param);
    public void updateListenerParam(Listener listener, ListenerParam param);
+    public float getSourcePlaybackTime(AudioSource src);
 
    public void deleteFilter(Filter filter);
    public void deleteAudioData(AudioData ad);
@@ -95,6 +95,11 @@ public interface AudioSource {
     * @return the time offset in the sound sample when to start playing.
     */
    public float getTimeOffset();
 
+    /**
+     * @return the current playback position of the source in seconds.
+     */
+    public float getPlaybackTime();
+
    /**
     * @return The velocity of the audio source.
@@ -54,6 +54,8 @@ public class AudioStream extends AudioData implements Closeable {
    protected boolean eof = false;
    protected int[] ids;
 
+    protected int unqueuedBuffersBytes = 0;
+
    public AudioStream() {
        super();
    }
@@ -196,10 +198,21 @@ public class AudioStream extends AudioData implements Closeable {
        return in instanceof SeekableStream;
    }
 
+    public int getUnqueuedBufferBytes() {
+        return unqueuedBuffersBytes;
+    }
+
+    public void setUnqueuedBufferBytes(int unqueuedBuffers) {
+        this.unqueuedBuffersBytes = unqueuedBuffers;
+    }
+
    public void setTime(float time) {
        if (in instanceof SeekableStream) {
            ((SeekableStream) in).setTime(time);
            eof = false;
+
+            // TODO: when we actually support seeking, this will need to be properly set.
+            unqueuedBuffersBytes = 0;
        } else {
            throw new IllegalStateException(
                    "Cannot use setTime on a stream that "
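To see what the new counter tracks: each buffer the decoder thread unqueues represents audio that has definitely been consumed, and seeking resets the count, as the TODO above notes. A standalone sketch of that accounting; the 35280-byte buffer size (0.2 s of 44100 Hz stereo 16-bit audio) is assumed here for illustration:

```java
public class UnqueuedBytesDemo {

    public static void main(String[] args) {
        // Illustrative buffer size: 0.2 s of 44100 Hz stereo 16-bit audio.
        final int BUFFER_SIZE = 35280;

        int unqueuedBuffersBytes = 0;

        // The decoder thread unqueues three fully processed buffers...
        for (int i = 0; i < 3; i++) {
            unqueuedBuffersBytes += BUFFER_SIZE;
        }
        // ...so 105840 bytes (0.6 s) of the stream are known to be consumed.
        System.out.println("consumed bytes: " + unqueuedBuffersBytes);

        // A seek resets the counter (see the TODO in setTime() above),
        // so times derived from it afterwards restart from zero.
        unqueuedBuffersBytes = 0;
        System.out.println("after seek: " + unqueuedBuffersBytes);
    }
}
```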
@@ -301,6 +301,58 @@ public class ALAudioRenderer implements AudioRenderer, Runnable {
        f.clearUpdateNeeded();
    }
 
+    @Override
+    public float getSourcePlaybackTime(AudioSource src) {
+        checkDead();
+        synchronized (threadLock) {
+            if (audioDisabled) {
+                return 0;
+            }
+
+            // See comment in updateSourceParam().
+            if (src.getChannel() < 0) {
+                return 0;
+            }
+
+            int id = channels[src.getChannel()];
+            AudioData data = src.getAudioData();
+            int playbackOffsetBytes = 0;
+
+            if (data instanceof AudioStream) {
+                // Because audio streams are processed in buffer chunks,
+                // we have to compute the amount of time the stream has already
+                // been playing based on the number of buffers that were processed.
+                AudioStream stream = (AudioStream) data;
+
+                // NOTE: the assumption is that all enqueued buffers are the same size.
+                // This is currently enforced by fillBuffer().
+
+                // The number of unqueued bytes that the decoder thread
+                // keeps track of.
+                int unqueuedBytes = stream.getUnqueuedBufferBytes();
+
+                // Additional processed buffers that the decoder thread
+                // has not unqueued yet (it only updates 20 times per second).
+                int unqueuedBytesExtra = al.alGetSourcei(id, AL_BUFFERS_PROCESSED) * BUFFER_SIZE;
+
+                // Total additional bytes that need to be considered.
+                playbackOffsetBytes = unqueuedBytes; // + unqueuedBytesExtra;
+            }
+
+            // Add the byte offset from the source (for both streams and buffers).
+            playbackOffsetBytes += al.alGetSourcei(id, AL_BYTE_OFFSET);
+
+            // Compute the time value from bytes.
+            // E.g. for a 44100 Hz source with 2 channels and 16 bits per sample:
+            // (44100 * 2 * 16 / 8) = 176400
+            int bytesPerSecond = (data.getSampleRate() *
+                                  data.getChannels() *
+                                  data.getBitsPerSample() / 8);
+
+            return (float) playbackOffsetBytes / bytesPerSecond;
+        }
+    }
+
    public void updateSourceParam(AudioSource src, AudioParam param) {
        checkDead();
        synchronized (threadLock) {
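The bytes-to-seconds conversion at the end of the method is plain arithmetic and can be checked in isolation. A small sketch using the figures from the comment above; the 264600-byte offset is an invented example value:

```java
public class PlaybackTimeMath {

    public static void main(String[] args) {
        // From the comment above: 44100 Hz, 2 channels, 16 bits per sample.
        int sampleRate    = 44100;
        int channels      = 2;
        int bitsPerSample = 16;

        int bytesPerSecond = sampleRate * channels * bitsPerSample / 8;
        System.out.println(bytesPerSecond); // 176400

        // An example offset of 264600 consumed bytes...
        int playbackOffsetBytes = 264600;

        // ...maps to 1.5 seconds of playback.
        System.out.println((float) playbackOffsetBytes / bytesPerSecond); // 1.5
    }
}
```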
@@ -648,6 +700,7 @@ public class ALAudioRenderer implements AudioRenderer, Runnable {
    private boolean fillStreamingSource(int sourceId, AudioStream stream, boolean looping) {
        boolean success = false;
        int processed = al.alGetSourcei(sourceId, AL_BUFFERS_PROCESSED);
+        int unqueuedBufferBytes = 0;
 
        for (int i = 0; i < processed; i++) {
            int buffer;
@@ -656,6 +709,11 @@ public class ALAudioRenderer implements AudioRenderer, Runnable {
            al.alSourceUnqueueBuffers(sourceId, 1, ib);
            buffer = ib.get(0);
 
+            // XXX: assume that reading from AudioStream always
+            // gives BUFFER_SIZE amount of bytes! This might not always
+            // be the case...
+            unqueuedBufferBytes += BUFFER_SIZE;
+
            boolean active = fillBuffer(stream, buffer);
 
            if (!active && !stream.isEOF()) {
@@ -682,6 +740,8 @@ public class ALAudioRenderer implements AudioRenderer, Runnable {
                break;
            }
        }
+
+        stream.setUnqueuedBufferBytes(stream.getUnqueuedBufferBytes() + unqueuedBufferBytes);
 
        return success;
    }