* Added new Jheora video system; however, as advertised, audio will NOT work. If you want to try to fix it, go ahead.

git-svn-id: https://jmonkeyengine.googlecode.com/svn/trunk@9312 75d07b2b-3a1a-0410-a2c5-0572b91ccdca
Branch: 3.0
Sha..rd 13 years ago
parent 988110e360
commit 12d3e277e6
  1. +179  engine/src/jheora/com/jme3/newvideo/InputStreamSrc.java
  2. +412  engine/src/jheora/com/jme3/newvideo/JmeVideoPipeline.java
  3. +144  engine/src/jheora/com/jme3/newvideo/TestNewVideo.java
  4. +98   engine/src/jheora/com/jme3/newvideo/TextureVideoSink.java
  5. +27   engine/src/jheora/com/jme3/newvideo/VideoTexture.java
  6. +109  engine/src/jheora/com/jme3/newvideo/YUV2Texture.java
  7. +143  engine/src/jheora/com/jme3/newvideo/YUVConv.java
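
For orientation, this is how the new classes are driven, condensed from the TestNewVideo file in this commit (a sketch; the file name and bus handler are placeholders, and error handling is omitted):

    JmeVideoPipeline p = new JmeVideoPipeline(app);    // app: the running jME3 Application
    p.getBus().addHandler(busHandler);                 // receive EOS / stream-status messages
    p.inputStream = new FileInputStream("movie.ogv");  // any Ogg/Theora stream
    p.setState(Pipeline.PLAY);
    // ...then, once per frame on the render thread:
    Texture2D tex = p.getTexture();                    // null until the video sink is negotiated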

engine/src/jheora/com/jme3/newvideo/InputStreamSrc.java
@@ -0,0 +1,179 @@
/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package com.jme3.newvideo;

import com.fluendo.jst.Buffer;
import com.fluendo.jst.Caps;
import com.fluendo.jst.Element;
import com.fluendo.jst.ElementFactory;
import com.fluendo.jst.Event;
import com.fluendo.jst.Message;
import com.fluendo.jst.Pad;
import com.fluendo.utils.Debug;
import java.io.IOException;
import java.io.InputStream;

public class InputStreamSrc extends Element {

    private InputStream input;
    private long contentLength;
    private long offset = 0;
    private long offsetLastMessage = 0;
    private long skipBytes = 0;
    private String mime;
    private Caps outCaps;
    private boolean discont = true;

    private static final int DEFAULT_READSIZE = 4096;
    private int readSize = DEFAULT_READSIZE;

    private Pad srcpad = new Pad(Pad.SRC, "src") {
        @Override
        protected void taskFunc() {
            int ret;
            int toRead;
            long left;

            // Skip to the target offset if required
            if (skipBytes > 0) {
                Debug.info("Skipping " + skipBytes + " input bytes");
                try {
                    offset += input.skip(skipBytes);
                } catch (IOException e) {
                    Debug.error("input.skip error: " + e);
                    postMessage(Message.newError(this, "File read error"));
                    return;
                }
                skipBytes = 0;
            }

            // Calculate the read size
            if (contentLength != -1) {
                left = contentLength - offset;
            } else {
                left = -1;
            }
            if (left != -1 && left < readSize) {
                toRead = (int) left;
            } else {
                toRead = readSize;
            }

            // Perform the read
            Buffer data = Buffer.create();
            data.ensureSize(toRead);
            data.offset = 0;
            try {
                if (toRead > 0) {
                    data.length = input.read(data.data, 0, toRead);
                } else {
                    data.length = -1;
                }
            } catch (Exception e) {
                e.printStackTrace();
                data.length = 0;
            }
            if (data.length <= 0) {
                /* EOS */
                postMessage(Message.newBytePosition(this, offset));
                offsetLastMessage = offset;
                try {
                    input.close();
                } catch (Exception e) {
                    e.printStackTrace();
                }
                data.free();
                Debug.log(Debug.INFO, this + " reached EOS");
                pushEvent(Event.newEOS());
                postMessage(Message.newStreamStatus(this, false, Pad.UNEXPECTED, "reached EOS"));
                pauseTask();
                return;
            }
            offset += data.length;
            if (offsetLastMessage > offset) {
                offsetLastMessage = 0;
            }
            if (offset - offsetLastMessage > contentLength / 100) {
                postMessage(Message.newBytePosition(this, offset));
                offsetLastMessage = offset;
            }

            // Negotiate capabilities
            if (srcpad.getCaps() == null) {
                String typeMime;
                typeMime = ElementFactory.typeFindMime(data.data, data.offset, data.length);
                Debug.log(Debug.INFO, "using typefind contentType: " + typeMime);
                mime = typeMime;
                outCaps = new Caps(mime);
                srcpad.setCaps(outCaps);
            }
            data.caps = outCaps;
            data.setFlag(com.fluendo.jst.Buffer.FLAG_DISCONT, discont);
            discont = false;

            // Push the data to the peer
            if ((ret = push(data)) != OK) {
                if (isFlowFatal(ret) || ret == Pad.NOT_LINKED) {
                    postMessage(Message.newError(this, "error: " + getFlowName(ret)));
                    pushEvent(Event.newEOS());
                }
                postMessage(Message.newStreamStatus(this, false, ret, "reason: " + getFlowName(ret)));
                pauseTask();
            }
        }

        @Override
        protected boolean activateFunc(int mode) {
            switch (mode) {
                case MODE_NONE:
                    postMessage(Message.newStreamStatus(this, false, Pad.WRONG_STATE, "stopping"));
                    input = null;
                    outCaps = null;
                    mime = null;
                    return stopTask();
                case MODE_PUSH:
                    contentLength = -1; // until we can determine content length from IS?
                    // if (contentLength != -1) {
                    //     postMessage(Message.newDuration(this, Format.BYTES, contentLength));
                    // }
                    if (input == null)
                        return false;
                    postMessage(Message.newStreamStatus(this, true, Pad.OK, "activating"));
                    return startTask("JmeVideo-Src-Stream-" + Debug.genId());
                default:
                    return false;
            }
        }
    };

    public String getFactoryName() {
        return "inputstreamsrc";
    }

    public InputStreamSrc(String name) {
        super(name);
        addPad(srcpad);
    }

    @Override
    public synchronized boolean setProperty(String name, java.lang.Object value) {
        if (name.equals("inputstream")) {
            input = (InputStream) value;
        } else if (name.equals("readSize")) {
            readSize = Integer.parseInt((String) value);
        } else {
            return false;
        }
        return true;
    }
}
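
The element is configured entirely through setProperty, as the pipeline's openFile() does; a minimal manual hookup might look like this (a sketch; the stream is a placeholder):

    Element src = new InputStreamSrc("src");
    src.setProperty("inputstream", new FileInputStream("movie.ogv")); // required before activation
    src.setProperty("readSize", "8192");                              // optional; parsed with Integer.parseInt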

engine/src/jheora/com/jme3/newvideo/JmeVideoPipeline.java
@@ -0,0 +1,412 @@
package com.jme3.newvideo;

import com.fluendo.jst.Caps;
import com.fluendo.jst.CapsListener;
import com.fluendo.jst.Clock;
import com.fluendo.jst.Element;
import com.fluendo.jst.ElementFactory;
import com.fluendo.jst.Format;
import com.fluendo.jst.Message;
import com.fluendo.jst.Pad;
import com.fluendo.jst.PadListener;
import com.fluendo.jst.Pipeline;
import com.fluendo.jst.Query;
import com.fluendo.utils.Debug;
import com.jme3.app.Application;
import com.jme3.texture.Texture2D;
import java.io.InputStream;

public class JmeVideoPipeline extends Pipeline implements PadListener, CapsListener {

    private boolean enableAudio;
    private boolean enableVideo;
    private int bufferSize = -1;
    private int bufferLow = -1;
    private int bufferHigh = -1;

    private Element inputstreamsrc;
    private Element buffer;
    private Element demux;
    private Element videodec;
    private Element audiodec;
    private Element videosink;
    private Element audiosink;
    private Element yuv2tex;
    private Element v_queue, v_queue2, a_queue = null;
    private Pad asinkpad, ovsinkpad;
    private Pad apad, vpad;

    public boolean usingJavaX = false;
    public InputStream inputStream;

    private Application app;

    public JmeVideoPipeline(Application app) {
        super("pipeline");
        enableAudio = true;
        enableVideo = true;
        this.app = app;
    }

    private void noSuchElement(String elemName) {
        postMessage(Message.newError(this, "no such element: " + elemName));
    }

    public void padAdded(Pad pad) {
        Caps caps = pad.getCaps();
        if (caps == null) {
            Debug.log(Debug.INFO, "pad added without caps: " + pad);
            return;
        }
        Debug.log(Debug.INFO, "pad added " + pad);
        String mime = caps.getMime();
        if (mime.equals("audio/x-vorbis")) {
            // Audio is disabled for now (see commit message): bail out
            // before the audio branch gets built.
            if (true)
                return;
            if (a_queue != null) {
                Debug.log(Debug.INFO, "More than one audio stream detected, ignoring all except first one");
                return;
            }
            a_queue = ElementFactory.makeByName("queue", "a_queue");
            if (a_queue == null) {
                noSuchElement("queue");
                return;
            }
            // If we already have a video queue: we want smooth audio playback
            // over frame completeness, so make the video queue leaky.
            if (v_queue != null) {
                v_queue.setProperty("leaky", "2"); // 2 == Queue.LEAK_DOWNSTREAM
            }
            audiodec = ElementFactory.makeByName("vorbisdec", "audiodec");
            if (audiodec == null) {
                noSuchElement("vorbisdec");
                return;
            }
            a_queue.setProperty("maxBuffers", "100");
            add(a_queue);
            add(audiodec);
            pad.link(a_queue.getPad("sink"));
            a_queue.getPad("src").link(audiodec.getPad("sink"));
            if (!audiodec.getPad("src").link(asinkpad)) {
                postMessage(Message.newError(this, "audiosink already linked"));
                return;
            }
            apad = pad;
            audiodec.setState(PAUSE);
            a_queue.setState(PAUSE);
        } else if (enableVideo && mime.equals("video/x-theora")) {
            // Constructs a chain of the form
            // oggdemux -> v_queue -> theoradec -> yuv2tex -> v_queue2 -> videosink
            v_queue = ElementFactory.makeByName("queue", "v_queue");
            v_queue2 = ElementFactory.makeByName("queue", "v_queue2");
            yuv2tex = new YUV2Texture(app);
            if (v_queue == null) {
                noSuchElement("queue");
                return;
            }
            videodec = ElementFactory.makeByName("theoradec", "videodec");
            if (videodec == null) {
                noSuchElement("theoradec");
                return;
            }
            add(videodec);
            // If we have audio: we want smooth audio playback
            // over frame completeness.
            if (a_queue != null) {
                v_queue.setProperty("leaky", "2"); // 2 == Queue.LEAK_DOWNSTREAM
            }
            v_queue.setProperty("maxBuffers", "5");
            v_queue2.setProperty("maxBuffers", "5");
            v_queue2.setProperty("isBuffer", Boolean.FALSE);
            add(v_queue);
            add(v_queue2);
            add(yuv2tex);
            pad.link(v_queue.getPad("sink"));
            v_queue.getPad("src").link(videodec.getPad("sink"));
            // WITH YUV2TEX
            videodec.getPad("src").link(yuv2tex.getPad("sink"));
            yuv2tex.getPad("src").link(v_queue2.getPad("sink"));
            v_queue2.getPad("src").link(videosink.getPad("sink"));
            // WITHOUT YUV2TEX
            // videodec.getPad("src").link(v_queue2.getPad("sink"));
            if (!v_queue2.getPad("src").link(ovsinkpad)) {
                postMessage(Message.newError(this, "videosink already linked"));
                return;
            }
            vpad = pad;
            videodec.setState(PAUSE);
            v_queue.setState(PAUSE);
            v_queue2.setState(PAUSE);
            yuv2tex.setState(PAUSE);
        }
    }

    public void padRemoved(Pad pad) {
        pad.unlink();
        if (pad == vpad) {
            Debug.log(Debug.INFO, "video pad removed " + pad);
            ovsinkpad.unlink();
            vpad = null;
        } else if (pad == apad) {
            Debug.log(Debug.INFO, "audio pad removed " + pad);
            asinkpad.unlink();
            apad = null;
        }
    }

    @Override
    public void noMorePads() {
        boolean changed = false;
        Debug.log(Debug.INFO, "all streams detected");
        if (apad == null && enableAudio) {
            Debug.log(Debug.INFO, "file has no audio, remove audiosink");
            audiosink.setState(STOP);
            remove(audiosink);
            audiosink = null;
            changed = true;
            if (videosink != null) {
                // videosink.setProperty("max-lateness", Long.toString(Long.MAX_VALUE));
                videosink.setProperty("max-lateness", "" + Clock.SECOND);
            }
        }
        if (vpad == null && enableVideo) {
            Debug.log(Debug.INFO, "file has no video, remove videosink");
            videosink.setState(STOP);
            remove(videosink);
            videosink = null;
            changed = true;
        }
        if (changed) {
            scheduleReCalcState();
        }
    }

    public Texture2D getTexture() {
        if (videosink != null) {
            return (Texture2D) videosink.getProperty("texture");
        }
        return null;
    }

    public boolean buildOggPipeline() {
        demux = ElementFactory.makeByName("oggdemux", "OggFileDemuxer");
        if (demux == null) {
            noSuchElement("oggdemux");
            return false;
        }
        buffer = ElementFactory.makeByName("queue", "BufferQueue");
        if (buffer == null) {
            demux = null;
            noSuchElement("queue");
            return false;
        }
        buffer.setProperty("isBuffer", Boolean.TRUE);
        if (bufferSize != -1) {
            buffer.setProperty("maxSize", new Integer(bufferSize * 1024));
        }
        if (bufferLow != -1) {
            buffer.setProperty("lowPercent", new Integer(bufferLow));
        }
        if (bufferHigh != -1) {
            buffer.setProperty("highPercent", new Integer(bufferHigh));
        }
        add(demux);
        add(buffer);
        // Link the input stream source with the buffer queue's sink
        inputstreamsrc.getPad("src").link(buffer.getPad("sink"));
        // Link the buffer queue's source with the Ogg demuxer's sink
        buffer.getPad("src").link(demux.getPad("sink"));
        // Receive pad events from the Ogg demuxer
        demux.addPadListener(this);
        buffer.setState(PAUSE);
        demux.setState(PAUSE);
        return true;
    }

    public void capsChanged(Caps caps) {
        String mime = caps.getMime();
        if (mime.equals("application/ogg")) {
            buildOggPipeline();
        } else {
            postMessage(Message.newError(this, "Unknown MIME type: " + mime));
        }
    }

    private boolean openFile() {
        inputstreamsrc = new InputStreamSrc("InputStreamSource");
        inputstreamsrc.setProperty("inputstream", inputStream);
        add(inputstreamsrc);
        // Receive caps from the InputStream source
        inputstreamsrc.getPad("src").addCapsListener(this);
        audiosink = newAudioSink();
        if (audiosink == null) {
            enableAudio = false;
        } else {
            asinkpad = audiosink.getPad("sink");
            add(audiosink);
        }
        if (enableVideo) {
            videosink = new TextureVideoSink("TextureVideoSink");
            videosink.setProperty("max-lateness", "" + Clock.SECOND);
            // Long.toString(enableAudio ? Clock.MSECOND * 20 : Long.MAX_VALUE));
            add(videosink);
            ovsinkpad = videosink.getPad("sink");
        }
        if (audiosink == null && videosink == null) {
            postMessage(Message.newError(this, "Both audio and video are disabled, can't play anything"));
            return false;
        }
        return true;
    }

    protected Element newAudioSink() {
        com.fluendo.plugin.AudioSink s;
        try {
            s = (com.fluendo.plugin.AudioSink) ElementFactory.makeByName("audiosinkj2", "audiosink");
            Debug.log(Debug.INFO, "using high quality javax.sound backend");
        } catch (Throwable e) {
            s = null;
            noSuchElement("audiosink");
            return null;
        }
        if (!s.test()) {
            return null;
        } else {
            return s;
        }
    }

    private boolean cleanup() {
        Debug.log(Debug.INFO, "cleanup");
        if (inputstreamsrc != null) {
            remove(inputstreamsrc);
            inputstreamsrc = null;
        }
        if (audiosink != null) {
            remove(audiosink);
            audiosink = null;
            asinkpad = null;
        }
        if (videosink != null) {
            remove(videosink);
            videosink = null;
        }
        if (buffer != null) {
            remove(buffer);
            buffer = null;
        }
        if (demux != null) {
            demux.removePadListener(this);
            remove(demux);
            demux = null;
        }
        if (v_queue != null) {
            remove(v_queue);
            v_queue = null;
        }
        if (v_queue2 != null) {
            remove(v_queue2);
            v_queue2 = null;
        }
        if (yuv2tex != null) {
            remove(yuv2tex);
            yuv2tex = null;
        }
        if (a_queue != null) {
            remove(a_queue);
            a_queue = null;
        }
        if (videodec != null) {
            remove(videodec);
            videodec = null;
        }
        if (audiodec != null) {
            remove(audiodec);
            audiodec = null;
        }
        return true;
    }

    @Override
    protected int changeState(int transition) {
        int res;
        switch (transition) {
            case STOP_PAUSE:
                if (!openFile()) {
                    return FAILURE;
                }
                break;
            default:
                break;
        }
        res = super.changeState(transition);
        switch (transition) {
            case PAUSE_STOP:
                cleanup();
                break;
            default:
                break;
        }
        return res;
    }

    @Override
    protected boolean doSendEvent(com.fluendo.jst.Event event) {
        return false; // no seek support
    }

    protected long getPosition() {
        Query q;
        long result = 0;
        q = Query.newPosition(Format.TIME);
        if (super.query(q)) {
            result = q.parsePositionValue();
        }
        return result;
    }
}
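
Putting buildOggPipeline() and padAdded() together, the chain this pipeline assembles for a Theora-only file is:

    inputstreamsrc -> queue (buffer) -> oggdemux -> v_queue -> theoradec -> yuv2tex -> v_queue2 -> TextureVideoSink

The audio branch (oggdemux -> a_queue -> vorbisdec -> audiosink) is currently short-circuited at the top of padAdded(), matching the commit message.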

engine/src/jheora/com/jme3/newvideo/TestNewVideo.java
@@ -0,0 +1,144 @@
/*
 * Copyright (c) 2009-2010 jMonkeyEngine
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *
 * * Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 *
 * * Redistributions in binary form must reproduce the above copyright
 *   notice, this list of conditions and the following disclaimer in the
 *   documentation and/or other materials provided with the distribution.
 *
 * * Neither the name of 'jMonkeyEngine' nor the names of its contributors
 *   may be used to endorse or promote products derived from this software
 *   without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
 * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package com.jme3.newvideo;

import com.fluendo.jst.BusHandler;
import com.fluendo.jst.Message;
import com.fluendo.jst.Pipeline;
import com.fluendo.utils.Debug;
import com.jme3.app.SimpleApplication;
import com.jme3.system.AppSettings;
import com.jme3.texture.Texture2D;
import com.jme3.ui.Picture;
import java.io.FileInputStream;
import java.io.FileNotFoundException;

public class TestNewVideo extends SimpleApplication implements BusHandler {

    private Picture picture;
    private JmeVideoPipeline p;
    private int frame = 0;

    public static void main(String[] args) {
        TestNewVideo app = new TestNewVideo();
        AppSettings settings = new AppSettings(true);
        // settings.setFrameRate(24);
        app.setSettings(settings);
        app.start();
    }

    private void createVideo() {
        Debug.level = Debug.INFO;
        p = new JmeVideoPipeline(this);
        p.getBus().addHandler(this);
        try {
            p.inputStream = new FileInputStream("E:\\VideoTest.ogv");
        } catch (FileNotFoundException ex) {
            ex.printStackTrace();
        }
        p.setState(Pipeline.PLAY);
    }

    @Override
    public void simpleUpdate(float tpf) {
        // if (p == null)
        //     return;
        Texture2D tex = p.getTexture();
        if (tex == null)
            return;

        if (picture != null) {
            synchronized (tex) {
                try {
                    tex.wait();
                } catch (InterruptedException ex) {
                    // ignore
                }
                tex.getImage().setUpdateNeeded();
                renderer.setTexture(0, tex);
                ((VideoTexture) tex).free();
                System.out.println("PLAY : " + (frame++));
            }
            return;
        }

        picture = new Picture("VideoPicture", true);
        picture.setPosition(0, 0);
        picture.setWidth(settings.getWidth());
        picture.setHeight(settings.getHeight());
        picture.setTexture(assetManager, tex, false);
        rootNode.attachChild(picture);
    }

    public void simpleInitApp() {
        // start video playback
        createVideo();
    }

    @Override
    public void destroy() {
        if (p != null) {
            p.setState(Pipeline.STOP);
            p.shutDown();
        }
        super.destroy();
    }

    public void handleMessage(Message msg) {
        switch (msg.getType()) {
            case Message.EOS:
                Debug.log(Debug.INFO, "EOS: playback ended");
                /*
                enqueue(new Callable<Void>(){
                    public Void call() throws Exception {
                        rootNode.detachChild(picture);
                        p.setState(Element.STOP);
                        p.shutDown();
                        p = null;
                        return null;
                    }
                });
                Texture2D tex = p.getTexture();
                synchronized (tex){
                    tex.notifyAll();
                }
                */
                break;
            case Message.STREAM_STATUS:
                Debug.info(msg.toString());
                break;
        }
    }
}

engine/src/jheora/com/jme3/newvideo/TextureVideoSink.java
@@ -0,0 +1,98 @@
package com.jme3.newvideo;

import com.fluendo.jst.Buffer;
import com.fluendo.jst.Caps;
import com.fluendo.jst.Pad;
import com.fluendo.jst.Sink;
import com.fluendo.utils.Debug;
import com.jme3.texture.Image;
import com.jme3.texture.Texture2D;

public class TextureVideoSink extends Sink {

    private Texture2D outTex;
    private int width, height;
    private int frame = 0;

    public TextureVideoSink(String name) {
        super();
        setName(name);
    }

    @Override
    protected boolean setCapsFunc(Caps caps) {
        String mime = caps.getMime();
        if (!mime.equals("video/raw")) {
            return false;
        }
        width = caps.getFieldInt("width", -1);
        height = caps.getFieldInt("height", -1);
        if (width == -1 || height == -1) {
            return false;
        }
        // aspectX = caps.getFieldInt("aspect_x", 1);
        // aspectY = caps.getFieldInt("aspect_y", 1);
        //
        // if (!ignoreAspect) {
        //     Debug.log(Debug.DEBUG, this + " dimension: " + width + "x" + height + ", aspect: " + aspectX + "/" + aspectY);
        //
        //     if (aspectY > aspectX) {
        //         height = height * aspectY / aspectX;
        //     } else {
        //         width = width * aspectX / aspectY;
        //     }
        //     Debug.log(Debug.DEBUG, this + " scaled source: " + width + "x" + height);
        // }
        outTex = new Texture2D();
        return true;
    }

    @Override
    protected int preroll(Buffer buf) {
        return render(buf);
    }

    @Override
    protected int render(Buffer buf) {
        if (buf.duplicate)
            return Pad.OK;

        // A null texture means caps were never negotiated; check this
        // before the texture is used as a lock below.
        if (outTex == null) {
            return Pad.NOT_NEGOTIATED;
        }

        Debug.log(Debug.DEBUG, this.getName() + " starting buffer " + buf);
        if (buf.object instanceof Image) {
            synchronized (outTex) {
                outTex.setImage((Image) buf.object);
                outTex.notifyAll();
                System.out.println("PUSH : " + (frame++));
            }
        } else {
            System.out.println(this + ": unknown buffer received " + buf.object);
            return Pad.ERROR;
        }
        Debug.log(Debug.DEBUG, this.getName() + " done with buffer " + buf);
        return Pad.OK;
    }

    public String getFactoryName() {
        return "texturevideosink";
    }

    @Override
    public java.lang.Object getProperty(String name) {
        if (name.equals("texture")) {
            return outTex;
        } else {
            return super.getProperty(name);
        }
    }
}
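
render() publishes a frame by swapping the sink texture's Image and calling notifyAll() on the texture's monitor; a consumer pairs with it the way simpleUpdate() in TestNewVideo above does (a sketch):

    synchronized (tex) {
        tex.wait();                       // block until render() publishes the next frame
        tex.getImage().setUpdateNeeded(); // force a re-upload on the next bind
    }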

engine/src/jheora/com/jme3/newvideo/VideoTexture.java
@@ -0,0 +1,27 @@
package com.jme3.newvideo;

import com.jme3.texture.Image;
import com.jme3.texture.Image.Format;
import com.jme3.texture.Texture2D;
import com.jme3.util.BufferUtils;
import java.util.concurrent.BlockingQueue;

public final class VideoTexture extends Texture2D {

    private BlockingQueue<VideoTexture> ownerQueue;

    public VideoTexture(int width, int height, Format format, BlockingQueue<VideoTexture> ownerQueue) {
        super(new Image(format, width, height,
                BufferUtils.createByteBuffer(width * height * format.getBitsPerPixel() / 8)));
        this.ownerQueue = ownerQueue;
    }

    public void free() {
        try {
            ownerQueue.put(this);
        } catch (InterruptedException ex) {
            ex.printStackTrace();
        }
    }
}
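
Together with its owner queue, VideoTexture forms a fixed-size frame pool: the decoder take()s a free texture, fills it, and the consumer hands it back with free() once the frame is on screen. A sketch of the contract (the pool size of 20 matches what YUV2Texture below uses; width, height, and format are placeholders):

    BlockingQueue<VideoTexture> pool = new ArrayBlockingQueue<VideoTexture>(20);
    for (int i = 0; i < 20; i++)
        pool.add(new VideoTexture(width, height, Format.RGBA8, pool));

    VideoTexture tex = pool.take(); // decoder side: blocks while all textures are in flight
    // ...fill tex.getImage(), render it, then:
    tex.free();                     // consumer side: return the texture to the pool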

engine/src/jheora/com/jme3/newvideo/YUV2Texture.java
@@ -0,0 +1,109 @@
package com.jme3.newvideo;

import com.fluendo.jheora.YUVBuffer;
import com.fluendo.jst.Buffer;
import com.fluendo.jst.Element;
import com.fluendo.jst.Event;
import com.fluendo.jst.Pad;
import com.jme3.app.Application;
import com.jme3.texture.Image.Format;
import java.awt.image.FilteredImageSource;
import java.lang.reflect.Field;
import java.nio.ByteBuffer;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.Callable;

public class YUV2Texture extends Element {

    private YUVConv conv = new YUVConv();
    private int width, height;
    private BlockingQueue<VideoTexture> frameQueue;
    private Application app;
    private int frame = 0;

    private YUVBuffer getYUVBuffer(Buffer buf) {
        if (buf.object instanceof FilteredImageSource) {
            // jheora may hand the YUV data to us wrapped in an AWT
            // FilteredImageSource; pull the underlying buffer back out
            // through its private "src" field.
            FilteredImageSource imgSrc = (FilteredImageSource) buf.object;
            try {
                Field srcField = imgSrc.getClass().getDeclaredField("src");
                srcField.setAccessible(true);
                return (YUVBuffer) srcField.get(imgSrc);
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        } else if (buf.object instanceof YUVBuffer) {
            return (YUVBuffer) buf.object;
        } else {
            throw new RuntimeException("Expected YUV buffer");
        }
    }

    private VideoTexture decode(YUVBuffer yuv) {
        if (frameQueue == null) {
            // Lazily create a pool of 20 pre-allocated textures; decoding
            // blocks below when the renderer falls behind and all are in use.
            frameQueue = new ArrayBlockingQueue<VideoTexture>(20);
            for (int i = 0; i < 20; i++) {
                VideoTexture img = new VideoTexture(yuv.y_width, yuv.y_height, Format.RGBA8, frameQueue);
                frameQueue.add(img);
            }
        }
        try {
            final VideoTexture videoTex = frameQueue.take();
            ByteBuffer outBuf = videoTex.getImage().getData(0);
            conv.convert(yuv, 0, 0, yuv.y_width, yuv.y_height);
            outBuf.clear();
            outBuf.asIntBuffer().put(conv.getRGBData()).clear();
            app.enqueue(new Callable<Void>() {
                public Void call() throws Exception {
                    videoTex.getImage().setUpdateNeeded();
                    app.getRenderer().setTexture(0, videoTex);
                    return null;
                }
            });
            return videoTex;
        } catch (InterruptedException ex) {
        }
        return null;
    }

    private Pad srcPad = new Pad(Pad.SRC, "src") {
        @Override
        protected boolean eventFunc(Event event) {
            return sinkPad.pushEvent(event);
        }
    };

    private Pad sinkPad = new Pad(Pad.SINK, "sink") {
        @Override
        protected boolean eventFunc(Event event) {
            return srcPad.pushEvent(event);
        }

        @Override
        protected int chainFunc(Buffer buf) {
            // Replace the YUV payload with a ready-to-render texture
            // before pushing the buffer downstream.
            YUVBuffer yuv = getYUVBuffer(buf);
            buf.object = decode(yuv);
            System.out.println("DECODE: " + (frame++));
            return srcPad.push(buf);
        }
    };

    public YUV2Texture(Application app) {
        super("YUV2Texture");
        addPad(srcPad);
        addPad(sinkPad);
        this.app = app;
    }

    @Override
    public String getFactoryName() {
        return "yuv2tex";
    }
}

engine/src/jheora/com/jme3/newvideo/YUVConv.java
@@ -0,0 +1,143 @@
/*
 * Copyright (c) 2009-2010 jMonkeyEngine
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *
 * * Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 *
 * * Redistributions in binary form must reproduce the above copyright
 *   notice, this list of conditions and the following disclaimer in the
 *   documentation and/or other materials provided with the distribution.
 *
 * * Neither the name of 'jMonkeyEngine' nor the names of its contributors
 *   may be used to endorse or promote products derived from this software
 *   without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
 * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package com.jme3.newvideo;

import com.fluendo.jheora.YUVBuffer;

@Deprecated
public final class YUVConv {

    private int[] pixels;

    private static final int VAL_RANGE = 256;
    private static final int SHIFT = 16;
    private static final int CR_FAC = (int) (1.402 * (1 << SHIFT));
    private static final int CB_FAC = (int) (1.772 * (1 << SHIFT));
    private static final int CR_DIFF_FAC = (int) (0.71414 * (1 << SHIFT));
    private static final int CB_DIFF_FAC = (int) (0.34414 * (1 << SHIFT));

    // Lookup tables mapping a biased value range to clamped, pre-shifted
    // R, G, and B channel contributions.
    private static int[] r_tab = new int[VAL_RANGE * 3];
    private static int[] g_tab = new int[VAL_RANGE * 3];
    private static int[] b_tab = new int[VAL_RANGE * 3];

    static {
        setupRgbYuvAccelerators();
    }

    // Branchless clamp of val to the range [0, 255].
    private static final short clamp255(int val) {
        val -= 255;
        val = -(255 + ((val >> (31)) & val));
        return (short) -((val >> 31) & val);
    }

    private static void setupRgbYuvAccelerators() {
        for (int i = 0; i < VAL_RANGE * 3; i++) {
            r_tab[i] = clamp255(i - VAL_RANGE);
            g_tab[i] = clamp255(i - VAL_RANGE) << 8;
            b_tab[i] = clamp255(i - VAL_RANGE) << 16;
        }
    }

    public YUVConv() {
    }

    public int[] getRGBData() {
        return pixels;
    }

    public void convert(YUVBuffer yuv, int xOff, int yOff, int width, int height) {
        if (pixels == null) {
            pixels = new int[width * height];
        }

        // Set up starting values for YUV pointers
        int YPtr = yuv.y_offset + xOff + yOff * (yuv.y_stride);
        int YPtr2 = YPtr + yuv.y_stride;
        int UPtr = yuv.u_offset + xOff / 2 + (yOff / 2) * (yuv.uv_stride);
        int VPtr = yuv.v_offset + xOff / 2 + (yOff / 2) * (yuv.uv_stride);
        int RGBPtr = 0;
        int RGBPtr2 = width;
        int width2 = width / 2;
        int height2 = height / 2;

        // Set the line step for the Y and UV planes and YPtr2
        int YStep = yuv.y_stride * 2 - (width2) * 2;
        int UVStep = yuv.uv_stride - (width2);
        int RGBStep = width;

        for (int i = 0; i < height2; i++) {
            for (int j = 0; j < width2; j++) {
                // Process groups of four pixels sharing one U/V sample (4:2:0)
                int UFactor = yuv.data[UPtr++] - 128;
                int VFactor = yuv.data[VPtr++] - 128;
                int GFactor = UFactor * CR_DIFF_FAC + VFactor * CB_DIFF_FAC - (VAL_RANGE << SHIFT);
                UFactor = UFactor * CR_FAC + (VAL_RANGE << SHIFT);
                VFactor = VFactor * CB_FAC + (VAL_RANGE << SHIFT);

                int YVal = yuv.data[YPtr] << SHIFT;
                pixels[RGBPtr] = r_tab[(YVal + VFactor) >> SHIFT]
                               | b_tab[(YVal + UFactor) >> SHIFT]
                               | g_tab[(YVal - GFactor) >> SHIFT];
                YVal = yuv.data[YPtr + 1] << SHIFT;
                pixels[RGBPtr + 1] = r_tab[(YVal + VFactor) >> SHIFT]
                                   | b_tab[(YVal + UFactor) >> SHIFT]
                                   | g_tab[(YVal - GFactor) >> SHIFT];
                YVal = yuv.data[YPtr2] << SHIFT;
                pixels[RGBPtr2] = r_tab[(YVal + VFactor) >> SHIFT]
                                | b_tab[(YVal + UFactor) >> SHIFT]
                                | g_tab[(YVal - GFactor) >> SHIFT];
                YVal = yuv.data[YPtr2 + 1] << SHIFT;
                pixels[RGBPtr2 + 1] = r_tab[(YVal + VFactor) >> SHIFT]
                                    | b_tab[(YVal + UFactor) >> SHIFT]
                                    | g_tab[(YVal - GFactor) >> SHIFT];
                YPtr += 2;
                YPtr2 += 2;
                RGBPtr += 2;
                RGBPtr2 += 2;
            }
            // Advance the pointers to the next pair of lines
            YPtr += YStep;
            YPtr2 += YStep;
            UPtr += UVStep;
            VPtr += UVStep;
            RGBPtr += RGBStep;
            RGBPtr2 += RGBStep;
        }
    }
}
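
For reference, the *_FAC constants above are Q16 fixed-point forms of the usual BT.601 YCbCr-to-RGB coefficients; a plain floating-point version of that mapping for one pixel would be (a sketch, not part of the engine, using clamp255 as defined above):

    static int yuvPixelToRgb(int y, int cb, int cr) {
        int r = clamp255(y + (int) (1.402   * (cr - 128)));
        int g = clamp255(y - (int) (0.34414 * (cb - 128)) - (int) (0.71414 * (cr - 128)));
        int b = clamp255(y + (int) (1.772   * (cb - 128)));
        return r | (g << 8) | (b << 16); // same channel packing as r_tab/g_tab/b_tab
    }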