From 9b9db3bd317d646791bada644499ccc4cdb363cd Mon Sep 17 00:00:00 2001 From: "jul..om" Date: Tue, 23 Oct 2012 22:11:44 +0000 Subject: [PATCH] Updates the renderers based on JOGL 2.0 (and JOAL 1.1.3) git-svn-id: https://jmonkeyengine.googlecode.com/svn/trunk@9878 75d07b2b-3a1a-0410-a2c5-0572b91ccdca --- .../com/jme/audio/joal/JoalAudioRenderer.java | 1107 ++++++++++ .../jme3/renderer/jogl/JoglGL1Renderer.java | 1226 +++++++++++ .../com/jme3/renderer/jogl/JoglRenderer.java | 1877 +++++++++++++++++ .../com/jme3/renderer/jogl/TextureUtil.java | 386 ++++ .../jme3/system/jogl/JoglAbstractDisplay.java | 182 ++ .../jogl/com/jme3/system/jogl/JoglCanvas.java | 129 ++ .../com/jme3/system/jogl/JoglContext.java | 146 ++ .../com/jme3/system/jogl/JoglDisplay.java | 307 +++ 8 files changed, 5360 insertions(+) create mode 100644 engine/src/jogl/com/jme/audio/joal/JoalAudioRenderer.java create mode 100644 engine/src/jogl/com/jme3/renderer/jogl/JoglGL1Renderer.java create mode 100644 engine/src/jogl/com/jme3/renderer/jogl/JoglRenderer.java create mode 100644 engine/src/jogl/com/jme3/renderer/jogl/TextureUtil.java create mode 100644 engine/src/jogl/com/jme3/system/jogl/JoglAbstractDisplay.java create mode 100644 engine/src/jogl/com/jme3/system/jogl/JoglCanvas.java create mode 100644 engine/src/jogl/com/jme3/system/jogl/JoglContext.java create mode 100644 engine/src/jogl/com/jme3/system/jogl/JoglDisplay.java diff --git a/engine/src/jogl/com/jme/audio/joal/JoalAudioRenderer.java b/engine/src/jogl/com/jme/audio/joal/JoalAudioRenderer.java new file mode 100644 index 000000000..b69238248 --- /dev/null +++ b/engine/src/jogl/com/jme/audio/joal/JoalAudioRenderer.java @@ -0,0 +1,1107 @@ +/* + * Copyright (c) 2009-2012 jMonkeyEngine + * All rights reserved. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * + * * Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * + * * Neither the name of 'jMonkeyEngine' nor the names of its contributors + * may be used to endorse or promote products derived from this software + * without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED + * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF + * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ +package com.jme.audio.joal; + +import com.jme3.audio.AudioNode.Status; +import com.jme3.audio.*; +import com.jme3.math.Vector3f; +import com.jme3.util.BufferUtils; +import com.jme3.util.NativeObjectManager; +import com.jogamp.common.nio.Buffers; +import com.jogamp.openal.*; +import com.jogamp.openal.util.ALut; +import java.nio.ByteBuffer; +import java.nio.FloatBuffer; +import java.nio.IntBuffer; +import java.util.ArrayList; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.logging.Level; +import java.util.logging.Logger; + +public class JoalAudioRenderer implements AudioRenderer, Runnable { + + private static final Logger logger = Logger.getLogger(com.jme3.audio.lwjgl.LwjglAudioRenderer.class.getName()); + private final NativeObjectManager objManager = new NativeObjectManager(); + // When multiplied by STREAMING_BUFFER_COUNT, will equal 44100 * 2 * 2 + // which is exactly 1 second of audio. + private static final int BUFFER_SIZE = 35280; + private static final int STREAMING_BUFFER_COUNT = 5; + private final static int MAX_NUM_CHANNELS = 64; + private IntBuffer ib = BufferUtils.createIntBuffer(1); + private final FloatBuffer fb = BufferUtils.createVector3Buffer(2); + private final ByteBuffer nativeBuf = BufferUtils.createByteBuffer(BUFFER_SIZE); + private final byte[] arrayBuf = new byte[BUFFER_SIZE]; + private int[] channels; + private AudioNode[] chanSrcs; + private int nextChan = 0; + private ArrayList freeChans = new ArrayList(); + private Listener listener; + private boolean audioDisabled = false; + private boolean supportEfx = false; + private int auxSends = 0; + private int reverbFx = -1; + private int reverbFxSlot = -1; + // Update audio 20 times per second + private static final float UPDATE_RATE = 0.05f; + private final Thread audioThread = new Thread(this, "jME3 Audio Thread"); + private final AtomicBoolean threadLock = new AtomicBoolean(false); + + private ALC alc; + private AL al; + + public JoalAudioRenderer() { + } + + public 
void initialize() { + if (!audioThread.isAlive()) { + audioThread.setDaemon(true); + audioThread.setPriority(Thread.NORM_PRIORITY + 1); + audioThread.start(); + } else { + throw new IllegalStateException("Initialize already called"); + } + } + + private void checkDead() { + if (audioThread.getState() == Thread.State.TERMINATED) { + throw new IllegalStateException("Audio thread is terminated"); + } + } + + public void run() { + initInThread(); + synchronized (threadLock) { + threadLock.set(true); + threadLock.notifyAll(); + } + + long updateRateNanos = (long) (UPDATE_RATE * 1000000000); + mainloop: + while (true) { + long startTime = System.nanoTime(); + + if (Thread.interrupted()) { + break; + } + + synchronized (threadLock) { + updateInThread(UPDATE_RATE); + } + + long endTime = System.nanoTime(); + long diffTime = endTime - startTime; + + if (diffTime < updateRateNanos) { + long desiredEndTime = startTime + updateRateNanos; + while (System.nanoTime() < desiredEndTime) { + try { + Thread.sleep(1); + } catch (InterruptedException ex) { + break mainloop; + } + } + } + } + + synchronized (threadLock) { + cleanupInThread(); + } + } + + public void initInThread() { + try { + ALut.alutInit(); + alc = ALFactory.getALC(); + al = ALFactory.getAL(); + + // Get handle to default device. + ALCdevice device = alc.alcOpenDevice(null); + if (device == null) { + throw new ALException("Error opening default OpenAL device"); + } + + // Get the device specifier. 
+ String deviceName = alc.alcGetString(device, ALC.ALC_DEVICE_SPECIFIER); + if (deviceName == null) { + throw new ALException("Error getting specifier for default OpenAL device"); + } + + logger.log(Level.FINER, "Audio Device: {0}", deviceName); + logger.log(Level.FINER, "Audio Vendor: {0}", al.alGetString(ALConstants.AL_VENDOR)); + logger.log(Level.FINER, "Audio Renderer: {0}", al.alGetString(ALConstants.AL_RENDERER)); + logger.log(Level.FINER, "Audio Version: {0}", al.alGetString(ALConstants.AL_VERSION)); + + // Create audio context. + ALCcontext context = alc.alcCreateContext(device, null); + if (context == null) { + throw new ALException("Error creating OpenAL context"); + } + + // Set active context. + alc.alcMakeContextCurrent(context); + + // Check for an error. + if (alc.alcGetError(device) != ALC.ALC_NO_ERROR) { + throw new ALException("Error making OpenAL context current"); + } + + // Find maximum # of sources supported by this implementation + ArrayList channelList = new ArrayList(); + IntBuffer channelsNioBuffer = Buffers.newDirectIntBuffer(MAX_NUM_CHANNELS); + al.alGenSources(MAX_NUM_CHANNELS, channelsNioBuffer); + for (int i = 0; i < MAX_NUM_CHANNELS; i++) { + int chan = channelsNioBuffer.get(i); + if (chan != 0) { + channelList.add(chan); + } + } + + channels = new int[channelList.size()]; + for (int i = 0; i < channels.length; i++) { + channels[i] = channelList.get(i); + } + + ib = BufferUtils.createIntBuffer(channels.length); + chanSrcs = new AudioNode[channels.length]; + + logger.log(Level.INFO, "AudioRenderer supports {0} channels", channels.length); + + supportEfx = alc.alcIsExtensionPresent(device, "ALC_EXT_EFX"); + if (supportEfx) { + ib.position(0).limit(1); + alc.alcGetIntegerv(device, AL.ALC_EFX_MAJOR_VERSION, 1, ib); + int major = ib.get(0); + ib.position(0).limit(1); + alc.alcGetIntegerv(device, AL.ALC_EFX_MINOR_VERSION, 1, ib); + int minor = ib.get(0); + logger.log(Level.INFO, "Audio effect extension version: {0}.{1}", new 
Object[]{major, minor}); + + alc.alcGetIntegerv(device, AL.ALC_MAX_AUXILIARY_SENDS, 1, ib); + auxSends = ib.get(0); + logger.log(Level.INFO, "Audio max auxilary sends: {0}", auxSends); + + // create slot + ib.position(0).limit(1); + al.alGenAuxiliaryEffectSlots(1, ib); + reverbFxSlot = ib.get(0); + + // create effect + ib.position(0).limit(1); + al.alGenEffects(1, ib); + reverbFx = ib.get(0); + al.alEffecti(reverbFx, AL.AL_EFFECT_TYPE, AL.AL_EFFECT_REVERB); + + // attach reverb effect to effect slot + al.alAuxiliaryEffectSloti(reverbFxSlot, AL.AL_EFFECTSLOT_EFFECT, reverbFx); + } else { + logger.log(Level.WARNING, "OpenAL EFX not available! Audio effects won't work."); + } + } catch (ALException ex) { + logger.log(Level.SEVERE, "Failed to load audio library", ex); + audioDisabled = true; + } catch (UnsatisfiedLinkError ex) { + logger.log(Level.SEVERE, "Failed to load audio library", ex); + audioDisabled = true; + } + } + + public void cleanupInThread() { + if (audioDisabled) { + //FIXME + //AL.destroy(); + return; + } + + // stop any playing channels + for (int i = 0; i < chanSrcs.length; i++) { + if (chanSrcs[i] != null) { + clearChannel(i); + } + } + + // delete channel-based sources + ib.clear(); + ib.put(channels); + ib.flip(); + al.alDeleteSources(ib.limit(), ib); + + // delete audio buffers and filters + objManager.deleteAllObjects(this); + + if (supportEfx) { + ib.position(0).limit(1); + ib.put(0, reverbFx); + al.alDeleteEffects(1, ib); + + // If this is not allocated, why is it deleted? + // Commented out to fix native crash in OpenAL. 
+ ib.position(0).limit(1); + ib.put(0, reverbFxSlot); + al.alDeleteAuxiliaryEffectSlots(1, ib); + } + + //FIXME + //AL.destroy(); + } + + public void cleanup() { + // kill audio thread + if (audioThread.isAlive()) { + audioThread.interrupt(); + } + } + + private void updateFilter(Filter f) { + int id = f.getId(); + if (id == -1) { + ib.position(0).limit(1); + al.alGenFilters(1, ib); + id = ib.get(0); + f.setId(id); + + objManager.registerForCleanup(f); + } + + if (f instanceof LowPassFilter) { + LowPassFilter lpf = (LowPassFilter) f; + al.alFilteri(id, AL.AL_FILTER_TYPE, AL.AL_FILTER_LOWPASS); + al.alFilterf(id, AL.AL_LOWPASS_GAIN, lpf.getVolume()); + al.alFilterf(id, AL.AL_LOWPASS_GAINHF, lpf.getHighFreqVolume()); + } else { + throw new UnsupportedOperationException("Filter type unsupported: " + + f.getClass().getName()); + } + + f.clearUpdateNeeded(); + } + + public void updateSourceParam(AudioNode src, AudioParam param) { + checkDead(); + synchronized (threadLock) { + while (!threadLock.get()) { + try { + threadLock.wait(); + } catch (InterruptedException ex) { + } + } + if (audioDisabled) { + return; + } + + // There is a race condition in AudioNode that can + // cause this to be called for a node that has been + // detached from its channel. For example, setVolume() + // called from the render thread may see that that AudioNode + // still has a channel value but the audio thread may + // clear that channel before setVolume() gets to call + // updateSourceParam() (because the audio stopped playing + // on its own right as the volume was set). 
In this case, + // it should be safe to just ignore the update + if (src.getChannel() < 0) { + return; + } + + assert src.getChannel() >= 0; + + int id = channels[src.getChannel()]; + switch (param) { + case Position: + if (!src.isPositional()) { + return; + } + + Vector3f pos = src.getWorldTranslation(); + al.alSource3f(id, ALConstants.AL_POSITION, pos.x, pos.y, pos.z); + break; + case Velocity: + if (!src.isPositional()) { + return; + } + + Vector3f vel = src.getVelocity(); + al.alSource3f(id, ALConstants.AL_VELOCITY, vel.x, vel.y, vel.z); + break; + case MaxDistance: + if (!src.isPositional()) { + return; + } + + al.alSourcef(id, ALConstants.AL_MAX_DISTANCE, src.getMaxDistance()); + break; + case RefDistance: + if (!src.isPositional()) { + return; + } + + al.alSourcef(id, ALConstants.AL_REFERENCE_DISTANCE, src.getRefDistance()); + break; + case ReverbFilter: + if (!supportEfx || !src.isPositional() || !src.isReverbEnabled()) { + return; + } + + int filter = AL.AL_FILTER_NULL; + if (src.getReverbFilter() != null) { + Filter f = src.getReverbFilter(); + if (f.isUpdateNeeded()) { + updateFilter(f); + } + filter = f.getId(); + } + al.alSource3i(id, AL.AL_AUXILIARY_SEND_FILTER, reverbFxSlot, 0, filter); + break; + case ReverbEnabled: + if (!supportEfx || !src.isPositional()) { + return; + } + + if (src.isReverbEnabled()) { + updateSourceParam(src, AudioParam.ReverbFilter); + } else { + al.alSource3i(id, AL.AL_AUXILIARY_SEND_FILTER, 0, 0, AL.AL_FILTER_NULL); + } + break; + case IsPositional: + if (!src.isPositional()) { + // Play in headspace + al.alSourcei(id, ALConstants.AL_SOURCE_RELATIVE, ALConstants.AL_TRUE); + al.alSource3f(id, ALConstants.AL_POSITION, 0, 0, 0); + al.alSource3f(id, ALConstants.AL_VELOCITY, 0, 0, 0); + + // Disable reverb + al.alSource3i(id, AL.AL_AUXILIARY_SEND_FILTER, 0, 0, AL.AL_FILTER_NULL); + } else { + al.alSourcei(id, ALConstants.AL_SOURCE_RELATIVE, ALConstants.AL_FALSE); + updateSourceParam(src, AudioParam.Position); + 
updateSourceParam(src, AudioParam.Velocity); + updateSourceParam(src, AudioParam.MaxDistance); + updateSourceParam(src, AudioParam.RefDistance); + updateSourceParam(src, AudioParam.ReverbEnabled); + } + break; + case Direction: + if (!src.isDirectional()) { + return; + } + + Vector3f dir = src.getDirection(); + al.alSource3f(id, ALConstants.AL_DIRECTION, dir.x, dir.y, dir.z); + break; + case InnerAngle: + if (!src.isDirectional()) { + return; + } + + al.alSourcef(id, ALConstants.AL_CONE_INNER_ANGLE, src.getInnerAngle()); + break; + case OuterAngle: + if (!src.isDirectional()) { + return; + } + + al.alSourcef(id, ALConstants.AL_CONE_OUTER_ANGLE, src.getOuterAngle()); + break; + case IsDirectional: + if (src.isDirectional()) { + updateSourceParam(src, AudioParam.Direction); + updateSourceParam(src, AudioParam.InnerAngle); + updateSourceParam(src, AudioParam.OuterAngle); + al.alSourcef(id, ALConstants.AL_CONE_OUTER_GAIN, 0); + } else { + al.alSourcef(id, ALConstants.AL_CONE_INNER_ANGLE, 360); + al.alSourcef(id, ALConstants.AL_CONE_OUTER_ANGLE, 360); + al.alSourcef(id, ALConstants.AL_CONE_OUTER_GAIN, 1f); + } + break; + case DryFilter: + if (!supportEfx) { + return; + } + + if (src.getDryFilter() != null) { + Filter f = src.getDryFilter(); + if (f.isUpdateNeeded()) { + updateFilter(f); + + // NOTE: must re-attach filter for changes to apply. 
+ al.alSourcei(id, AL.AL_DIRECT_FILTER, f.getId()); + } + } else { + al.alSourcei(id, AL.AL_DIRECT_FILTER, AL.AL_FILTER_NULL); + } + break; + case Looping: + if (src.isLooping()) { + if (!(src.getAudioData() instanceof AudioStream)) { + al.alSourcei(id, ALConstants.AL_LOOPING, ALConstants.AL_TRUE); + } + } else { + al.alSourcei(id, ALConstants.AL_LOOPING, ALConstants.AL_FALSE); + } + break; + case Volume: + al.alSourcef(id, ALConstants.AL_GAIN, src.getVolume()); + break; + case Pitch: + al.alSourcef(id, ALConstants.AL_PITCH, src.getPitch()); + break; + } + } + } + + private void setSourceParams(int id, AudioNode src, boolean forceNonLoop) { + if (src.isPositional()) { + Vector3f pos = src.getWorldTranslation(); + Vector3f vel = src.getVelocity(); + al.alSource3f(id, ALConstants.AL_POSITION, pos.x, pos.y, pos.z); + al.alSource3f(id, ALConstants.AL_VELOCITY, vel.x, vel.y, vel.z); + al.alSourcef(id, ALConstants.AL_MAX_DISTANCE, src.getMaxDistance()); + al.alSourcef(id, ALConstants.AL_REFERENCE_DISTANCE, src.getRefDistance()); + al.alSourcei(id, ALConstants.AL_SOURCE_RELATIVE, ALConstants.AL_FALSE); + + if (src.isReverbEnabled() && supportEfx) { + int filter = AL.AL_FILTER_NULL; + if (src.getReverbFilter() != null) { + Filter f = src.getReverbFilter(); + if (f.isUpdateNeeded()) { + updateFilter(f); + } + filter = f.getId(); + } + al.alSource3i(id, AL.AL_AUXILIARY_SEND_FILTER, reverbFxSlot, 0, filter); + } + } else { + // play in headspace + al.alSourcei(id, ALConstants.AL_SOURCE_RELATIVE, ALConstants.AL_TRUE); + al.alSource3f(id, ALConstants.AL_POSITION, 0, 0, 0); + al.alSource3f(id, ALConstants.AL_VELOCITY, 0, 0, 0); + } + + if (src.getDryFilter() != null && supportEfx) { + Filter f = src.getDryFilter(); + if (f.isUpdateNeeded()) { + updateFilter(f); + + // NOTE: must re-attach filter for changes to apply. 
+ al.alSourcei(id, AL.AL_DIRECT_FILTER, f.getId()); + } + } + + if (forceNonLoop) { + al.alSourcei(id, ALConstants.AL_LOOPING, ALConstants.AL_FALSE); + } else { + al.alSourcei(id, ALConstants.AL_LOOPING, src.isLooping() ? ALConstants.AL_TRUE : ALConstants.AL_FALSE); + } + al.alSourcef(id, ALConstants.AL_GAIN, src.getVolume()); + al.alSourcef(id, ALConstants.AL_PITCH, src.getPitch()); + al.alSourcef(id, AL.AL_SEC_OFFSET, src.getTimeOffset()); + + if (src.isDirectional()) { + Vector3f dir = src.getDirection(); + al.alSource3f(id, ALConstants.AL_DIRECTION, dir.x, dir.y, dir.z); + al.alSourcef(id, ALConstants.AL_CONE_INNER_ANGLE, src.getInnerAngle()); + al.alSourcef(id, ALConstants.AL_CONE_OUTER_ANGLE, src.getOuterAngle()); + al.alSourcef(id, ALConstants.AL_CONE_OUTER_GAIN, 0); + } else { + al.alSourcef(id, ALConstants.AL_CONE_INNER_ANGLE, 360); + al.alSourcef(id, ALConstants.AL_CONE_OUTER_ANGLE, 360); + al.alSourcef(id, ALConstants.AL_CONE_OUTER_GAIN, 1f); + } + } + + public void updateListenerParam(Listener listener, ListenerParam param) { + checkDead(); + synchronized (threadLock) { + while (!threadLock.get()) { + try { + threadLock.wait(); + } catch (InterruptedException ex) { + } + } + if (audioDisabled) { + return; + } + + switch (param) { + case Position: + Vector3f pos = listener.getLocation(); + al.alListener3f(ALConstants.AL_POSITION, pos.x, pos.y, pos.z); + break; + case Rotation: + Vector3f dir = listener.getDirection(); + Vector3f up = listener.getUp(); + fb.rewind(); + fb.put(dir.x).put(dir.y).put(dir.z); + fb.put(up.x).put(up.y).put(up.z); + fb.flip(); + al.alListenerfv(ALConstants.AL_ORIENTATION, fb); + break; + case Velocity: + Vector3f vel = listener.getVelocity(); + al.alListener3f(ALConstants.AL_VELOCITY, vel.x, vel.y, vel.z); + break; + case Volume: + al.alListenerf(ALConstants.AL_GAIN, listener.getVolume()); + break; + } + } + } + + private void setListenerParams(Listener listener) { + Vector3f pos = listener.getLocation(); + Vector3f vel = 
listener.getVelocity(); + Vector3f dir = listener.getDirection(); + Vector3f up = listener.getUp(); + + al.alListener3f(ALConstants.AL_POSITION, pos.x, pos.y, pos.z); + al.alListener3f(ALConstants.AL_VELOCITY, vel.x, vel.y, vel.z); + fb.rewind(); + fb.put(dir.x).put(dir.y).put(dir.z); + fb.put(up.x).put(up.y).put(up.z); + fb.flip(); + al.alListenerfv(ALConstants.AL_ORIENTATION, fb); + al.alListenerf(ALConstants.AL_GAIN, listener.getVolume()); + } + + private int newChannel() { + if (freeChans.size() > 0) { + return freeChans.remove(0); + } else if (nextChan < channels.length) { + return nextChan++; + } else { + return -1; + } + } + + private void freeChannel(int index) { + if (index == nextChan - 1) { + nextChan--; + } else { + freeChans.add(index); + } + } + + public void setEnvironment(Environment env) { + checkDead(); + synchronized (threadLock) { + while (!threadLock.get()) { + try { + threadLock.wait(); + } catch (InterruptedException ex) { + } + } + if (audioDisabled || !supportEfx) { + return; + } + + al.alEffectf(reverbFx, AL.AL_REVERB_DENSITY, env.getDensity()); + al.alEffectf(reverbFx, AL.AL_REVERB_DIFFUSION, env.getDiffusion()); + al.alEffectf(reverbFx, AL.AL_REVERB_GAIN, env.getGain()); + al.alEffectf(reverbFx, AL.AL_REVERB_GAINHF, env.getGainHf()); + al.alEffectf(reverbFx, AL.AL_REVERB_DECAY_TIME, env.getDecayTime()); + al.alEffectf(reverbFx, AL.AL_REVERB_DECAY_HFRATIO, env.getDecayHFRatio()); + al.alEffectf(reverbFx, AL.AL_REVERB_REFLECTIONS_GAIN, env.getReflectGain()); + al.alEffectf(reverbFx, AL.AL_REVERB_REFLECTIONS_DELAY, env.getReflectDelay()); + al.alEffectf(reverbFx, AL.AL_REVERB_LATE_REVERB_GAIN, env.getLateReverbGain()); + al.alEffectf(reverbFx, AL.AL_REVERB_LATE_REVERB_DELAY, env.getLateReverbDelay()); + al.alEffectf(reverbFx, AL.AL_REVERB_AIR_ABSORPTION_GAINHF, env.getAirAbsorbGainHf()); + al.alEffectf(reverbFx, AL.AL_REVERB_ROOM_ROLLOFF_FACTOR, env.getRoomRolloffFactor()); + + // attach effect to slot + 
al.alAuxiliaryEffectSloti(reverbFxSlot, AL.AL_EFFECTSLOT_EFFECT, reverbFx); + } + } + + private boolean fillBuffer(AudioStream stream, int id) { + int size = 0; + int result; + + while (size < arrayBuf.length) { + result = stream.readSamples(arrayBuf, size, arrayBuf.length - size); + + if (result > 0) { + size += result; + } else { + break; + } + } + + if (size == 0) { + return false; + } + + nativeBuf.clear(); + nativeBuf.put(arrayBuf, 0, size); + nativeBuf.flip(); + + al.alBufferData(id, convertFormat(stream), nativeBuf, size, stream.getSampleRate()); + + return true; + } + + private boolean fillStreamingSource(int sourceId, AudioStream stream) { + if (!stream.isOpen()) { + return false; + } + + boolean active = true; + al.alGetSourcei(sourceId, AL.AL_BUFFERS_PROCESSED, ib); + int processed = ib.get(0); + +// while((processed--) != 0){ + if (processed > 0) { + int buffer; + + ib.position(0).limit(1); + al.alSourceUnqueueBuffers(sourceId, 1, ib); + buffer = ib.get(0); + + active = fillBuffer(stream, buffer); + + ib.position(0).limit(1); + ib.put(0, buffer); + al.alSourceQueueBuffers(sourceId, 1, ib); + } + + if (!active && stream.isOpen()) { + stream.close(); + } + + return active; + } + + private boolean attachStreamToSource(int sourceId, AudioStream stream) { + boolean active = true; + for (int id : stream.getIds()) { + active = fillBuffer(stream, id); + ib.position(0).limit(1); + ib.put(id).flip(); + al.alSourceQueueBuffers(sourceId, 1, ib); + } + return active; + } + + private boolean attachBufferToSource(int sourceId, AudioBuffer buffer) { + al.alSourcei(sourceId, ALConstants.AL_BUFFER, buffer.getId()); + return true; + } + + private boolean attachAudioToSource(int sourceId, AudioData data) { + if (data instanceof AudioBuffer) { + return attachBufferToSource(sourceId, (AudioBuffer) data); + } else if (data instanceof AudioStream) { + return attachStreamToSource(sourceId, (AudioStream) data); + } + throw new UnsupportedOperationException(); + } + + private 
void clearChannel(int index) { + // make room at this channel + if (chanSrcs[index] != null) { + AudioNode src = chanSrcs[index]; + + int sourceId = channels[index]; + al.alSourceStop(sourceId); + + if (src.getAudioData() instanceof AudioStream) { + AudioStream str = (AudioStream) src.getAudioData(); + ib.position(0).limit(STREAMING_BUFFER_COUNT); + ib.put(str.getIds()).flip(); + al.alSourceUnqueueBuffers(sourceId, 1, ib); + } else if (src.getAudioData() instanceof AudioBuffer) { + al.alSourcei(sourceId, AL.AL_BUFFER, 0); + } + + if (src.getDryFilter() != null && supportEfx) { + // detach filter + al.alSourcei(sourceId, AL.AL_DIRECT_FILTER, AL.AL_FILTER_NULL); + } + if (src.isPositional()) { + AudioNode pas = (AudioNode) src; + if (pas.isReverbEnabled() && supportEfx) { + al.alSource3i(sourceId, AL.AL_AUXILIARY_SEND_FILTER, 0, 0, AL.AL_FILTER_NULL); + } + } + + chanSrcs[index] = null; + } + } + + public void update(float tpf) { + // does nothing + } + + public void updateInThread(float tpf) { + if (audioDisabled) { + return; + } + + for (int i = 0; i < channels.length; i++) { + AudioNode src = chanSrcs[i]; + if (src == null) { + continue; + } + + int sourceId = channels[i]; + + // is the source bound to this channel + // if false, it's an instanced playback + boolean boundSource = i == src.getChannel(); + + // source's data is streaming + boolean streaming = src.getAudioData() instanceof AudioStream; + + // only buffered sources can be bound + assert (boundSource && streaming) || (!streaming); + + ib.position(0).limit(1); + al.alGetSourcei(sourceId, AL.AL_SOURCE_STATE, ib); + int state = ib.get(0); + boolean wantPlaying = src.getStatus() == AudioNode.Status.Playing; + boolean stopped = state == ALConstants.AL_STOPPED; + + if (streaming && wantPlaying) { + AudioStream stream = (AudioStream) src.getAudioData(); + if (stream.isOpen()) { + fillStreamingSource(sourceId, stream); + if (stopped) { + al.alSourcePlay(sourceId); + } + } else { + if (stopped) { + // became 
inactive + src.setStatus(AudioNode.Status.Stopped); + src.setChannel(-1); + clearChannel(i); + freeChannel(i); + + // And free the audio since it cannot be + // played again anyway. + deleteAudioData(stream); + } + } + } else if (!streaming) { + boolean paused = state == ALConstants.AL_PAUSED; + + // make sure OAL pause state & source state coincide + assert (src.getStatus() == AudioNode.Status.Paused && paused) || (!paused); + + if (stopped) { + if (boundSource) { + src.setStatus(AudioNode.Status.Stopped); + src.setChannel(-1); + } + clearChannel(i); + freeChannel(i); + } + } + } + + // Delete any unused objects. + objManager.deleteUnused(this); + } + + public void setListener(Listener listener) { + checkDead(); + synchronized (threadLock) { + while (!threadLock.get()) { + try { + threadLock.wait(); + } catch (InterruptedException ex) { + } + } + if (audioDisabled) { + return; + } + + if (this.listener != null) { + // previous listener no longer associated with current + // renderer + this.listener.setRenderer(null); + } + + this.listener = listener; + this.listener.setRenderer(this); + setListenerParams(listener); + } + } + + public void playSourceInstance(AudioNode src) { + checkDead(); + synchronized (threadLock) { + while (!threadLock.get()) { + try { + threadLock.wait(); + } catch (InterruptedException ex) { + } + } + if (audioDisabled) { + return; + } + + if (src.getAudioData() instanceof AudioStream) { + throw new UnsupportedOperationException( + "Cannot play instances " + + "of audio streams. 
Use playSource() instead."); + } + + if (src.getAudioData().isUpdateNeeded()) { + updateAudioData(src.getAudioData()); + } + + // create a new index for an audio-channel + int index = newChannel(); + if (index == -1) { + return; + } + + int sourceId = channels[index]; + + clearChannel(index); + + // set parameters, like position and max distance + setSourceParams(sourceId, src, true); + attachAudioToSource(sourceId, src.getAudioData()); + chanSrcs[index] = src; + + // play the channel + al.alSourcePlay(sourceId); + } + } + + public void playSource(AudioNode src) { + checkDead(); + synchronized (threadLock) { + while (!threadLock.get()) { + try { + threadLock.wait(); + } catch (InterruptedException ex) { + } + } + if (audioDisabled) { + return; + } + + //assert src.getStatus() == Status.Stopped || src.getChannel() == -1; + + if (src.getStatus() == AudioNode.Status.Playing) { + return; + } else if (src.getStatus() == AudioNode.Status.Stopped) { + + // allocate channel to this source + int index = newChannel(); + if (index == -1) { + logger.log(Level.WARNING, "No channel available to play {0}", src); + return; + } + clearChannel(index); + src.setChannel(index); + + AudioData data = src.getAudioData(); + if (data.isUpdateNeeded()) { + updateAudioData(data); + } + + chanSrcs[index] = src; + setSourceParams(channels[index], src, false); + attachAudioToSource(channels[index], data); + } + + al.alSourcePlay(channels[src.getChannel()]); + src.setStatus(AudioNode.Status.Playing); + } + } + + public void pauseSource(AudioNode src) { + checkDead(); + synchronized (threadLock) { + while (!threadLock.get()) { + try { + threadLock.wait(); + } catch (InterruptedException ex) { + } + } + if (audioDisabled) { + return; + } + + if (src.getStatus() == AudioNode.Status.Playing) { + assert src.getChannel() != -1; + + al.alSourcePause(channels[src.getChannel()]); + src.setStatus(AudioNode.Status.Paused); + } + } + } + + public void stopSource(AudioNode src) { + synchronized (threadLock) 
{ + while (!threadLock.get()) { + try { + threadLock.wait(); + } catch (InterruptedException ex) { + } + } + if (audioDisabled) { + return; + } + + if (src.getStatus() != AudioNode.Status.Stopped) { + int chan = src.getChannel(); + assert chan != -1; // if it's not stopped, must have id + + src.setStatus(AudioNode.Status.Stopped); + src.setChannel(-1); + clearChannel(chan); + freeChannel(chan); + + if (src.getAudioData() instanceof AudioStream) { + AudioStream stream = (AudioStream) src.getAudioData(); + if (stream.isOpen()) { + stream.close(); + } + + // And free the audio since it cannot be + // played again anyway. + deleteAudioData(src.getAudioData()); + } + } + } + } + + private int convertFormat(AudioData ad) { + switch (ad.getBitsPerSample()) { + case 8: + if (ad.getChannels() == 1) { + return ALConstants.AL_FORMAT_MONO8; + } else if (ad.getChannels() == 2) { + return ALConstants.AL_FORMAT_STEREO8; + } + + break; + case 16: + if (ad.getChannels() == 1) { + return ALConstants.AL_FORMAT_MONO16; + } else { + return ALConstants.AL_FORMAT_STEREO16; + } + } + throw new UnsupportedOperationException("Unsupported channels/bits combination: " + + "bits=" + ad.getBitsPerSample() + ", channels=" + ad.getChannels()); + } + + private void updateAudioBuffer(AudioBuffer ab) { + int id = ab.getId(); + if (ab.getId() == -1) { + ib.position(0).limit(1); + al.alGenBuffers(ib.limit(), ib); + id = ib.get(0); + ab.setId(id); + + objManager.registerForCleanup(ab); + } + + ab.getData().clear(); + al.alBufferData(id, convertFormat(ab), ab.getData(), ab.getData().remaining(), ab.getSampleRate()); + ab.clearUpdateNeeded(); + } + + private void updateAudioStream(AudioStream as) { + if (as.getIds() != null) { + deleteAudioData(as); + } + + int[] ids = new int[STREAMING_BUFFER_COUNT]; + ib.position(0).limit(STREAMING_BUFFER_COUNT); + al.alGenBuffers(ib.limit(), ib); + ib.position(0).limit(STREAMING_BUFFER_COUNT); + ib.get(ids); + + // Not registered with object manager. 
+ // AudioStreams can be handled without object manager + // since their lifecycle is known to the audio renderer. + + as.setIds(ids); + as.clearUpdateNeeded(); + } + + private void updateAudioData(AudioData ad) { + if (ad instanceof AudioBuffer) { + updateAudioBuffer((AudioBuffer) ad); + } else if (ad instanceof AudioStream) { + updateAudioStream((AudioStream) ad); + } + } + + public void deleteFilter(Filter filter) { + int id = filter.getId(); + if (id != -1) { + ib.put(0, id); + ib.position(0).limit(1); + al.alDeleteFilters(1, ib); + } + } + + public void deleteAudioData(AudioData ad) { + synchronized (threadLock) { + while (!threadLock.get()) { + try { + threadLock.wait(); + } catch (InterruptedException ex) { + } + } + if (audioDisabled) { + return; + } + + if (ad instanceof AudioBuffer) { + AudioBuffer ab = (AudioBuffer) ad; + int id = ab.getId(); + if (id != -1) { + ib.put(0, id); + ib.position(0).limit(1); + al.alDeleteBuffers(ib.limit(), ib); + ab.resetObject(); + } + } else if (ad instanceof AudioStream) { + AudioStream as = (AudioStream) ad; + int[] ids = as.getIds(); + if (ids != null) { + ib.clear(); + ib.put(ids).flip(); + al.alDeleteBuffers(ib.limit(), ib); + as.resetObject(); + } + } + } + } +} \ No newline at end of file diff --git a/engine/src/jogl/com/jme3/renderer/jogl/JoglGL1Renderer.java b/engine/src/jogl/com/jme3/renderer/jogl/JoglGL1Renderer.java new file mode 100644 index 000000000..246512183 --- /dev/null +++ b/engine/src/jogl/com/jme3/renderer/jogl/JoglGL1Renderer.java @@ -0,0 +1,1226 @@ +/* + * Copyright (c) 2009-2012 jMonkeyEngine + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. 
+ * + * * Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * + * * Neither the name of 'jMonkeyEngine' nor the names of its contributors + * may be used to endorse or promote products derived from this software + * without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED + * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF + * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ +package com.jme3.renderer.jogl; + +import com.jme3.light.*; +import com.jme3.material.FixedFuncBinding; +import com.jme3.material.RenderState; +import com.jme3.math.ColorRGBA; +import com.jme3.math.FastMath; +import com.jme3.math.Matrix4f; +import com.jme3.math.Vector3f; +import com.jme3.renderer.Caps; +import com.jme3.renderer.GL1Renderer; +import com.jme3.renderer.RenderContext; +import com.jme3.renderer.Statistics; +import com.jme3.scene.Mesh; +import com.jme3.scene.Mesh.Mode; +import com.jme3.scene.VertexBuffer; +import com.jme3.scene.VertexBuffer.Type; +import com.jme3.scene.VertexBuffer.Usage; +import com.jme3.shader.Shader; +import com.jme3.shader.Shader.ShaderSource; +import com.jme3.texture.FrameBuffer; +import com.jme3.texture.Image; +import com.jme3.texture.Texture; +import com.jme3.texture.Texture.WrapAxis; +import com.jme3.util.BufferUtils; +import com.jme3.util.NativeObjectManager; +import java.nio.*; +import java.util.ArrayList; +import java.util.EnumSet; +import java.util.logging.Level; +import java.util.logging.Logger; +import javax.media.opengl.*; +import jme3tools.converters.MipMapGenerator; + +public class JoglGL1Renderer implements GL1Renderer { + + private static final Logger logger = Logger.getLogger(JoglRenderer.class.getName()); + private final ByteBuffer nameBuf = BufferUtils.createByteBuffer(250); + private final StringBuilder stringBuf = new StringBuilder(250); + private final IntBuffer ib1 = BufferUtils.createIntBuffer(1); + private final IntBuffer intBuf16 = BufferUtils.createIntBuffer(16); + private final FloatBuffer fb16 = BufferUtils.createFloatBuffer(16); + private final FloatBuffer fb4Null = BufferUtils.createFloatBuffer(4); + private final RenderContext context = new RenderContext(); + private final NativeObjectManager objManager = new NativeObjectManager(); + private final EnumSet caps = EnumSet.noneOf(Caps.class); + private int maxTexSize; + private int maxCubeTexSize; + private int maxVertCount; + private int 
maxTriCount; + private int maxLights; + private boolean gl12 = false; + private final Statistics statistics = new Statistics(); + private int vpX, vpY, vpW, vpH; + private int clipX, clipY, clipW, clipH; + + private Matrix4f worldMatrix = new Matrix4f(); + private Matrix4f viewMatrix = new Matrix4f(); + + private ArrayList lightList = new ArrayList(8); + private ColorRGBA materialAmbientColor = new ColorRGBA(); + private Vector3f tempVec = new Vector3f(); + + protected void updateNameBuffer() { + int len = stringBuf.length(); + + nameBuf.position(0); + nameBuf.limit(len); + for (int i = 0; i < len; i++) { + nameBuf.put((byte) stringBuf.charAt(i)); + } + + nameBuf.rewind(); + } + + public Statistics getStatistics() { + return statistics; + } + + public EnumSet getCaps() { + return caps; + } + + public void initialize() { + GL gl = GLContext.getCurrentGL(); + if (gl.isExtensionAvailable("GL_VERSION_1_2")){ + gl12 = true; + } + + // Default values for certain GL state. + gl.getGL2().glShadeModel(GL2.GL_SMOOTH); + gl.getGL2().glColorMaterial(GL2.GL_FRONT_AND_BACK, GL2.GL_DIFFUSE); + gl.glHint(GL2.GL_PERSPECTIVE_CORRECTION_HINT, GL.GL_NICEST); + + // Enable rescaling/normaling of normal vectors. + // Fixes lighting issues with scaled models. + if (gl12){ + gl.glEnable(GL2.GL_RESCALE_NORMAL); + }else{ + gl.glEnable(GL2.GL_NORMALIZE); + } + + if (gl.isExtensionAvailable("GL_ARB_texture_non_power_of_two")) { + caps.add(Caps.NonPowerOfTwoTextures); + } else { + logger.log(Level.WARNING, "Your graphics card does not " + + "support non-power-of-2 textures. 
" + + "Some features might not work."); + } + + gl.glGetIntegerv(GL2.GL_MAX_LIGHTS, ib1); + maxLights = ib1.get(0); + + } + + public void invalidateState() { + context.reset(); + } + + public void resetGLObjects() { + logger.log(Level.INFO, "Reseting objects and invalidating state"); + objManager.resetObjects(); + statistics.clearMemory(); + invalidateState(); + } + + public void cleanup() { + logger.log(Level.INFO, "Deleting objects and invalidating state"); + objManager.deleteAllObjects(this); + statistics.clearMemory(); + invalidateState(); + } + + public void setDepthRange(float start, float end) { + GL gl = GLContext.getCurrentGL(); + gl.getGL2().glDepthRange(start, end); + } + + public void clearBuffers(boolean color, boolean depth, boolean stencil) { + GL gl = GLContext.getCurrentGL(); + int bits = 0; + if (color) { + //See explanations of the depth below, we must enable color write to be able to clear the color buffer + if (context.colorWriteEnabled == false) { + gl.glColorMask(true, true, true, true); + context.colorWriteEnabled = true; + } + bits = GL.GL_COLOR_BUFFER_BIT; + } + if (depth) { + + //glClear(GL_DEPTH_BUFFER_BIT) seems to not work when glDepthMask is false + //here s some link on openl board + //http://www.opengl.org/discussion_boards/ubbthreads.php?ubb=showflat&Number=257223 + //if depth clear is requested, we enable the depthMask + if (context.depthWriteEnabled == false) { + gl.glDepthMask(true); + context.depthWriteEnabled = true; + } + bits |= GL.GL_DEPTH_BUFFER_BIT; + } + if (stencil) { + bits |= GL.GL_STENCIL_BUFFER_BIT; + } + if (bits != 0) { + gl.glClear(bits); + } + } + + public void setBackgroundColor(ColorRGBA color) { + GL gl = GLContext.getCurrentGL(); + gl.glClearColor(color.r, color.g, color.b, color.a); + } + + private void setMaterialColor(int type, ColorRGBA color, ColorRGBA defaultColor) { + GL gl = GLContext.getCurrentGL(); + if (color != null){ + fb16.put(color.r).put(color.g).put(color.b).put(color.a).flip(); + }else{ + 
fb16.put(defaultColor.r).put(defaultColor.g).put(defaultColor.b).put(defaultColor.a).flip(); + } + gl.getGL2().glMaterialfv(GL.GL_FRONT_AND_BACK, type, fb16); + } + + /** + * Applies fixed function bindings from the context to OpenGL + */ + private void applyFixedFuncBindings(boolean forLighting){ + GL gl = GLContext.getCurrentGL(); + if (forLighting) { + gl.getGL2().glMaterialf(GL.GL_FRONT_AND_BACK, GL2.GL_SHININESS, context.shininess); + setMaterialColor(GL2.GL_AMBIENT, context.ambient, ColorRGBA.DarkGray); + setMaterialColor(GL2.GL_DIFFUSE, context.diffuse, ColorRGBA.White); + setMaterialColor(GL2.GL_SPECULAR, context.specular, ColorRGBA.Black); + + if (context.useVertexColor) { + gl.glEnable(GL2.GL_COLOR_MATERIAL); + } else { + gl.glDisable(GL2.GL_COLOR_MATERIAL); + } + } else { + // Ignore other values as they have no effect when + // GL_LIGHTING is disabled. + ColorRGBA color = context.color; + if (color != null) { + gl.getGL2().glColor4f(color.r, color.g, color.b, color.a); + } else { + gl.getGL2().glColor4f(1, 1, 1, 1); + } + } + if (context.alphaTestFallOff > 0f) { + gl.glEnable(GL2.GL_ALPHA_TEST); + gl.getGL2().glAlphaFunc(GL.GL_GREATER, context.alphaTestFallOff); + } else { + gl.glDisable(GL2.GL_ALPHA_TEST); + } + } + + /** + * Reset fixed function bindings to default values. + */ + private void resetFixedFuncBindings(){ + context.alphaTestFallOff = 0f; // zero means disable alpha test! 
+ context.color = null; + context.ambient = null; + context.diffuse = null; + context.specular = null; + context.shininess = 0; + context.useVertexColor = false; + } + + public void setFixedFuncBinding(FixedFuncBinding ffBinding, Object val) { + switch (ffBinding) { + case Color: + context.color = (ColorRGBA) val; + break; + case MaterialAmbient: + context.ambient = (ColorRGBA) val; + break; + case MaterialDiffuse: + context.diffuse = (ColorRGBA) val; + break; + case MaterialSpecular: + context.specular = (ColorRGBA) val; + break; + case MaterialShininess: + context.shininess = (Float) val; + break; + case UseVertexColor: + context.useVertexColor = (Boolean) val; + break; + case AlphaTestFallOff: + context.alphaTestFallOff = (Float) val; + break; + } + } + + public void applyRenderState(RenderState state) { + GL gl = GLContext.getCurrentGL(); + if (state.isWireframe() && !context.wireframe) { + gl.getGL2().glPolygonMode(GL2.GL_FRONT_AND_BACK, GL2.GL_LINE); + context.wireframe = true; + } else if (!state.isWireframe() && context.wireframe) { + gl.getGL2().glPolygonMode(GL2.GL_FRONT_AND_BACK, GL2.GL_FILL); + context.wireframe = false; + } + + if (state.isDepthTest() && !context.depthTestEnabled) { + gl.getGL2().glEnable(GL.GL_DEPTH_TEST); + gl.getGL2().glDepthFunc(GL.GL_LEQUAL); + context.depthTestEnabled = true; + } else if (!state.isDepthTest() && context.depthTestEnabled) { + gl.getGL2().glDisable(GL.GL_DEPTH_TEST); + context.depthTestEnabled = false; + } + + if (state.isAlphaTest()) { + setFixedFuncBinding(FixedFuncBinding.AlphaTestFallOff, state.getAlphaFallOff()); + } else { + setFixedFuncBinding(FixedFuncBinding.AlphaTestFallOff, 0f); // disable it + } + + if (state.isDepthWrite() && !context.depthWriteEnabled) { + gl.getGL2().glDepthMask(true); + context.depthWriteEnabled = true; + } else if (!state.isDepthWrite() && context.depthWriteEnabled) { + gl.getGL2().glDepthMask(false); + context.depthWriteEnabled = false; + } + + if (state.isColorWrite() && 
!context.colorWriteEnabled) { + gl.getGL2().glColorMask(true, true, true, true); + context.colorWriteEnabled = true; + } else if (!state.isColorWrite() && context.colorWriteEnabled) { + gl.getGL2().glColorMask(false, false, false, false); + context.colorWriteEnabled = false; + } + + if (state.isPointSprite()) { + logger.log(Level.WARNING, "Point Sprite unsupported!"); + } + + if (state.isPolyOffset()) { + if (!context.polyOffsetEnabled) { + gl.glEnable(GL.GL_POLYGON_OFFSET_FILL); + gl.getGL2().glPolygonOffset(state.getPolyOffsetFactor(), + state.getPolyOffsetUnits()); + context.polyOffsetEnabled = true; + context.polyOffsetFactor = state.getPolyOffsetFactor(); + context.polyOffsetUnits = state.getPolyOffsetUnits(); + } else { + if (state.getPolyOffsetFactor() != context.polyOffsetFactor + || state.getPolyOffsetUnits() != context.polyOffsetUnits) { + gl.getGL2().glPolygonOffset(state.getPolyOffsetFactor(), + state.getPolyOffsetUnits()); + context.polyOffsetFactor = state.getPolyOffsetFactor(); + context.polyOffsetUnits = state.getPolyOffsetUnits(); + } + } + } else { + if (context.polyOffsetEnabled) { + gl.glDisable(GL.GL_POLYGON_OFFSET_FILL); + context.polyOffsetEnabled = false; + context.polyOffsetFactor = 0; + context.polyOffsetUnits = 0; + } + } + if (state.getFaceCullMode() != context.cullMode) { + if (state.getFaceCullMode() == RenderState.FaceCullMode.Off) { + gl.glDisable(GL.GL_CULL_FACE); + } else { + gl.glEnable(GL.GL_CULL_FACE); + } + + switch (state.getFaceCullMode()) { + case Off: + break; + case Back: + gl.glCullFace(GL.GL_BACK); + break; + case Front: + gl.glCullFace(GL.GL_FRONT); + break; + case FrontAndBack: + gl.glCullFace(GL.GL_FRONT_AND_BACK); + break; + default: + throw new UnsupportedOperationException("Unrecognized face cull mode: " + + state.getFaceCullMode()); + } + + context.cullMode = state.getFaceCullMode(); + } + + if (state.getBlendMode() != context.blendMode) { + if (state.getBlendMode() == RenderState.BlendMode.Off) { + 
gl.glDisable(GL.GL_BLEND); + } else { + gl.glEnable(GL.GL_BLEND); + switch (state.getBlendMode()) { + case Off: + break; + case Additive: + gl.glBlendFunc(GL.GL_ONE, GL.GL_ONE); + break; + case AlphaAdditive: + gl.glBlendFunc(GL.GL_SRC_ALPHA, GL.GL_ONE); + break; + case Color: + gl.glBlendFunc(GL.GL_ONE, GL.GL_ONE_MINUS_SRC_COLOR); + break; + case Alpha: + gl.glBlendFunc(GL.GL_SRC_ALPHA, GL.GL_ONE_MINUS_SRC_ALPHA); + break; + case PremultAlpha: + gl.glBlendFunc(GL.GL_ONE, GL.GL_ONE_MINUS_SRC_ALPHA); + break; + case Modulate: + gl.glBlendFunc(GL.GL_DST_COLOR, GL.GL_ZERO); + break; + case ModulateX2: + gl.glBlendFunc(GL.GL_DST_COLOR, GL.GL_SRC_COLOR); + break; + default: + throw new UnsupportedOperationException("Unrecognized blend mode: " + + state.getBlendMode()); + } + } + + context.blendMode = state.getBlendMode(); + } + + if (state.isStencilTest()) { + throw new UnsupportedOperationException("OpenGL 1.1 doesn't support two sided stencil operations."); + } + + } + + public void setViewPort(int x, int y, int w, int h) { + if (x != vpX || vpY != y || vpW != w || vpH != h) { + GL gl = GLContext.getCurrentGL(); + gl.glViewport(x, y, w, h); + vpX = x; + vpY = y; + vpW = w; + vpH = h; + } + } + + public void setClipRect(int x, int y, int width, int height) { + GL gl = GLContext.getCurrentGL(); + if (!context.clipRectEnabled) { + gl.glEnable(GL.GL_SCISSOR_TEST); + context.clipRectEnabled = true; + } + if (clipX != x || clipY != y || clipW != width || clipH != height) { + gl.glScissor(x, y, width, height); + clipX = x; + clipY = y; + clipW = width; + clipH = height; + } + } + + public void clearClipRect() { + GL gl = GLContext.getCurrentGL(); + if (context.clipRectEnabled) { + gl.glDisable(GL.GL_SCISSOR_TEST); + context.clipRectEnabled = false; + + clipX = 0; + clipY = 0; + clipW = 0; + clipH = 0; + } + } + + public void onFrame() { + objManager.deleteUnused(this); +// statistics.clearFrame(); + } + + private FloatBuffer storeMatrix(Matrix4f matrix, FloatBuffer store) { 
        // (body of storeMatrix — the signature sits in the preceding chunk)
        // Fill 'store' with the matrix in column-major order and rewind it so
        // it can be handed straight to glLoadMatrixf/glMultMatrixf.
        store.clear();
        matrix.fillFloatBuffer(store, true);
        store.clear();
        return store;
    }

    /**
     * Loads view * model onto the fixed-function GL_MODELVIEW stack.
     */
    private void setModelView(Matrix4f modelMatrix, Matrix4f viewMatrix){
        GL gl = GLContext.getCurrentGL();
        // Only switch matrix mode when the cached mode differs.
        if (context.matrixMode != GL2.GL_MODELVIEW) {
            gl.getGL2().glMatrixMode(GL2.GL_MODELVIEW);
            context.matrixMode = GL2.GL_MODELVIEW;
        }

        gl.getGL2().glLoadMatrixf(storeMatrix(viewMatrix, fb16));
        gl.getGL2().glMultMatrixf(storeMatrix(modelMatrix, fb16));
    }

    /**
     * Loads the projection matrix onto the fixed-function GL_PROJECTION stack.
     */
    private void setProjection(Matrix4f projMatrix){
        GL gl = GLContext.getCurrentGL();
        if (context.matrixMode != GL2.GL_PROJECTION) {
            gl.getGL2().glMatrixMode(GL2.GL_PROJECTION);
            context.matrixMode = GL2.GL_PROJECTION;
        }

        gl.getGL2().glLoadMatrixf(storeMatrix(projMatrix, fb16));
    }

    // The world matrix is only cached here; it is uploaded lazily via
    // setModelView() (fixed-function GL has no separate world matrix).
    public void setWorldMatrix(Matrix4f worldMatrix) {
        this.worldMatrix.set(worldMatrix);
    }

    public void setViewProjectionMatrices(Matrix4f viewMatrix, Matrix4f projMatrix) {
        this.viewMatrix.set(viewMatrix);
        setProjection(projMatrix);
    }

    public void setLighting(LightList list) {
        GL gl = GLContext.getCurrentGL();
        // XXX: This is abuse of setLighting() to
        // apply fixed function bindings
        // and do other book keeping.
        if (list == null || list.size() == 0){
            // No lights: disable GL_LIGHTING, apply the unlit fixed-function
            // bindings, and still upload the model-view matrix.
            gl.glDisable(GL2.GL_LIGHTING);
            applyFixedFuncBindings(false);
            setModelView(worldMatrix, viewMatrix);
            return;
        }

        // Number of lights set previously
        int numLightsSetPrev = lightList.size();

        // If more than maxLights are defined, they will be ignored.
        // The GL1 renderer is not permitted to crash due to a
        // GL1 limitation. It must render anything that the GL2 renderer
        // can render (even incorrectly).
+ lightList.clear(); + materialAmbientColor.set(0, 0, 0, 0); + + for (int i = 0; i < list.size(); i++){ + Light l = list.get(i); + if (l.getType() == Light.Type.Ambient){ + // Gather + materialAmbientColor.addLocal(l.getColor()); + }else{ + // Add to list + lightList.add(l); + + // Once maximum lights reached, exit loop. + if (lightList.size() >= maxLights){ + break; + } + } + } + + applyFixedFuncBindings(true); + + gl.glEnable(GL2.GL_LIGHTING); + + fb16.clear(); + fb16.put(materialAmbientColor.r) + .put(materialAmbientColor.g) + .put(materialAmbientColor.b) + .put(1).flip(); + + gl.getGL2().glLightModelfv(GL2.GL_LIGHT_MODEL_AMBIENT, fb16); + + if (context.matrixMode != GL2.GL_MODELVIEW) { + gl.getGL2().glMatrixMode(GL2.GL_MODELVIEW); + context.matrixMode = GL2.GL_MODELVIEW; + } + // Lights are already in world space, so just convert + // them to view space. + gl.getGL2().glLoadMatrixf(storeMatrix(viewMatrix, fb16)); + + for (int i = 0; i < lightList.size(); i++){ + int glLightIndex = GL2.GL_LIGHT0 + i; + Light light = lightList.get(i); + Light.Type lightType = light.getType(); + ColorRGBA col = light.getColor(); + Vector3f pos; + + // Enable the light + gl.glEnable(glLightIndex); + + // OGL spec states default value for light ambient is black + switch (lightType){ + case Directional: + DirectionalLight dLight = (DirectionalLight) light; + + fb16.clear(); + fb16.put(col.r).put(col.g).put(col.b).put(col.a).flip(); + gl.getGL2().glLightfv(glLightIndex, GL2.GL_DIFFUSE, fb16); + gl.getGL2().glLightfv(glLightIndex, GL2.GL_SPECULAR, fb16); + + pos = tempVec.set(dLight.getDirection()).negateLocal().normalizeLocal(); + fb16.clear(); + fb16.put(pos.x).put(pos.y).put(pos.z).put(0.0f).flip(); + gl.getGL2().glLightfv(glLightIndex, GL2.GL_POSITION, fb16); + gl.getGL2().glLightf(glLightIndex, GL2.GL_SPOT_CUTOFF, 180); + break; + case Point: + PointLight pLight = (PointLight) light; + + fb16.clear(); + fb16.put(col.r).put(col.g).put(col.b).put(col.a).flip(); + 
gl.getGL2().glLightfv(glLightIndex, GL2.GL_DIFFUSE, fb16); + gl.getGL2().glLightfv(glLightIndex, GL2.GL_SPECULAR, fb16); + + pos = pLight.getPosition(); + fb16.clear(); + fb16.put(pos.x).put(pos.y).put(pos.z).put(1.0f).flip(); + gl.getGL2().glLightfv(glLightIndex, GL2.GL_POSITION, fb16); + gl.getGL2().glLightf(glLightIndex, GL2.GL_SPOT_CUTOFF, 180); + + if (pLight.getRadius() > 0) { + // Note: this doesn't follow the same attenuation model + // as the one used in the lighting shader. + gl.getGL2().glLightf(glLightIndex, GL2.GL_CONSTANT_ATTENUATION, 1); + gl.getGL2().glLightf(glLightIndex, GL2.GL_LINEAR_ATTENUATION, pLight.getInvRadius() * 2); + gl.getGL2().glLightf(glLightIndex, GL2.GL_QUADRATIC_ATTENUATION, pLight.getInvRadius() * pLight.getInvRadius()); + }else{ + gl.getGL2().glLightf(glLightIndex, GL2.GL_CONSTANT_ATTENUATION, 1); + gl.getGL2().glLightf(glLightIndex, GL2.GL_LINEAR_ATTENUATION, 0); + gl.getGL2().glLightf(glLightIndex, GL2.GL_QUADRATIC_ATTENUATION, 0); + } + + break; + case Spot: + SpotLight sLight = (SpotLight) light; + + fb16.clear(); + fb16.put(col.r).put(col.g).put(col.b).put(col.a).flip(); + gl.getGL2().glLightfv(glLightIndex, GL2.GL_DIFFUSE, fb16); + gl.getGL2().glLightfv(glLightIndex, GL2.GL_SPECULAR, fb16); + + pos = sLight.getPosition(); + fb16.clear(); + fb16.put(pos.x).put(pos.y).put(pos.z).put(1.0f).flip(); + gl.getGL2().glLightfv(glLightIndex, GL2.GL_POSITION, fb16); + + Vector3f dir = sLight.getDirection(); + fb16.clear(); + fb16.put(dir.x).put(dir.y).put(dir.z).put(1.0f).flip(); + gl.getGL2().glLightfv(glLightIndex, GL2.GL_SPOT_DIRECTION, fb16); + + float outerAngleRad = sLight.getSpotOuterAngle(); + float innerAngleRad = sLight.getSpotInnerAngle(); + float spotCut = outerAngleRad * FastMath.RAD_TO_DEG; + float spotExpo = 0.0f; + if (outerAngleRad > 0) { + spotExpo = (1.0f - (innerAngleRad / outerAngleRad)) * 128.0f; + } + + gl.getGL2().glLightf(glLightIndex, GL2.GL_SPOT_CUTOFF, spotCut); + gl.getGL2().glLightf(glLightIndex, 
GL2.GL_SPOT_EXPONENT, spotExpo); + + if (sLight.getSpotRange() > 0) { + gl.getGL2().glLightf(glLightIndex, GL2.GL_LINEAR_ATTENUATION, sLight.getInvSpotRange()); + }else{ + gl.getGL2().glLightf(glLightIndex, GL2.GL_LINEAR_ATTENUATION, 0); + } + + break; + default: + throw new UnsupportedOperationException( + "Unrecognized light type: " + lightType); + } + } + + // Disable lights after the index + for (int i = lightList.size(); i < numLightsSetPrev; i++){ + gl.glDisable(GL2.GL_LIGHT0 + i); + } + + // This will set view matrix as well. + setModelView(worldMatrix, viewMatrix); + } + + private int convertTextureType(Texture.Type type) { + switch (type) { + case TwoDimensional: + return GL2.GL_TEXTURE_2D; +// case ThreeDimensional: +// return GL_TEXTURE_3D; +// case CubeMap: +// return GL_TEXTURE_CUBE_MAP; + default: + throw new UnsupportedOperationException("Unknown texture type: " + type); + } + } + + private int convertMagFilter(Texture.MagFilter filter) { + switch (filter) { + case Bilinear: + return GL2.GL_LINEAR; + case Nearest: + return GL2.GL_NEAREST; + default: + throw new UnsupportedOperationException("Unknown mag filter: " + filter); + } + } + + private int convertMinFilter(Texture.MinFilter filter) { + switch (filter) { + case Trilinear: + return GL2.GL_LINEAR_MIPMAP_LINEAR; + case BilinearNearestMipMap: + return GL2.GL_LINEAR_MIPMAP_NEAREST; + case NearestLinearMipMap: + return GL2.GL_NEAREST_MIPMAP_LINEAR; + case NearestNearestMipMap: + return GL2.GL_NEAREST_MIPMAP_NEAREST; + case BilinearNoMipMaps: + return GL2.GL_LINEAR; + case NearestNoMipMaps: + return GL2.GL_NEAREST; + default: + throw new UnsupportedOperationException("Unknown min filter: " + filter); + } + } + + private int convertWrapMode(Texture.WrapMode mode) { + switch (mode) { + case EdgeClamp: + case Clamp: + case BorderClamp: + return GL2.GL_CLAMP; + case Repeat: + return GL2.GL_REPEAT; + default: + throw new UnsupportedOperationException("Unknown wrap mode: " + mode); + } + } + + private void 
setupTextureParams(Texture tex) { + int target = convertTextureType(tex.getType()); + + // filter things + int minFilter = convertMinFilter(tex.getMinFilter()); + int magFilter = convertMagFilter(tex.getMagFilter()); + GL gl = GLContext.getCurrentGL(); + gl.glTexParameteri(target, GL2.GL_TEXTURE_MIN_FILTER, minFilter); + gl.glTexParameteri(target, GL2.GL_TEXTURE_MAG_FILTER, magFilter); + + // repeat modes + switch (tex.getType()) { +// case ThreeDimensional: +// case CubeMap: +// glTexParameteri(target, GL_TEXTURE_WRAP_R, convertWrapMode(tex.getWrap(WrapAxis.R))); + case TwoDimensional: + gl.glTexParameteri(target, GL2.GL_TEXTURE_WRAP_T, convertWrapMode(tex.getWrap(WrapAxis.T))); + // fall down here is intentional.. +// case OneDimensional: + gl.glTexParameteri(target, GL2.GL_TEXTURE_WRAP_S, convertWrapMode(tex.getWrap(WrapAxis.S))); + break; + default: + throw new UnsupportedOperationException("Unknown texture type: " + tex.getType()); + } + } + + public void updateTexImageData(Image img, Texture.Type type, int unit) { + int texId = img.getId(); + GL gl = GLContext.getCurrentGL(); + if (texId == -1) { + // create texture + gl.glGenTextures(1, ib1); + texId = ib1.get(0); + img.setId(texId); + objManager.registerForCleanup(img); + + statistics.onNewTexture(); + } + + // bind texture + int target = convertTextureType(type); +// if (context.boundTextureUnit != unit) { +// glActiveTexture(GL_TEXTURE0 + unit); +// context.boundTextureUnit = unit; +// } + if (context.boundTextures[unit] != img) { + gl.glEnable(target); + gl.glBindTexture(target, texId); + context.boundTextures[unit] = img; + + statistics.onTextureUse(img, true); + } + + // Check sizes if graphics card doesn't support NPOT + if (!gl.isExtensionAvailable("GL_ARB_texture_non_power_of_two")) { + if (img.getWidth() != 0 && img.getHeight() != 0) { + if (!FastMath.isPowerOfTwo(img.getWidth()) + || !FastMath.isPowerOfTwo(img.getHeight())) { + + // Resize texture to Power-of-2 size + 
MipMapGenerator.resizeToPowerOf2(img); + } + } + } + + if (!img.hasMipmaps() && img.isGeneratedMipmapsRequired()) { + // No pregenerated mips available, + // generate from base level if required + + // Check if hardware mips are supported + if (gl.isExtensionAvailable("GL_VERSION_1_4")) { + gl.glTexParameteri(target, GL2.GL_GENERATE_MIPMAP, GL.GL_TRUE); + } else { + MipMapGenerator.generateMipMaps(img); + } + img.setMipmapsGenerated(true); + } else { + } + + /* + if (target == GL_TEXTURE_CUBE_MAP) { + List data = img.getData(); + if (data.size() != 6) { + logger.log(Level.WARNING, "Invalid texture: {0}\n" + + "Cubemap textures must contain 6 data units.", img); + return; + } + for (int i = 0; i < 6; i++) { + TextureUtil.uploadTexture(img, GL_TEXTURE_CUBE_MAP_POSITIVE_X + i, i, 0, tdc); + } + } else if (target == EXTTextureArray.GL_TEXTURE_2D_ARRAY_EXT) { + List data = img.getData(); + // -1 index specifies prepare data for 2D Array + TextureUtil.uploadTexture(img, target, -1, 0, tdc); + for (int i = 0; i < data.size(); i++) { + // upload each slice of 2D array in turn + // this time with the appropriate index + TextureUtil.uploadTexture(img, target, i, 0, tdc); + } + } else {*/ + TextureUtil.uploadTexture(img, target, 0, 0); + //} + + img.clearUpdateNeeded(); + } + + public void setTexture(int unit, Texture tex) { + if (unit != 0 || tex.getType() != Texture.Type.TwoDimensional) { + //throw new UnsupportedOperationException(); + return; + } + + Image image = tex.getImage(); + if (image.isUpdateNeeded() || (image.isGeneratedMipmapsRequired() && !image.isMipmapsGenerated()) ) { + updateTexImageData(image, tex.getType(), unit); + } + + int texId = image.getId(); + assert texId != -1; + + Image[] textures = context.boundTextures; + + int type = convertTextureType(tex.getType()); +// if (!context.textureIndexList.moveToNew(unit)) { +// if (context.boundTextureUnit != unit){ +// gl.glActiveTexture(GL.GL_TEXTURE0 + unit); +// context.boundTextureUnit = unit; +// } +// 
gl.glEnable(type); +// } + +// if (context.boundTextureUnit != unit) { +// gl.glActiveTexture(GL.GL_TEXTURE0 + unit); +// context.boundTextureUnit = unit; +// } + + if (textures[unit] != image) { + GL gl = GLContext.getCurrentGL(); + gl.glEnable(type); + gl.glBindTexture(type, texId); + textures[unit] = image; + + statistics.onTextureUse(image, true); + } else { + statistics.onTextureUse(image, false); + } + + setupTextureParams(tex); + } + + private void clearTextureUnits() { + Image[] textures = context.boundTextures; + if (textures[0] != null) { + GL gl = GLContext.getCurrentGL(); + gl.glDisable(GL2.GL_TEXTURE_2D); + textures[0] = null; + } + } + + public void deleteImage(Image image) { + int texId = image.getId(); + if (texId != -1) { + ib1.put(0, texId); + ib1.position(0).limit(1); + GL gl = GLContext.getCurrentGL(); + gl.glDeleteTextures(ib1.limit() ,ib1); + image.resetObject(); + } + } + + private int convertArrayType(VertexBuffer.Type type) { + switch (type) { + case Position: + return GL2.GL_VERTEX_ARRAY; + case Normal: + return GL2.GL_NORMAL_ARRAY; + case TexCoord: + return GL2.GL_TEXTURE_COORD_ARRAY; + case Color: + return GL2.GL_COLOR_ARRAY; + default: + return -1; // unsupported + } + } + + private int convertVertexFormat(VertexBuffer.Format fmt) { + switch (fmt) { + case Byte: + return GL2.GL_BYTE; + case Float: + return GL2.GL_FLOAT; + case Int: + return GL2.GL_INT; + case Short: + return GL2.GL_SHORT; + case UnsignedByte: + return GL2.GL_UNSIGNED_BYTE; + case UnsignedInt: + return GL2.GL_UNSIGNED_INT; + case UnsignedShort: + return GL2.GL_UNSIGNED_SHORT; + default: + throw new UnsupportedOperationException("Unrecognized vertex format: " + fmt); + } + } + + private int convertElementMode(Mesh.Mode mode) { + switch (mode) { + case Points: + return GL2.GL_POINTS; + case Lines: + return GL2.GL_LINES; + case LineLoop: + return GL2.GL_LINE_LOOP; + case LineStrip: + return GL2.GL_LINE_STRIP; + case Triangles: + return GL.GL_TRIANGLES; + case TriangleFan: + 
return GL2.GL_TRIANGLE_FAN; + case TriangleStrip: + return GL2.GL_TRIANGLE_STRIP; + default: + throw new UnsupportedOperationException("Unrecognized mesh mode: " + mode); + } + } + + public void drawTriangleArray(Mesh.Mode mode, int count, int vertCount) { + if (count > 1) { + throw new UnsupportedOperationException(); + } + GL gl = GLContext.getCurrentGL(); + gl.glDrawArrays(convertElementMode(mode), 0, vertCount); + } + + public void setVertexAttrib(VertexBuffer vb, VertexBuffer idb) { + if (vb.getBufferType() == VertexBuffer.Type.Color && !context.useVertexColor) { + // Ignore vertex color buffer if vertex color is disabled. + return; + } + + int arrayType = convertArrayType(vb.getBufferType()); + if (arrayType == -1) { + return; // unsupported + } + GL gl = GLContext.getCurrentGL(); + gl.getGL2().glEnableClientState(arrayType); + context.boundAttribs[vb.getBufferType().ordinal()] = vb; + + if (vb.getBufferType() == Type.Normal) { + // normalize if requested + if (vb.isNormalized() && !context.normalizeEnabled) { + gl.glEnable(GL2.GL_NORMALIZE); + context.normalizeEnabled = true; + } else if (!vb.isNormalized() && context.normalizeEnabled) { + gl.glDisable(GL2.GL_NORMALIZE); + context.normalizeEnabled = false; + } + } + + // NOTE: Use data from interleaved buffer if specified + Buffer data = idb != null ? 
idb.getData() : vb.getData(); + int comps = vb.getNumComponents(); + int type = convertVertexFormat(vb.getFormat()); + + data.rewind(); + + switch (vb.getBufferType()) { + case Position: + if (!(data instanceof FloatBuffer)) { + throw new UnsupportedOperationException(); + } + + gl.getGL2().glVertexPointer(comps, type, vb.getStride(), (FloatBuffer) data); + break; + case Normal: + if (!(data instanceof FloatBuffer)) { + throw new UnsupportedOperationException(); + } + + gl.getGL2().glNormalPointer(type, vb.getStride(), (FloatBuffer) data); + break; + case Color: + if (data instanceof FloatBuffer) { + gl.getGL2().glColorPointer(comps, type, vb.getStride(), (FloatBuffer) data); + } else if (data instanceof ByteBuffer) { + gl.getGL2().glColorPointer(comps, type, vb.getStride(), (ByteBuffer) data); + } else { + throw new UnsupportedOperationException(); + } + break; + case TexCoord: + if (!(data instanceof FloatBuffer)) { + throw new UnsupportedOperationException(); + } + + gl.getGL2().glTexCoordPointer(comps, type, vb.getStride(), (FloatBuffer) data); + break; + default: + // Ignore, this is an unsupported attribute for OpenGL1. 
                break;
        }
    }

    /** Convenience overload: attribute data is not interleaved. */
    public void setVertexAttrib(VertexBuffer vb) {
        setVertexAttrib(vb, null);
    }

    /**
     * Issues glDrawElements using the JOGL overload matching the index
     * format. Only unsigned byte/short/int index formats are supported.
     */
    private void drawElements(int mode, int format, Buffer data) {
        GL gl = GLContext.getCurrentGL();
        switch (format) {
            case GL2.GL_UNSIGNED_BYTE:
                gl.getGL2().glDrawElements(mode, data.limit(), format, (ByteBuffer) data);
                break;
            case GL2.GL_UNSIGNED_SHORT:
                gl.getGL2().glDrawElements(mode, data.limit(), format, (ShortBuffer) data);
                break;
            case GL2.GL_UNSIGNED_INT:
                gl.getGL2().glDrawElements(mode, data.limit(), format, (IntBuffer) data);
                break;
            default:
                throw new UnsupportedOperationException();
        }
    }

    /**
     * Draws the mesh's indexed geometry from client-side index data.
     * Hybrid mode (mixed list/strip/fan sections) is not supported here.
     */
    public void drawTriangleList(VertexBuffer indexBuf, Mesh mesh, int count) {
        Mesh.Mode mode = mesh.getMode();

        Buffer indexData = indexBuf.getData();
        indexData.rewind();

        if (mesh.getMode() == Mode.Hybrid) {
            throw new UnsupportedOperationException();
            // NOTE(review): if this dead path is ever revived, the
            // "i == fanStart" branch below should select
            // Mode.TriangleFan, not Mode.TriangleStrip (copy-paste bug).
            /*
            int[] modeStart = mesh.getModeStart();
            int[] elementLengths = mesh.getElementLengths();

            int elMode = convertElementMode(Mode.Triangles);
            int fmt = convertVertexFormat(indexBuf.getFormat());
            // int elSize = indexBuf.getFormat().getComponentSize();
            // int listStart = modeStart[0];
            int stripStart = modeStart[1];
            int fanStart = modeStart[2];
            int curOffset = 0;
            for (int i = 0; i < elementLengths.length; i++) {
                if (i == stripStart) {
                    elMode = convertElementMode(Mode.TriangleStrip);
                } else if (i == fanStart) {
                    elMode = convertElementMode(Mode.TriangleStrip);
                }
                int elementLength = elementLengths[i];
                indexData.position(curOffset);

                drawElements(elMode,
                        fmt,
                        indexData);

                curOffset += elementLength;
            }*/
        } else {
            drawElements(convertElementMode(mode),
                    convertVertexFormat(indexBuf.getFormat()),
                    indexData);
        }
    }

    public void clearVertexAttribs() {
        // Disable every client-state array that setVertexAttrib enabled.
        for (int i = 0; i < 16; i++) {
            VertexBuffer vb = context.boundAttribs[i];
            if (vb != null) {
                int arrayType = convertArrayType(vb.getBufferType());
                GL gl = 
GLContext.getCurrentGL(); + gl.getGL2().glDisableClientState(arrayType); + context.boundAttribs[vb.getBufferType().ordinal()] = null; + } + } + } + + private void renderMeshDefault(Mesh mesh, int lod, int count) { + VertexBuffer indices = null; + + VertexBuffer interleavedData = mesh.getBuffer(Type.InterleavedData); + if (interleavedData != null && interleavedData.isUpdateNeeded()) { + updateBufferData(interleavedData); + } + + if (mesh.getNumLodLevels() > 0) { + indices = mesh.getLodLevel(lod); + } else { + indices = mesh.getBuffer(Type.Index); + } + for (VertexBuffer vb : mesh.getBufferList().getArray()) { + if (vb.getBufferType() == Type.InterleavedData + || vb.getUsage() == Usage.CpuOnly // ignore cpu-only buffers + || vb.getBufferType() == Type.Index) { + continue; + } + + if (vb.getStride() == 0) { + // not interleaved + setVertexAttrib(vb); + } else { + // interleaved + setVertexAttrib(vb, interleavedData); + } + } + + if (indices != null) { + drawTriangleList(indices, mesh, count); + } else { + GL gl = GLContext.getCurrentGL(); + gl.glDrawArrays(convertElementMode(mesh.getMode()), 0, mesh.getVertexCount()); + } + + // TODO: Fix these to use IDList?? 
+ clearVertexAttribs(); + clearTextureUnits(); + resetFixedFuncBindings(); + } + + public void renderMesh(Mesh mesh, int lod, int count) { + if (mesh.getVertexCount() == 0) { + return; + } + GL gl = GLContext.getCurrentGL(); + if (context.pointSize != mesh.getPointSize()) { + gl.getGL2().glPointSize(mesh.getPointSize()); + context.pointSize = mesh.getPointSize(); + } + if (context.lineWidth != mesh.getLineWidth()) { + gl.getGL2().glLineWidth(mesh.getLineWidth()); + context.lineWidth = mesh.getLineWidth(); + } + + boolean dynamic = false; + if (mesh.getBuffer(Type.InterleavedData) != null) { + throw new UnsupportedOperationException("Interleaved meshes are not supported"); + } + + if (mesh.getNumLodLevels() == 0) { + for (VertexBuffer vb : mesh.getBufferList().getArray()) { + if (vb.getUsage() != VertexBuffer.Usage.Static) { + dynamic = true; + break; + } + } + } else { + dynamic = true; + } + + statistics.onMeshDrawn(mesh, lod); + +// if (!dynamic) { + // dealing with a static object, generate display list +// renderMeshDisplayList(mesh); +// } else { + renderMeshDefault(mesh, lod, count); +// } + + + } + + public void setAlphaToCoverage(boolean value) { + } + + public void setShader(Shader shader) { + } + + public void deleteShader(Shader shader) { + } + + public void deleteShaderSource(ShaderSource source) { + } + + public void copyFrameBuffer(FrameBuffer src, FrameBuffer dst) { + } + + public void copyFrameBuffer(FrameBuffer src, FrameBuffer dst, boolean copyDepth) { + } + + public void setMainFrameBufferOverride(FrameBuffer fb){ + } + + public void setFrameBuffer(FrameBuffer fb) { + } + + public void readFrameBuffer(FrameBuffer fb, ByteBuffer byteBuf) { + } + + public void deleteFrameBuffer(FrameBuffer fb) { + } + + public void updateBufferData(VertexBuffer vb) { + } + + public void deleteBuffer(VertexBuffer vb) { + } +} diff --git a/engine/src/jogl/com/jme3/renderer/jogl/JoglRenderer.java b/engine/src/jogl/com/jme3/renderer/jogl/JoglRenderer.java new file mode 
100644 index 000000000..6e5f8f296 --- /dev/null +++ b/engine/src/jogl/com/jme3/renderer/jogl/JoglRenderer.java @@ -0,0 +1,1877 @@ +/* + * Copyright (c) 2009-2012 jMonkeyEngine + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * + * * Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * + * * Neither the name of 'jMonkeyEngine' nor the names of its contributors + * may be used to endorse or promote products derived from this software + * without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED + * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF + * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ + +package com.jme3.renderer.jogl; + +import com.jme3.light.LightList; +import com.jme3.material.RenderState; +import com.jme3.math.ColorRGBA; +import com.jme3.math.FastMath; +import com.jme3.math.Matrix4f; +import com.jme3.renderer.Caps; +import com.jme3.renderer.IDList; +import com.jme3.renderer.RenderContext; +import com.jme3.renderer.Renderer; +import com.jme3.renderer.RendererException; +import com.jme3.renderer.Statistics; +import com.jme3.scene.Mesh; +import com.jme3.scene.Mesh.Mode; +import com.jme3.scene.VertexBuffer; +import com.jme3.scene.VertexBuffer.Type; +import com.jme3.scene.VertexBuffer.Usage; +import com.jme3.shader.Shader; +import com.jme3.shader.Shader.ShaderSource; +import com.jme3.texture.FrameBuffer; +import com.jme3.texture.FrameBuffer.RenderBuffer; +import com.jme3.texture.Image; +import com.jme3.texture.Texture; +import com.jme3.texture.Texture.WrapAxis; +import com.jme3.util.BufferUtils; +import com.jme3.util.IntMap; +import com.jme3.util.IntMap.Entry; +import com.jme3.util.NativeObjectManager; +import java.nio.Buffer; +import java.nio.ByteBuffer; +import java.nio.FloatBuffer; +import java.nio.IntBuffer; +import java.util.EnumSet; +import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; +import javax.media.opengl.GL; +import javax.media.opengl.GL2; +import javax.media.opengl.GL2ES1; +import javax.media.opengl.GL2ES2; +import javax.media.opengl.GL2GL3; +import javax.media.opengl.GLContext; +import javax.media.opengl.fixedfunc.GLLightingFunc; +import javax.media.opengl.fixedfunc.GLMatrixFunc; +import javax.media.opengl.fixedfunc.GLPointerFunc; +import jme3tools.converters.MipMapGenerator; + +public class JoglRenderer implements Renderer { + + private static final Logger logger = Logger.getLogger(JoglRenderer.class.getName()); + + protected Statistics statistics = new Statistics(); + + protected Matrix4f worldMatrix = new Matrix4f(); + + protected Matrix4f viewMatrix = new Matrix4f(); + + protected 
Matrix4f projMatrix = new Matrix4f(); + + protected FloatBuffer fb16 = BufferUtils.createFloatBuffer(16); + + private final IntBuffer intBuf1 = BufferUtils.createIntBuffer(1); + + private final IntBuffer intBuf16 = BufferUtils.createIntBuffer(16); + + private RenderContext context = new RenderContext(); + + private NativeObjectManager objManager = new NativeObjectManager(); + + private EnumSet caps = EnumSet.noneOf(Caps.class); + + private Shader boundShader; + + private int initialDrawBuf, initialReadBuf; + + private int glslVer; + + private int vertexTextureUnits; + + private int fragTextureUnits; + + private int vertexUniforms; + + private int fragUniforms; + + private int vertexAttribs; + + private int maxFBOSamples; + + private int maxFBOAttachs; + + private int maxMRTFBOAttachs; + + private int maxRBSize; + + private int maxTexSize; + + private int maxCubeTexSize; + + private int maxVertCount; + + private int maxTriCount; + + private int maxColorTexSamples; + + private int maxDepthTexSamples; + + private boolean tdc; + + private boolean powerOf2 = false; + + private boolean hardwareMips = false; + + private boolean vbo = false; + + private int vpX, vpY, vpW, vpH; + + private FrameBuffer lastFb = null; + private FrameBuffer mainFbOverride = null; + + public JoglRenderer() { + } + + public Statistics getStatistics() { + return statistics; + } + + public void initialize() { + GL gl = GLContext.getCurrentGL(); + logger.log(Level.INFO, "Vendor: {0}", gl.glGetString(GL.GL_VENDOR)); + logger.log(Level.INFO, "Renderer: {0}", gl.glGetString(GL.GL_RENDERER)); + logger.log(Level.INFO, "Version: {0}", gl.glGetString(GL.GL_VERSION)); + + applyRenderState(RenderState.DEFAULT); + + powerOf2 = true/*gl.isExtensionAvailable("GL_ARB_texture_non_power_of_two")*/; + hardwareMips = true/*gl.isExtensionAvailable("GL_SGIS_generate_mipmap")*/; + vbo = true/*gl.isExtensionAvailable("GL_ARB_vertex_buffer_object")*/; + + if (gl.isExtensionAvailable("GL_VERSION_2_0")) { + 
caps.add(Caps.OpenGL20); + } + if (gl.isExtensionAvailable("GL_VERSION_2_1")) { + caps.add(Caps.OpenGL21); + } + if (gl.isExtensionAvailable("GL_VERSION_3_0")) { + caps.add(Caps.OpenGL30); + } + + String versionStr = gl.glGetString(GL2ES2.GL_SHADING_LANGUAGE_VERSION); + if (versionStr == null || versionStr.equals("")) { + glslVer = -1; + // no, I need the support of low end graphics cards too + /*throw new UnsupportedOperationException("GLSL and OpenGL2 is " + + "required for the JOGL " + + "renderer!");*/ + } + else { + int spaceIdx = versionStr.indexOf(" "); + if (spaceIdx >= 1) { + versionStr = versionStr.substring(0, spaceIdx); + } + float version = Float.parseFloat(versionStr); + glslVer = (int) (version * 100); + + switch (glslVer) { + default: + if (glslVer < 400) { + break; + } + + // so that future OpenGL revisions wont break jme3 + + // fall through intentional + case 400: + case 330: + case 150: + caps.add(Caps.GLSL150); + case 140: + caps.add(Caps.GLSL140); + case 130: + caps.add(Caps.GLSL130); + case 120: + caps.add(Caps.GLSL120); + case 110: + caps.add(Caps.GLSL110); + case 100: + caps.add(Caps.GLSL100); + break; + } + // N.B: do NOT force GLSL100 support + } + + gl.glGetIntegerv(GL2GL3.GL_DRAW_BUFFER, intBuf1); + initialDrawBuf = intBuf1.get(0); + gl.glGetIntegerv(GL2GL3.GL_READ_BUFFER, intBuf1); + initialReadBuf = intBuf1.get(0); + + gl.glGetIntegerv(GL2ES2.GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS, intBuf16); + vertexTextureUnits = intBuf16.get(0); + logger.log(Level.FINER, "VTF Units: {0}", vertexTextureUnits); + if (vertexTextureUnits > 0) { + caps.add(Caps.VertexTextureFetch); + } + + gl.glGetIntegerv(GL2ES2.GL_MAX_TEXTURE_IMAGE_UNITS, intBuf16); + fragTextureUnits = intBuf16.get(0); + logger.log(Level.FINER, "Texture Units: {0}", fragTextureUnits); + + gl.glGetIntegerv(GL2GL3.GL_MAX_VERTEX_UNIFORM_COMPONENTS, intBuf16); + vertexUniforms = intBuf16.get(0); + logger.log(Level.FINER, "Vertex Uniforms: {0}", vertexUniforms); + + 
gl.glGetIntegerv(GL2GL3.GL_MAX_FRAGMENT_UNIFORM_COMPONENTS, intBuf16); + fragUniforms = intBuf16.get(0); + logger.log(Level.FINER, "Fragment Uniforms: {0}", fragUniforms); + + gl.glGetIntegerv(GL2ES2.GL_MAX_VERTEX_ATTRIBS, intBuf16); + vertexAttribs = intBuf16.get(0); + logger.log(Level.FINER, "Vertex Attributes: {0}", vertexAttribs); + + gl.glGetIntegerv(GL2GL3.GL_MAX_VARYING_FLOATS, intBuf16); + int varyingFloats = intBuf16.get(0); + logger.log(Level.FINER, "Varying Floats: {0}", varyingFloats); + + gl.glGetIntegerv(GL.GL_SUBPIXEL_BITS, intBuf16); + int subpixelBits = intBuf16.get(0); + logger.log(Level.FINER, "Subpixel Bits: {0}", subpixelBits); + + gl.glGetIntegerv(GL2GL3.GL_MAX_ELEMENTS_VERTICES, intBuf16); + maxVertCount = intBuf16.get(0); + logger.log(Level.FINER, "Preferred Batch Vertex Count: {0}", maxVertCount); + + gl.glGetIntegerv(GL2GL3.GL_MAX_ELEMENTS_INDICES, intBuf16); + maxTriCount = intBuf16.get(0); + logger.log(Level.FINER, "Preferred Batch Index Count: {0}", maxTriCount); + + gl.glGetIntegerv(GL.GL_MAX_TEXTURE_SIZE, intBuf16); + maxTexSize = intBuf16.get(0); + logger.log(Level.FINER, "Maximum Texture Resolution: {0}", maxTexSize); + + gl.glGetIntegerv(GL.GL_MAX_CUBE_MAP_TEXTURE_SIZE, intBuf16); + maxCubeTexSize = intBuf16.get(0); + logger.log(Level.FINER, "Maximum CubeMap Resolution: {0}", maxCubeTexSize); + + // if (gl.isExtensionAvailable("GL_ARB_color_buffer_float")) { + // XXX: Require both 16 and 32 bit float support for FloatColorBuffer. 
+ // if (gl.isExtensionAvailable("GL_ARB_half_float_pixel")) { + caps.add(Caps.FloatColorBuffer); + // } + // } + + // if (gl.isExtensionAvailable("GL_ARB_depth_buffer_float")) { + caps.add(Caps.FloatDepthBuffer); + // } + + // if (gl.isExtensionAvailable("GL_ARB_draw_instanced")) { + // caps.add(Caps.MeshInstancing); + // } + + // if (gl.isExtensionAvailable("GL_ARB_fragment_program")) { + caps.add(Caps.ARBprogram); + // } + + // if (gl.isExtensionAvailable("GL_ARB_texture_buffer_object")) { + caps.add(Caps.TextureBuffer); + // } + + // if (gl.isExtensionAvailable("GL_ARB_texture_float")) { + // if (gl.isExtensionAvailable("GL_ARB_half_float_pixel")) { + caps.add(Caps.FloatTexture); + // } + // } + + // if (gl.isExtensionAvailable("GL_ARB_vertex_array_object")) { + caps.add(Caps.VertexBufferArray); + // } + + boolean latc = gl.isExtensionAvailable("GL_EXT_texture_compression_latc"); + boolean atdc = gl.isExtensionAvailable("GL_ATI_texture_compression_3dc"); + if (latc || atdc) { + caps.add(Caps.TextureCompressionLATC); + if (atdc && !latc) { + tdc = true; + } + } + + // if (gl.isExtensionAvailable("GL_EXT_packed_float")) { + caps.add(Caps.PackedFloatColorBuffer); + // if (gl.isExtensionAvailable("GL_ARB_half_float_pixel")) { + // because textures are usually uploaded as RGB16F + // need half-float pixel + caps.add(Caps.PackedFloatTexture); + // } + // } + + // if (gl.isExtensionAvailable("GL_EXT_texture_array")) { + caps.add(Caps.TextureArray); + // } + + // if (gl.isExtensionAvailable("GL_EXT_texture_shared_exponent")) { + caps.add(Caps.SharedExponentTexture); + // } + + if (gl.isExtensionAvailable("GL_EXT_framebuffer_object")) { + caps.add(Caps.FrameBuffer); + + if (gl.isExtensionAvailable("GL_ARB_texture_multisample")) { + caps.add(Caps.TextureMultisample); + + gl.glGetIntegerv(GL2.GL_MAX_COLOR_TEXTURE_SAMPLES, intBuf16); + maxColorTexSamples = intBuf16.get(0); + logger.log(Level.FINER, "Texture Multisample Color Samples: {0}", maxColorTexSamples); + + 
gl.glGetIntegerv(GL2.GL_MAX_DEPTH_TEXTURE_SAMPLES, intBuf16); + maxDepthTexSamples = intBuf16.get(0); + logger.log(Level.FINER, "Texture Multisample Depth Samples: {0}", maxDepthTexSamples); + } + } + + gl.glGetIntegerv(GL.GL_MAX_RENDERBUFFER_SIZE, intBuf16); + maxRBSize = intBuf16.get(0); + logger.log(Level.FINER, "FBO RB Max Size: {0}", maxRBSize); + + gl.glGetIntegerv(GL2GL3.GL_MAX_COLOR_ATTACHMENTS, intBuf16); + maxFBOAttachs = intBuf16.get(0); + logger.log(Level.FINER, "FBO Max renderbuffers: {0}", maxFBOAttachs); + + if (gl.isExtensionAvailable("GL_EXT_framebuffer_multisample")) { + caps.add(Caps.FrameBufferMultisample); + + gl.glGetIntegerv(GL2GL3.GL_MAX_SAMPLES, intBuf16); + maxFBOSamples = intBuf16.get(0); + logger.log(Level.FINER, "FBO Max Samples: {0}", maxFBOSamples); + } + + if (gl.isExtensionAvailable("GL_ARB_draw_buffers")) { + caps.add(Caps.FrameBufferMRT); + gl.glGetIntegerv(GL2GL3.GL_MAX_DRAW_BUFFERS, intBuf16); + maxMRTFBOAttachs = intBuf16.get(0); + logger.log(Level.FINER, "FBO Max MRT renderbuffers: {0}", maxMRTFBOAttachs); + } + // } + + // if (gl.isExtensionAvailable("GL_ARB_multisample")) { + gl.glGetIntegerv(GL.GL_SAMPLE_BUFFERS, intBuf16); + boolean available = intBuf16.get(0) != 0; + gl.glGetIntegerv(GL.GL_SAMPLES, intBuf16); + int samples = intBuf16.get(0); + logger.log(Level.FINER, "Samples: {0}", samples); + boolean enabled = gl.glIsEnabled(GL.GL_MULTISAMPLE); + if (samples > 0 && available && !enabled) { + gl.glEnable(GL.GL_MULTISAMPLE); + } + // } + } + + public EnumSet getCaps() { + return caps; + } + + public void setBackgroundColor(ColorRGBA color) { + GL gl = GLContext.getCurrentGL(); + gl.glClearColor(color.r, color.g, color.b, color.a); + } + + public void setAlphaToCoverage(boolean value) { + if (caps.contains(Caps.Multisample)) { + GL gl = GLContext.getCurrentGL(); + if (value) { + gl.glEnable(GL2.GL_SAMPLE_ALPHA_TO_COVERAGE); + } else { + gl.glDisable(GL2.GL_SAMPLE_ALPHA_TO_COVERAGE); + } + } + } + + public void cleanup() { 
+ objManager.deleteAllObjects(this); + } + + public void resetGLObjects() { + objManager.resetObjects(); + statistics.clearMemory(); + boundShader = null; + lastFb = null; + context.reset(); + } + + public void clearBuffers(boolean color, boolean depth, boolean stencil) { + GL gl = GLContext.getCurrentGL(); + int bits = 0; + if (color) { + bits = GL.GL_COLOR_BUFFER_BIT; + } + if (depth) { + bits |= GL.GL_DEPTH_BUFFER_BIT; + } + if (stencil) { + bits |= GL.GL_STENCIL_BUFFER_BIT; + } + if (bits != 0) { + gl.glClear(bits); + } + } + + public void applyRenderState(RenderState state) { + GL gl = GLContext.getCurrentGL(); + if (state.isWireframe() && !context.wireframe) { + gl.getGL2().glPolygonMode(GL.GL_FRONT_AND_BACK, GL2GL3.GL_LINE); + context.wireframe = true; + } + else if (!state.isWireframe() && context.wireframe) { + gl.getGL2().glPolygonMode(GL.GL_FRONT_AND_BACK, GL2GL3.GL_FILL); + context.wireframe = false; + } + if (state.isDepthTest() && !context.depthTestEnabled) { + gl.glEnable(GL.GL_DEPTH_TEST); + gl.glDepthFunc(GL.GL_LEQUAL); + context.depthTestEnabled = true; + } + else if (!state.isDepthTest() && context.depthTestEnabled) { + gl.glDisable(GL.GL_DEPTH_TEST); + context.depthTestEnabled = false; + } + if (state.isAlphaTest() && context.alphaTestFallOff == 0) { + gl.glEnable(GL2ES1.GL_ALPHA_TEST); + gl.getGL2().glAlphaFunc(GL.GL_GREATER, state.getAlphaFallOff()); + context.alphaTestFallOff = state.getAlphaFallOff(); + } + else if (!state.isAlphaTest() && context.alphaTestFallOff != 0) { + gl.glDisable(GL2ES1.GL_ALPHA_TEST); + context.alphaTestFallOff = 0; + } + if (state.isDepthWrite() && !context.depthWriteEnabled) { + gl.glDepthMask(true); + context.depthWriteEnabled = true; + } + else if (!state.isDepthWrite() && context.depthWriteEnabled) { + gl.glDepthMask(false); + context.depthWriteEnabled = false; + } + if (state.isColorWrite() && !context.colorWriteEnabled) { + gl.glColorMask(true, true, true, true); + context.colorWriteEnabled = true; + } + else 
if (!state.isColorWrite() && context.colorWriteEnabled) { + gl.glColorMask(false, false, false, false); + context.colorWriteEnabled = false; + } + if (state.isPolyOffset()) { + if (!context.polyOffsetEnabled) { + gl.glEnable(GL.GL_POLYGON_OFFSET_FILL); + gl.glPolygonOffset(state.getPolyOffsetFactor(), state.getPolyOffsetUnits()); + context.polyOffsetEnabled = true; + context.polyOffsetFactor = state.getPolyOffsetFactor(); + context.polyOffsetUnits = state.getPolyOffsetUnits(); + } + else { + if (state.getPolyOffsetFactor() != context.polyOffsetFactor + || state.getPolyOffsetUnits() != context.polyOffsetUnits) { + gl.glPolygonOffset(state.getPolyOffsetFactor(), state.getPolyOffsetUnits()); + context.polyOffsetFactor = state.getPolyOffsetFactor(); + context.polyOffsetUnits = state.getPolyOffsetUnits(); + } + } + } + else { + if (context.polyOffsetEnabled) { + gl.glDisable(GL.GL_POLYGON_OFFSET_FILL); + context.polyOffsetEnabled = false; + context.polyOffsetFactor = 0; + context.polyOffsetUnits = 0; + } + } + if (state.getFaceCullMode() != context.cullMode) { + if (state.getFaceCullMode() == RenderState.FaceCullMode.Off) { + gl.glDisable(GL.GL_CULL_FACE); + } + else { + gl.glEnable(GL.GL_CULL_FACE); + } + + switch (state.getFaceCullMode()) { + case Off: + break; + case Back: + gl.glCullFace(GL.GL_BACK); + break; + case Front: + gl.glCullFace(GL.GL_FRONT); + break; + case FrontAndBack: + gl.glCullFace(GL.GL_FRONT_AND_BACK); + break; + default: + throw new UnsupportedOperationException("Unrecognized face cull mode: " + + state.getFaceCullMode()); + } + + context.cullMode = state.getFaceCullMode(); + } + + if (state.getBlendMode() != context.blendMode) { + if (state.getBlendMode() == RenderState.BlendMode.Off) { + gl.glDisable(GL.GL_BLEND); + } + else { + gl.glEnable(GL.GL_BLEND); + } + + switch (state.getBlendMode()) { + case Off: + break; + case Additive: + gl.glBlendFunc(GL.GL_ONE, GL.GL_ONE); + break; + case AlphaAdditive: + gl.glBlendFunc(GL.GL_SRC_ALPHA, GL.GL_ONE); 
+ break; + case Alpha: + gl.glBlendFunc(GL.GL_SRC_ALPHA, GL.GL_ONE_MINUS_SRC_ALPHA); + break; + case Color: + gl.glBlendFunc(GL.GL_ONE, GL.GL_ONE_MINUS_SRC_COLOR); + break; + case PremultAlpha: + gl.glBlendFunc(GL.GL_ONE, GL.GL_ONE_MINUS_SRC_ALPHA); + break; + case Modulate: + gl.glBlendFunc(GL.GL_DST_COLOR, GL.GL_ZERO); + break; + case ModulateX2: + gl.glBlendFunc(GL.GL_DST_COLOR, GL.GL_SRC_COLOR); + break; + default: + throw new UnsupportedOperationException("Unrecognized blend mode: " + + state.getBlendMode()); + } + + context.blendMode = state.getBlendMode(); + } + } + + public void onFrame() { + objManager.deleteUnused(this); + } + + public void setDepthRange(float start, float end) { + GL gl = GLContext.getCurrentGL(); + gl.glDepthRange(start, end); + } + + public void setViewPort(int x, int y, int width, int height) { + GL gl = GLContext.getCurrentGL(); + gl.glViewport(x, y, width, height); + vpX = x; + vpY = y; + vpW = width; + vpH = height; + } + + public void setClipRect(int x, int y, int width, int height) { + GL gl = GLContext.getCurrentGL(); + if (!context.clipRectEnabled) { + gl.glEnable(GL.GL_SCISSOR_TEST); + context.clipRectEnabled = true; + } + gl.glScissor(x, y, width, height); + } + + public void clearClipRect() { + if (context.clipRectEnabled) { + GL gl = GLContext.getCurrentGL(); + gl.glDisable(GL.GL_SCISSOR_TEST); + context.clipRectEnabled = false; + } + } + + private FloatBuffer storeMatrix(Matrix4f matrix, FloatBuffer store) { + store.rewind(); + matrix.fillFloatBuffer(store, true); + store.rewind(); + return store; + } + + public void setViewProjectionMatrices(Matrix4f viewMatrix, Matrix4f projMatrix) { + this.viewMatrix.set(viewMatrix); + this.projMatrix.set(projMatrix); + GL gl = GLContext.getCurrentGL(); + if (context.matrixMode != GLMatrixFunc.GL_PROJECTION) { + gl.getGL2().glMatrixMode(GLMatrixFunc.GL_PROJECTION); + context.matrixMode = GLMatrixFunc.GL_PROJECTION; + } + + gl.getGL2().glLoadMatrixf(storeMatrix(projMatrix, fb16)); + } + 
+ public void setWorldMatrix(Matrix4f worldMatrix) { + this.worldMatrix.set(worldMatrix); + GL gl = GLContext.getCurrentGL(); + if (context.matrixMode != GLMatrixFunc.GL_MODELVIEW) { + gl.getGL2().glMatrixMode(GLMatrixFunc.GL_MODELVIEW); + context.matrixMode = GLMatrixFunc.GL_MODELVIEW; + } + + gl.getGL2().glLoadMatrixf(storeMatrix(viewMatrix, fb16)); + gl.getGL2().glMultMatrixf(storeMatrix(worldMatrix, fb16)); + } + + public void setLighting(LightList list) { + /*GL gl = GLContext.getCurrentGL(); + if (list == null || list.size() == 0) { + // turn off lighting + gl.glDisable(GLLightingFunc.GL_LIGHTING); + return; + } + + gl.glEnable(GLLightingFunc.GL_LIGHTING); + gl.getGL2().glShadeModel(GLLightingFunc.GL_SMOOTH); + + float[] temp = new float[4]; + + // reset model view to specify + // light positions in world space + // instead of model space + // gl.glPushMatrix(); + // gl.glLoadIdentity(); + + for (int i = 0; i < list.size() + 1; i++) { + + int lightId = GLLightingFunc.GL_LIGHT0 + i; + + if (list.size() <= i) { + // goes beyond the num lights we need + // disable it + gl.glDisable(lightId); + break; + } + + Light l = list.get(i); + + if (!l.isEnabled()) { + gl.glDisable(lightId); + continue; + } + + ColorRGBA color = l.getColor(); + color.toArray(temp); + + gl.glEnable(lightId); + gl.getGL2().glLightfv(lightId, GLLightingFunc.GL_DIFFUSE, temp, 0); + gl.getGL2().glLightfv(lightId, GLLightingFunc.GL_SPECULAR, temp, 0); + + ColorRGBA.Black.toArray(temp); + gl.getGL2().glLightfv(lightId, GLLightingFunc.GL_AMBIENT, temp, 0); + + switch (l.getType()) { + case Directional: + DirectionalLight dl = (DirectionalLight) l; + dl.getDirection().toArray(temp); + temp[3] = 0f; // marks to GL its a directional light + gl.getGL2().glLightfv(lightId, GLLightingFunc.GL_POSITION, temp, 0); + break; + case Point: + PointLight pl = (PointLight) l; + pl.getPosition().toArray(temp); + temp[3] = 1f; // marks to GL its a point light + gl.getGL2().glLightfv(lightId, 
GLLightingFunc.GL_POSITION, temp, 0); + break; + } + + } + + // restore modelview to original value + // gl.glPopMatrix(); + * */ + } + + public void deleteShaderSource(ShaderSource source) { + } + + public void setShader(Shader shader) { + } + + public void deleteShader(Shader shader) { + } + + public void copyFrameBuffer(FrameBuffer src, FrameBuffer dst) { + copyFrameBuffer(src, dst, true); + } + + public void setFrameBuffer(FrameBuffer fb) { + if (lastFb == fb) { + return; + } + + GL gl = GLContext.getCurrentGL(); + if (fb == null) { + // unbind any fbos + if (context.boundFBO != 0) { + gl.glBindFramebuffer(GL.GL_FRAMEBUFFER, 0); + statistics.onFrameBufferUse(null, true); + + context.boundFBO = 0; + } + // select back buffer + if (context.boundDrawBuf != -1) { + gl.getGL2().glDrawBuffer(initialDrawBuf); + context.boundDrawBuf = -1; + } + if (context.boundReadBuf != -1) { + gl.getGL2().glReadBuffer(initialReadBuf); + context.boundReadBuf = -1; + } + + lastFb = null; + } + else { + if (fb.isUpdateNeeded()) { + updateFrameBuffer(fb); + } + + if (context.boundFBO != fb.getId()) { + gl.glBindFramebuffer(GL.GL_FRAMEBUFFER, fb.getId()); + statistics.onFrameBufferUse(fb, true); + + // update viewport to reflect framebuffer's resolution + setViewPort(0, 0, fb.getWidth(), fb.getHeight()); + + context.boundFBO = fb.getId(); + } + else { + statistics.onFrameBufferUse(fb, false); + } + if (fb.getNumColorBuffers() == 0) { + // make sure to select NONE as draw buf + // no color buffer attached. 
select NONE + if (context.boundDrawBuf != -2) { + gl.getGL2().glDrawBuffer(GL.GL_NONE); + context.boundDrawBuf = -2; + } + if (context.boundReadBuf != -2) { + gl.getGL2().glReadBuffer(GL.GL_NONE); + context.boundReadBuf = -2; + } + } + else { + if (fb.isMultiTarget()) { + if (fb.getNumColorBuffers() > maxMRTFBOAttachs) { + throw new UnsupportedOperationException("Framebuffer has more" + + " targets than are supported" + " on the system!"); + } + + if (context.boundDrawBuf != 100 + fb.getNumColorBuffers()) { + intBuf16.clear(); + for (int i = 0; i < fb.getNumColorBuffers(); i++) { + intBuf16.put(GL.GL_COLOR_ATTACHMENT0 + i); + } + + intBuf16.flip(); + gl.getGL2().glDrawBuffers(intBuf16.limit(), intBuf16); + context.boundDrawBuf = 100 + fb.getNumColorBuffers(); + } + } + else { + RenderBuffer rb = fb.getColorBuffer(fb.getTargetIndex()); + // select this draw buffer + if (context.boundDrawBuf != rb.getSlot()) { + gl.getGL2().glDrawBuffer(GL.GL_COLOR_ATTACHMENT0 + rb.getSlot()); + context.boundDrawBuf = rb.getSlot(); + } + } + } + + assert fb.getId() >= 0; + assert context.boundFBO == fb.getId(); + lastFb = fb; + } + + try { + checkFrameBufferError(); + } + catch (IllegalStateException ex) { + logger.log(Level.SEVERE, "Problem FBO:\n{0}", fb); + throw ex; + } + } + + public void updateFrameBuffer(FrameBuffer fb) { + GL gl = GLContext.getCurrentGL(); + int id = fb.getId(); + if (id == -1) { + // create FBO + gl.glGenFramebuffers(1, intBuf1); + id = intBuf1.get(0); + fb.setId(id); + objManager.registerForCleanup(fb); + + statistics.onNewFrameBuffer(); + } + + if (context.boundFBO != id) { + gl.glBindFramebuffer(GL.GL_FRAMEBUFFER, id); + // binding an FBO automatically sets draw buf to GL_COLOR_ATTACHMENT0 + context.boundDrawBuf = 0; + context.boundFBO = id; + } + + FrameBuffer.RenderBuffer depthBuf = fb.getDepthBuffer(); + if (depthBuf != null) { + updateFrameBufferAttachment(fb, depthBuf); + } + + for (int i = 0; i < fb.getNumColorBuffers(); i++) { + 
FrameBuffer.RenderBuffer colorBuf = fb.getColorBuffer(i); + updateFrameBufferAttachment(fb, colorBuf); + } + + fb.clearUpdateNeeded(); + } + + private int convertAttachmentSlot(int attachmentSlot) { + // can also add support for stencil here + if (attachmentSlot == -100) { + return GL.GL_DEPTH_ATTACHMENT; + } + else if (attachmentSlot < 0 || attachmentSlot >= 16) { + throw new UnsupportedOperationException("Invalid FBO attachment slot: " + + attachmentSlot); + } + + return GL.GL_COLOR_ATTACHMENT0 + attachmentSlot; + } + + public void updateRenderTexture(FrameBuffer fb, RenderBuffer rb) { + GL gl = GLContext.getCurrentGL(); + Texture tex = rb.getTexture(); + Image image = tex.getImage(); + if (image.isUpdateNeeded()) { + updateTexImageData(image, tex.getType(), 0); + + // NOTE: For depth textures, sets nearest/no-mips mode + // Required to fix "framebuffer unsupported" + // for old NVIDIA drivers! + setupTextureParams(tex); + } + + gl.glFramebufferTexture2D(GL.GL_FRAMEBUFFER, convertAttachmentSlot(rb.getSlot()), + convertTextureType(tex.getType(), image.getMultiSamples(), rb.getFace()), + image.getId(), 0); + } + + public void updateFrameBufferAttachment(FrameBuffer fb, RenderBuffer rb) { + boolean needAttach; + if (rb.getTexture() == null) { + // if it hasn't been created yet, then attach is required. 
+ needAttach = rb.getId() == -1; + updateRenderBuffer(fb, rb); + } + else { + needAttach = false; + updateRenderTexture(fb, rb); + } + if (needAttach) { + GL gl = GLContext.getCurrentGL(); + gl.glFramebufferRenderbuffer(GL.GL_FRAMEBUFFER, convertAttachmentSlot(rb.getSlot()), + GL.GL_RENDERBUFFER, rb.getId()); + } + } + + private void checkFrameBufferError() { + GL gl = GLContext.getCurrentGL(); + int status = gl.glCheckFramebufferStatus(GL.GL_FRAMEBUFFER); + switch (status) { + case GL.GL_FRAMEBUFFER_COMPLETE: + break; + case GL.GL_FRAMEBUFFER_UNSUPPORTED: + // Choose different formats + throw new IllegalStateException("Framebuffer object format is " + + "unsupported by the video hardware."); + case GL.GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT: + throw new IllegalStateException("Framebuffer has erronous attachment."); + case GL.GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT: + throw new IllegalStateException("Framebuffer is missing required attachment."); + case GL.GL_FRAMEBUFFER_INCOMPLETE_DIMENSIONS: + throw new IllegalStateException( + "Framebuffer attachments must have same dimensions."); + case GL.GL_FRAMEBUFFER_INCOMPLETE_FORMATS: + throw new IllegalStateException("Framebuffer attachments must have same formats."); + case GL2GL3.GL_FRAMEBUFFER_INCOMPLETE_DRAW_BUFFER: + throw new IllegalStateException("Incomplete draw buffer."); + case GL2GL3.GL_FRAMEBUFFER_INCOMPLETE_READ_BUFFER: + throw new IllegalStateException("Incomplete read buffer."); + case GL2GL3.GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE: + throw new IllegalStateException("Incomplete multisample buffer."); + default: + // Programming error; will fail on all hardware + throw new IllegalStateException("Some video driver error " + + "or programming error occured. " + + "Framebuffer object status is invalid. 
"); + } + } + + private void updateRenderBuffer(FrameBuffer fb, RenderBuffer rb) { + GL gl = GLContext.getCurrentGL(); + int id = rb.getId(); + if (id == -1) { + gl.glGenRenderbuffers(1, intBuf1); + id = intBuf1.get(0); + rb.setId(id); + } + + if (context.boundRB != id) { + gl.glBindRenderbuffer(GL.GL_RENDERBUFFER, id); + context.boundRB = id; + } + + if (fb.getWidth() > maxRBSize || fb.getHeight() > maxRBSize) { + throw new UnsupportedOperationException("Resolution " + fb.getWidth() + ":" + + fb.getHeight() + " is not supported."); + } + + if (fb.getSamples() > 0 && gl.isExtensionAvailable("GL_EXT_framebuffer_multisample") + && gl.isFunctionAvailable("glRenderbufferStorageMultisample")) { + int samples = fb.getSamples(); + if (maxFBOSamples < samples) { + samples = maxFBOSamples; + } + gl.getGL2() + .glRenderbufferStorageMultisample(GL.GL_RENDERBUFFER, samples, + TextureUtil.convertTextureFormat(rb.getFormat()), fb.getWidth(), + fb.getHeight()); + } + else { + gl.glRenderbufferStorage(GL.GL_RENDERBUFFER, + TextureUtil.convertTextureFormat(rb.getFormat()), fb.getWidth(), fb.getHeight()); + } + } + + public void deleteFrameBuffer(FrameBuffer fb) { + } + + public void readFrameBuffer(FrameBuffer fb, ByteBuffer byteBuf) { + if (fb != null) { + return; + } + GL gl = GLContext.getCurrentGL(); + gl.glReadPixels(vpX, vpY, vpW, vpH, GL2GL3.GL_BGRA, GL.GL_UNSIGNED_BYTE, byteBuf); + } + + private int convertTextureType(Texture.Type type, int samples, int face) { + switch (type) { + case TwoDimensional: + if (samples > 1) { + return GL2.GL_TEXTURE_2D_MULTISAMPLE; + } else { + return GL.GL_TEXTURE_2D; + } + case TwoDimensionalArray: + if (samples > 1) { + return GL2.GL_TEXTURE_2D_MULTISAMPLE_ARRAY; + } else { + return GL2.GL_TEXTURE_2D_ARRAY; + } + case ThreeDimensional: + return GL2.GL_TEXTURE_3D; + case CubeMap: + if (face < 0) { + return GL.GL_TEXTURE_CUBE_MAP; + } else if (face < 6) { + return GL.GL_TEXTURE_CUBE_MAP_POSITIVE_X + face; + } else { + throw new 
UnsupportedOperationException("Invalid cube map face index: " + face); + } + default: + throw new UnsupportedOperationException("Unknown texture type: " + type); + } + } + + private int convertMagFilter(Texture.MagFilter filter) { + switch (filter) { + case Bilinear: + return GL.GL_LINEAR; + case Nearest: + return GL.GL_NEAREST; + default: + throw new UnsupportedOperationException("Unknown mag filter: " + filter); + } + } + + private int convertMinFilter(Texture.MinFilter filter) { + switch (filter) { + case Trilinear: + return GL.GL_LINEAR_MIPMAP_LINEAR; + case BilinearNearestMipMap: + return GL.GL_LINEAR_MIPMAP_NEAREST; + case NearestLinearMipMap: + return GL.GL_NEAREST_MIPMAP_LINEAR; + case NearestNearestMipMap: + return GL.GL_NEAREST_MIPMAP_NEAREST; + case BilinearNoMipMaps: + return GL.GL_LINEAR; + case NearestNoMipMaps: + return GL.GL_NEAREST; + default: + throw new UnsupportedOperationException("Unknown min filter: " + filter); + } + } + + private int convertWrapMode(Texture.WrapMode mode) { + switch (mode) { + case BorderClamp: + return GL2GL3.GL_CLAMP_TO_BORDER; + case Clamp: + return GL2.GL_CLAMP; + case EdgeClamp: + return GL.GL_CLAMP_TO_EDGE; + case Repeat: + return GL.GL_REPEAT; + case MirroredRepeat: + return GL.GL_MIRRORED_REPEAT; + default: + throw new UnsupportedOperationException("Unknown wrap mode: " + mode); + } + } + + @SuppressWarnings("fallthrough") + private void setupTextureParams(Texture tex) { + GL gl = GLContext.getCurrentGL(); + Image image = tex.getImage(); + int target = convertTextureType(tex.getType(), image != null ? 
image.getMultiSamples() : 1, -1); + + // filter things + int minFilter = convertMinFilter(tex.getMinFilter()); + int magFilter = convertMagFilter(tex.getMagFilter()); + gl.glTexParameteri(target, GL.GL_TEXTURE_MIN_FILTER, minFilter); + gl.glTexParameteri(target, GL.GL_TEXTURE_MAG_FILTER, magFilter); + + if (tex.getAnisotropicFilter() > 1) { + if (gl.isExtensionAvailable("GL_EXT_texture_filter_anisotropic")) { + gl.glTexParameterf(target, + GL.GL_TEXTURE_MAX_ANISOTROPY_EXT, + tex.getAnisotropicFilter()); + } + } + + if (context.pointSprite) { + return; // Attempt to fix glTexParameter crash for some ATI GPUs + } + // repeat modes + switch (tex.getType()) { + case ThreeDimensional: + case CubeMap: // cubemaps use 3D coords + gl.glTexParameteri(target, GL2.GL_TEXTURE_WRAP_R, convertWrapMode(tex.getWrap(WrapAxis.R))); + case TwoDimensional: + case TwoDimensionalArray: + gl.glTexParameteri(target, GL2.GL_TEXTURE_WRAP_T, convertWrapMode(tex.getWrap(WrapAxis.T))); + // fall down here is intentional.. +// case OneDimensional: + gl.glTexParameteri(target, GL2.GL_TEXTURE_WRAP_S, convertWrapMode(tex.getWrap(WrapAxis.S))); + break; + default: + throw new UnsupportedOperationException("Unknown texture type: " + tex.getType()); + } + + // R to Texture compare mode + if (tex.getShadowCompareMode() != Texture.ShadowCompareMode.Off) { + gl.glTexParameteri(target, GL2.GL_TEXTURE_COMPARE_MODE, GL2.GL_COMPARE_R_TO_TEXTURE); + gl.glTexParameteri(target, GL2.GL_DEPTH_TEXTURE_MODE, GL2.GL_INTENSITY); + if (tex.getShadowCompareMode() == Texture.ShadowCompareMode.GreaterOrEqual) { + gl.glTexParameteri(target, GL2.GL_TEXTURE_COMPARE_FUNC, GL.GL_GEQUAL); + } else { + gl.glTexParameteri(target, GL2.GL_TEXTURE_COMPARE_FUNC, GL.GL_LEQUAL); + } + } + } + + /** + * Uploads the given image to the GL driver. + * + * @param img The image to upload + * @param type How the data in the image argument should be interpreted. 
+ * @param unit The texture slot to be used to upload the image, not important + */ + public void updateTexImageData(Image img, Texture.Type type, int unit) { + int texId = img.getId(); + GL gl = GLContext.getCurrentGL(); + if (texId == -1) { + // create texture + gl.glGenTextures(1, intBuf1); + texId = intBuf1.get(0); + img.setId(texId); + objManager.registerForCleanup(img); + + statistics.onNewTexture(); + } + + // bind texture + int target = convertTextureType(type, img.getMultiSamples(), -1); + if (context.boundTextureUnit != unit) { + gl.glActiveTexture(GL.GL_TEXTURE0 + unit); + context.boundTextureUnit = unit; + } + if (context.boundTextures[unit] != img) { + gl.glBindTexture(target, texId); + context.boundTextures[unit] = img; + + statistics.onTextureUse(img, true); + } + + if (!img.hasMipmaps() && img.isGeneratedMipmapsRequired()) { + // No pregenerated mips available, + // generate from base level if required + if (!gl.isExtensionAvailable("GL_VERSION_3_0")) { + gl.glTexParameteri(target, GL2.GL_GENERATE_MIPMAP, GL.GL_TRUE); + img.setMipmapsGenerated(true); + } + } else { + // Image already has mipmaps or no mipmap generation desired. +// glTexParameteri(target, GL_TEXTURE_BASE_LEVEL, 0 ); + if (img.getMipMapSizes() != null) { + gl.glTexParameteri(target, GL2.GL_TEXTURE_MAX_LEVEL, img.getMipMapSizes().length - 1); + } + } + + int imageSamples = img.getMultiSamples(); + if (imageSamples > 1) { + if (img.getFormat().isDepthFormat()) { + img.setMultiSamples(Math.min(maxDepthTexSamples, imageSamples)); + } else { + img.setMultiSamples(Math.min(maxColorTexSamples, imageSamples)); + } + } + + // Yes, some OpenGL2 cards (GeForce 5) still dont support NPOT. 
+ if (!gl.isExtensionAvailable("GL_ARB_texture_non_power_of_two")) { + if (img.getWidth() != 0 && img.getHeight() != 0) { + if (!FastMath.isPowerOfTwo(img.getWidth()) + || !FastMath.isPowerOfTwo(img.getHeight())) { + if (img.getData(0) == null) { + throw new RendererException("non-power-of-2 framebuffer textures are not supported by the video hardware"); + } else { + MipMapGenerator.resizeToPowerOf2(img); + } + } + } + } + + // Check if graphics card doesn't support multisample textures + if (!gl.isExtensionAvailable("GL_ARB_texture_multisample")) { + if (img.getMultiSamples() > 1) { + throw new RendererException("Multisample textures not supported by graphics hardware"); + } + } + + if (target == GL.GL_TEXTURE_CUBE_MAP) { + List data = img.getData(); + if (data.size() != 6) { + logger.log(Level.WARNING, "Invalid texture: {0}\n" + + "Cubemap textures must contain 6 data units.", img); + return; + } + for (int i = 0; i < 6; i++) { + TextureUtil.uploadTexture(img, GL.GL_TEXTURE_CUBE_MAP_POSITIVE_X + i, i, 0); + } + } else if (target == GL.GL_TEXTURE_2D_ARRAY) { + List data = img.getData(); + // -1 index specifies prepare data for 2D Array + TextureUtil.uploadTexture(img, target, -1, 0); + for (int i = 0; i < data.size(); i++) { + // upload each slice of 2D array in turn + // this time with the appropriate index + TextureUtil.uploadTexture(img, target, i, 0); + } + } else { + TextureUtil.uploadTexture(img, target, 0, 0); + } + + if (img.getMultiSamples() != imageSamples) { + img.setMultiSamples(imageSamples); + } + + if (gl.isExtensionAvailable("GL_VERSION_3_0")) { + if (!img.hasMipmaps() && img.isGeneratedMipmapsRequired() && img.getData() != null) { + // XXX: Required for ATI + gl.glEnable(target); + gl.glGenerateMipmap(target); + gl.glDisable(target); + img.setMipmapsGenerated(true); + } + } + + img.clearUpdateNeeded(); + } + + private void checkTexturingUsed() { + IDList textureList = context.textureIndexList; + GL gl = GLContext.getCurrentGL(); + // old mesh used 
texturing, new mesh doesn't use it + // should actually go through entire oldLen and + // disable texturing for each unit.. but that's for later. + if (textureList.oldLen > 0 && textureList.newLen == 0) { + gl.glDisable(GL.GL_TEXTURE_2D); + } + } + + public void setTexture(int unit, Texture tex) { + GL gl = GLContext.getCurrentGL(); + Image image = tex.getImage(); + if (image.isUpdateNeeded() || (image.isGeneratedMipmapsRequired() && !image.isMipmapsGenerated())) { + updateTexImageData(image, tex.getType(), unit); + } + + int texId = image.getId(); + assert texId != -1; + + Image[] textures = context.boundTextures; + + int type = convertTextureType(tex.getType(), image.getMultiSamples(), -1); +// if (!context.textureIndexList.moveToNew(unit)) { +// if (context.boundTextureUnit != unit){ +// gl.glActiveTexture(GL.GL_TEXTURE0 + unit); +// context.boundTextureUnit = unit; +// } +// gl.glEnable(type); +// } + + if (context.boundTextureUnit != unit) { + gl.glActiveTexture(GL.GL_TEXTURE0 + unit); + context.boundTextureUnit = unit; + } + if (textures[unit] != image) { + gl.glBindTexture(type, texId); + textures[unit] = image; + + statistics.onTextureUse(image, true); + } else { + statistics.onTextureUse(image, false); + } + + setupTextureParams(tex); + } + + public void clearTextureUnits() { + /*GL gl = GLContext.getCurrentGL(); + IDList textureList = context.textureIndexList; + Texture[] textures = context.boundTextures; + for (int i = 0; i < textureList.oldLen; i++) { + int idx = textureList.oldList[i]; + + if (context.boundTextureUnit != idx) { + gl.glActiveTexture(GL.GL_TEXTURE0 + idx); + context.boundTextureUnit = idx; + } + gl.glDisable(convertTextureType(textures[idx].getType())); + textures[idx] = null; + } + context.textureIndexList.copyNewToOld();*/ + } + + public void deleteImage(Image image) { + int texId = image.getId(); + if (texId != -1) { + intBuf1.put(0, texId); + intBuf1.position(0).limit(1); + GL gl = GLContext.getCurrentGL(); + gl.glDeleteTextures(1, 
intBuf1); + image.resetObject(); + + statistics.onDeleteTexture(); + } + } + + private int convertUsage(Usage usage) { + switch (usage) { + case Static: + return GL.GL_STATIC_DRAW; + case Dynamic: + return GL.GL_DYNAMIC_DRAW; + case Stream: + return GL2ES2.GL_STREAM_DRAW; + default: + throw new RuntimeException("Unknown usage type: " + usage); + } + } + + public void updateBufferData(VertexBuffer vb) { + GL gl = GLContext.getCurrentGL(); + int bufId = vb.getId(); + if (bufId == -1) { + // create buffer + gl.glGenBuffers(1, intBuf1); + bufId = intBuf1.get(0); + vb.setId(bufId); + objManager.registerForCleanup(vb); + } + + int target; + if (vb.getBufferType() == VertexBuffer.Type.Index) { + target = GL.GL_ELEMENT_ARRAY_BUFFER; + if (context.boundElementArrayVBO != bufId) { + gl.glBindBuffer(target, bufId); + context.boundElementArrayVBO = bufId; + } + } + else { + target = GL.GL_ARRAY_BUFFER; + if (context.boundArrayVBO != bufId) { + gl.glBindBuffer(target, bufId); + context.boundArrayVBO = bufId; + } + } + + int usage = convertUsage(vb.getUsage()); + Buffer data = vb.getData(); + data.rewind(); + + gl.glBufferData(target, data.capacity() * vb.getFormat().getComponentSize(), data, usage); + + vb.clearUpdateNeeded(); + } + + public void deleteBuffer(VertexBuffer vb) { + GL gl = GLContext.getCurrentGL(); + int bufId = vb.getId(); + if (bufId != -1) { + // delete buffer + intBuf1.put(0, bufId); + intBuf1.position(0).limit(1); + gl.glDeleteBuffers(1, intBuf1); + vb.resetObject(); + } + } + + private int convertArrayType(VertexBuffer.Type type) { + switch (type) { + case Position: + return GLPointerFunc.GL_VERTEX_ARRAY; + case Normal: + return GLPointerFunc.GL_NORMAL_ARRAY; + case TexCoord: + return GLPointerFunc.GL_TEXTURE_COORD_ARRAY; + case Color: + return GLPointerFunc.GL_COLOR_ARRAY; + default: + return -1; // unsupported + } + } + + private int convertVertexFormat(VertexBuffer.Format fmt) { + switch (fmt) { + case Byte: + return GL.GL_BYTE; + case Double: + return 
GL2GL3.GL_DOUBLE; + case Float: + return GL.GL_FLOAT; + case Half: + return GL.GL_HALF_FLOAT; + case Int: + return GL2ES2.GL_INT; + case Short: + return GL.GL_SHORT; + case UnsignedByte: + return GL.GL_UNSIGNED_BYTE; + case UnsignedInt: + return GL2ES2.GL_UNSIGNED_INT; + case UnsignedShort: + return GL.GL_UNSIGNED_SHORT; + default: + throw new UnsupportedOperationException("Unrecognized vertex format: " + fmt); + } + } + + private int convertElementMode(Mesh.Mode mode) { + switch (mode) { + case Points: + return GL.GL_POINTS; + case Lines: + return GL.GL_LINES; + case LineLoop: + return GL.GL_LINE_LOOP; + case LineStrip: + return GL.GL_LINE_STRIP; + case Triangles: + return GL.GL_TRIANGLES; + case TriangleFan: + return GL.GL_TRIANGLE_FAN; + case TriangleStrip: + return GL.GL_TRIANGLE_STRIP; + default: + throw new UnsupportedOperationException("Unrecognized mesh mode: " + mode); + } + } + + private void setVertexAttribVBO(VertexBuffer vb, VertexBuffer idb) { + GL gl = GLContext.getCurrentGL(); + int arrayType = convertArrayType(vb.getBufferType()); + if (arrayType == -1) { + return; // unsupported + } + + if (vb.isUpdateNeeded() && idb == null) { + updateBufferData(vb); + } + + int bufId = idb != null ? 
idb.getId() : vb.getId(); + if (context.boundArrayVBO != bufId) { + gl.glBindBuffer(GL.GL_ARRAY_BUFFER, bufId); + context.boundArrayVBO = bufId; + } + + gl.getGL2().glEnableClientState(arrayType); + context.boundAttribs[vb.getBufferType().ordinal()] = vb; + + if (vb.getBufferType() == Type.Normal) { + // normalize if requested + if (vb.isNormalized() && !context.normalizeEnabled) { + gl.glEnable(GLLightingFunc.GL_NORMALIZE); + context.normalizeEnabled = true; + } + else if (!vb.isNormalized() && context.normalizeEnabled) { + gl.glDisable(GLLightingFunc.GL_NORMALIZE); + context.normalizeEnabled = false; + } + } + + int comps = vb.getNumComponents(); + int type = convertVertexFormat(vb.getFormat()); + + switch (vb.getBufferType()) { + case Position: + gl.getGL2().glVertexPointer(comps, type, vb.getStride(), vb.getOffset()); + break; + case Normal: + gl.getGL2().glNormalPointer(type, vb.getStride(), vb.getOffset()); + break; + case Color: + gl.getGL2().glColorPointer(comps, type, vb.getStride(), vb.getOffset()); + break; + case TexCoord: + gl.getGL2().glTexCoordPointer(comps, type, vb.getStride(), vb.getOffset()); + break; + } + } + + private void drawTriangleListVBO(VertexBuffer indexBuf, Mesh mesh, int count) { + GL gl = GLContext.getCurrentGL(); + if (indexBuf.getBufferType() != VertexBuffer.Type.Index) { + throw new IllegalArgumentException("Only index buffers are allowed as triangle lists."); + } + + if (indexBuf.isUpdateNeeded()) { + updateBufferData(indexBuf); + } + + int bufId = indexBuf.getId(); + assert bufId != -1; + + if (context.boundElementArrayVBO != bufId) { + gl.glBindBuffer(GL.GL_ELEMENT_ARRAY_BUFFER, bufId); + context.boundElementArrayVBO = bufId; + } + + if (mesh.getMode() == Mode.Hybrid) { + int[] modeStart = mesh.getModeStart(); + int[] elementLengths = mesh.getElementLengths(); + + int elMode = convertElementMode(Mode.Triangles); + int fmt = convertVertexFormat(indexBuf.getFormat()); + int elSize = indexBuf.getFormat().getComponentSize(); + // 
int listStart = modeStart[0]; + int stripStart = modeStart[1]; + int fanStart = modeStart[2]; + int curOffset = 0; + for (int i = 0; i < elementLengths.length; i++) { + if (i == stripStart) { + elMode = convertElementMode(Mode.TriangleStrip); + } + else if (i == fanStart) { + //TriangleStrip? + elMode = convertElementMode(Mode.TriangleFan); + } + int elementLength = elementLengths[i]; + gl.glDrawElements(elMode, elementLength, fmt, curOffset); + curOffset += elementLength * elSize; + } + } + else { + gl.glDrawElements(convertElementMode(mesh.getMode()), indexBuf.getData().capacity(), + convertVertexFormat(indexBuf.getFormat()), 0); + } + } + + public void setVertexAttrib(VertexBuffer vb, VertexBuffer idb) { + GL gl = GLContext.getCurrentGL(); + int arrayType = convertArrayType(vb.getBufferType()); + if (arrayType == -1) { + return; // unsupported + } + + gl.getGL2().glEnableClientState(arrayType); + context.boundAttribs[vb.getBufferType().ordinal()] = vb; + + if (vb.getBufferType() == Type.Normal) { + // normalize if requested + if (vb.isNormalized() && !context.normalizeEnabled) { + gl.glEnable(GLLightingFunc.GL_NORMALIZE); + context.normalizeEnabled = true; + } + else if (!vb.isNormalized() && context.normalizeEnabled) { + gl.glDisable(GLLightingFunc.GL_NORMALIZE); + context.normalizeEnabled = false; + } + } + + // NOTE: Use data from interleaved buffer if specified + Buffer data = idb != null ? 
idb.getData() : vb.getData(); + int comps = vb.getNumComponents(); + int type = convertVertexFormat(vb.getFormat()); + data.clear(); + data.position(vb.getOffset()); + + switch (vb.getBufferType()) { + case Position: + gl.getGL2().glVertexPointer(comps, type, vb.getStride(), data); + break; + case Normal: + gl.getGL2().glNormalPointer(type, vb.getStride(), data); + break; + case Color: + gl.getGL2().glColorPointer(comps, type, vb.getStride(), data); + break; + case TexCoord: + gl.getGL2().glTexCoordPointer(comps, type, vb.getStride(), data); + break; + } + } + + public void setVertexAttrib(VertexBuffer vb) { + setVertexAttrib(vb, null); + } + + public void clearVertexAttribs() { + GL gl = GLContext.getCurrentGL(); + for (int i = 0; i < 16; i++) { + VertexBuffer vb = context.boundAttribs[i]; + if (vb != null) { + int arrayType = convertArrayType(vb.getBufferType()); + gl.getGL2().glDisableClientState(arrayType); + context.boundAttribs[vb.getBufferType().ordinal()] = null; + } + } + } + + public void drawTriangleList(VertexBuffer indexBuf, Mesh mesh, int count) { + GL gl = GLContext.getCurrentGL(); + Mesh.Mode mode = mesh.getMode(); + + Buffer indexData = indexBuf.getData(); + indexData.clear(); + if (mesh.getMode() == Mode.Hybrid) { + int[] modeStart = mesh.getModeStart(); + int[] elementLengths = mesh.getElementLengths(); + + int elMode = convertElementMode(Mode.Triangles); + int fmt = convertVertexFormat(indexBuf.getFormat()); + // int elSize = indexBuf.getFormat().getComponentSize(); + // int listStart = modeStart[0]; + int stripStart = modeStart[1]; + int fanStart = modeStart[2]; + int curOffset = 0; + for (int i = 0; i < elementLengths.length; i++) { + if (i == stripStart) { + elMode = convertElementMode(Mode.TriangleStrip); + } + else if (i == fanStart) { + /* FIX: fan section must draw with TriangleFan (was TriangleStrip — copy/paste bug; matches drawTriangleListVBO). */ elMode = convertElementMode(Mode.TriangleFan); + } + int elementLength = elementLengths[i]; + indexData.position(curOffset); + gl.glDrawElements(elMode, elementLength, fmt, indexData); + curOffset += 
elementLength; + } + } + else { + gl.glDrawElements(convertElementMode(mode), indexData.capacity(), + convertVertexFormat(indexBuf.getFormat()), indexData); + } + } + + private void renderMeshDefault(Mesh mesh, int lod, int count) { + VertexBuffer indices = null; + VertexBuffer interleavedData = mesh.getBuffer(Type.InterleavedData); + IntMap buffers = mesh.getBuffers(); + if (mesh.getNumLodLevels() > 0) { + indices = mesh.getLodLevel(lod); + } + else { + indices = buffers.get(Type.Index.ordinal()); + } + for (Entry entry : buffers) { + VertexBuffer vb = entry.getValue(); + + if (vb.getBufferType() == Type.InterleavedData || vb.getUsage() == Usage.CpuOnly) { + continue; + } + + if (vb.getBufferType() == Type.Index) { + indices = vb; + } + else { + if (vb.getStride() == 0) { + // not interleaved + setVertexAttrib(vb); + } + else { + // interleaved + setVertexAttrib(vb, interleavedData); + } + } + } + + if (indices != null) { + drawTriangleList(indices, mesh, count); + } + else { + GL gl = GLContext.getCurrentGL(); + gl.glDrawArrays(convertElementMode(mesh.getMode()), 0, mesh.getVertexCount()); + } + clearVertexAttribs(); + clearTextureUnits(); + } + + private void renderMeshVBO(Mesh mesh, int lod, int count) { + GL gl = GLContext.getCurrentGL(); + VertexBuffer indices = null; + VertexBuffer interleavedData = mesh.getBuffer(Type.InterleavedData); + if (interleavedData != null && interleavedData.isUpdateNeeded()) { + updateBufferData(interleavedData); + } + IntMap buffers = mesh.getBuffers(); + if (mesh.getNumLodLevels() > 0) { + indices = mesh.getLodLevel(lod); + } + else { + indices = buffers.get(Type.Index.ordinal()); + } + for (Entry entry : buffers) { + VertexBuffer vb = entry.getValue(); + + if (vb.getBufferType() == Type.InterleavedData || vb.getUsage() == Usage.CpuOnly // ignore + // cpu-only + // buffers + || vb.getBufferType() == Type.Index) { + continue; + } + + if (vb.getStride() == 0) { + // not interleaved + setVertexAttribVBO(vb, null); + } + else { + // 
interleaved + setVertexAttribVBO(vb, interleavedData); + } + } + + if (indices != null) { + drawTriangleListVBO(indices, mesh, count); + } + else { + gl.glDrawArrays(convertElementMode(mesh.getMode()), 0, mesh.getVertexCount()); + } + clearVertexAttribs(); + clearTextureUnits(); + } + + private void updateDisplayList(Mesh mesh) { + GL gl = GLContext.getCurrentGL(); + if (mesh.getId() != -1) { + // delete list first + gl.getGL2().glDeleteLists(mesh.getId(), mesh.getId()); + mesh.setId(-1); + } + + // create new display list + // first set state to NULL + applyRenderState(RenderState.NULL); + + // disable lighting + setLighting(null); + + int id = gl.getGL2().glGenLists(1); + mesh.setId(id); + gl.getGL2().glNewList(id, GL2.GL_COMPILE); + renderMeshDefault(mesh, 0, 1); + gl.getGL2().glEndList(); + } + + private void renderMeshDisplayList(Mesh mesh) { + GL gl = GLContext.getCurrentGL(); + if (mesh.getId() == -1) { + updateDisplayList(mesh); + } + gl.getGL2().glCallList(mesh.getId()); + } + + public void renderMesh(Mesh mesh, int lod, int count) { + GL gl = GLContext.getCurrentGL(); + if (context.pointSize != mesh.getPointSize()) { + gl.getGL2().glPointSize(mesh.getPointSize()); + context.pointSize = mesh.getPointSize(); + } + if (context.lineWidth != mesh.getLineWidth()) { + gl.glLineWidth(mesh.getLineWidth()); + context.lineWidth = mesh.getLineWidth(); + } + + checkTexturingUsed(); + + if (vbo) { + renderMeshVBO(mesh, lod, count); + } + else { + boolean dynamic = false; + if (mesh.getNumLodLevels() == 0) { + IntMap bufs = mesh.getBuffers(); + for (Entry entry : bufs) { + if (entry.getValue().getUsage() != VertexBuffer.Usage.Static) { + dynamic = true; + break; + } + } + } + else { + dynamic = true; + } + + if (!dynamic) { + // dealing with a static object, generate display list + renderMeshDisplayList(mesh); + } + else { + renderMeshDefault(mesh, lod, count); + } + } + } + + public void invalidateState() { + context.reset(); + boundShader = null; + lastFb = null; + + 
GL gl = GLContext.getCurrentGL(); + gl.glGetIntegerv(GL2.GL_DRAW_BUFFER, intBuf16); + initialDrawBuf = intBuf16.get(0); + gl.glGetIntegerv(GL2.GL_READ_BUFFER, intBuf16); + initialReadBuf = intBuf16.get(0); + + } + + public void copyFrameBuffer(FrameBuffer src, FrameBuffer dst, boolean copyDepth) { + GL gl = GLContext.getCurrentGL(); + if (gl.isExtensionAvailable("GL_EXT_framebuffer_blit")) { + int srcX0 = 0; + int srcY0 = 0; + int srcX1 = 0; + int srcY1 = 0; + + int dstX0 = 0; + int dstY0 = 0; + int dstX1 = 0; + int dstY1 = 0; + + int prevFBO = context.boundFBO; + + if (mainFbOverride != null) { + if (src == null) { + src = mainFbOverride; + } + if (dst == null) { + dst = mainFbOverride; + } + } + + if (src != null && src.isUpdateNeeded()) { + updateFrameBuffer(src); + } + + if (dst != null && dst.isUpdateNeeded()) { + updateFrameBuffer(dst); + } + + if (src == null) { + gl.glBindFramebuffer(GL2.GL_READ_FRAMEBUFFER, 0); + srcX0 = vpX; + srcY0 = vpY; + srcX1 = vpX + vpW; + srcY1 = vpY + vpH; + } else { + gl.glBindFramebuffer(GL2.GL_READ_FRAMEBUFFER, src.getId()); + srcX1 = src.getWidth(); + srcY1 = src.getHeight(); + } + if (dst == null) { + gl.glBindFramebuffer(GL2.GL_DRAW_FRAMEBUFFER, 0); + dstX0 = vpX; + dstY0 = vpY; + dstX1 = vpX + vpW; + dstY1 = vpY + vpH; + } else { + gl.glBindFramebuffer(GL2.GL_DRAW_FRAMEBUFFER, dst.getId()); + dstX1 = dst.getWidth(); + dstY1 = dst.getHeight(); + } + int mask = GL.GL_COLOR_BUFFER_BIT; + if (copyDepth) { + mask |= GL.GL_DEPTH_BUFFER_BIT; + } + gl.getGL2().glBlitFramebuffer(srcX0, srcY0, srcX1, srcY1, + dstX0, dstY0, dstX1, dstY1, mask, + GL.GL_NEAREST); + + + gl.glBindFramebuffer(GL2.GL_FRAMEBUFFER, prevFBO); + try { + checkFrameBufferError(); + } catch (IllegalStateException ex) { + logger.log(Level.SEVERE, "Source FBO:\n{0}", src); + logger.log(Level.SEVERE, "Dest FBO:\n{0}", dst); + throw ex; + } + } else { + throw new RendererException("EXT_framebuffer_blit required."); + // TODO: support non-blit copies? 
+ } + } + + public void setMainFrameBufferOverride(FrameBuffer fb) { + mainFbOverride = fb; + } +} diff --git a/engine/src/jogl/com/jme3/renderer/jogl/TextureUtil.java b/engine/src/jogl/com/jme3/renderer/jogl/TextureUtil.java new file mode 100644 index 000000000..72d247e26 --- /dev/null +++ b/engine/src/jogl/com/jme3/renderer/jogl/TextureUtil.java @@ -0,0 +1,386 @@ +/* + * Copyright (c) 2009-2012 jMonkeyEngine + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * + * * Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * + * * Neither the name of 'jMonkeyEngine' nor the names of its contributors + * may be used to endorse or promote products derived from this software + * without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED + * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF + * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ + +package com.jme3.renderer.jogl; + +import com.jme3.renderer.RendererException; +import com.jme3.texture.Image; +import com.jme3.texture.Image.Format; +import java.nio.ByteBuffer; +import javax.media.opengl.GL; +import javax.media.opengl.GL2; +import javax.media.opengl.GL2ES2; +import javax.media.opengl.GL3; +import javax.media.opengl.GLContext; + +public class TextureUtil { + + public static int convertTextureFormat(Format fmt) { + switch (fmt) { + case Alpha16: + case Alpha8: + return GL.GL_ALPHA; + case Luminance8Alpha8: + case Luminance16Alpha16: + return GL.GL_LUMINANCE_ALPHA; + case Luminance8: + case Luminance16: + return GL.GL_LUMINANCE; + case RGB10: + case RGB16: + case BGR8: + case RGB8: + case RGB565: + return GL.GL_RGB; + case RGB5A1: + case RGBA16: + case RGBA8: + return GL.GL_RGBA; + case Depth: + return GL2ES2.GL_DEPTH_COMPONENT; + default: + throw new UnsupportedOperationException("Unrecognized format: " + fmt); + } + } + + public static class GLImageFormat { + + int internalFormat; + int format; + int dataType; + boolean compressed; + + public GLImageFormat(int internalFormat, int format, int dataType, boolean compressed) { + this.internalFormat = internalFormat; + this.format = format; + this.dataType = dataType; + this.compressed = compressed; + } + } + + private static final GLImageFormat[] formatToGL = new GLImageFormat[Format.values().length]; + + private static void setFormat(Format format, int glInternalFormat, int glFormat, int glDataType, boolean glCompressed){ + formatToGL[format.ordinal()] = new GLImageFormat(glInternalFormat, glFormat, glDataType, glCompressed); + } + + static { + // Alpha formats + setFormat(Format.Alpha8, GL2.GL_ALPHA8, GL.GL_ALPHA, GL.GL_UNSIGNED_BYTE, false); + setFormat(Format.Alpha16, GL2.GL_ALPHA16, GL.GL_ALPHA, GL.GL_UNSIGNED_SHORT, false); + + // Luminance formats + setFormat(Format.Luminance8, GL2.GL_LUMINANCE8, GL.GL_LUMINANCE, GL.GL_UNSIGNED_BYTE, false); + setFormat(Format.Luminance16, 
GL2.GL_LUMINANCE16, GL.GL_LUMINANCE, GL.GL_UNSIGNED_SHORT, false); + setFormat(Format.Luminance16F, GL2.GL_LUMINANCE16F_ARB, GL.GL_LUMINANCE, GL.GL_HALF_FLOAT, false); + setFormat(Format.Luminance32F, GL.GL_LUMINANCE32F_ARB, GL.GL_LUMINANCE, GL.GL_FLOAT, false); + + // Luminance alpha formats + setFormat(Format.Luminance8Alpha8, GL2.GL_LUMINANCE8_ALPHA8, GL.GL_LUMINANCE_ALPHA, GL.GL_UNSIGNED_BYTE, false); + setFormat(Format.Luminance16Alpha16, GL2.GL_LUMINANCE16_ALPHA16, GL.GL_LUMINANCE_ALPHA, GL.GL_UNSIGNED_SHORT, false); + setFormat(Format.Luminance16FAlpha16F, GL2.GL_LUMINANCE_ALPHA16F_ARB, GL2.GL_LUMINANCE_ALPHA, GL2.GL_HALF_FLOAT, false); + + // Depth formats + setFormat(Format.Depth, GL2.GL_DEPTH_COMPONENT, GL2.GL_DEPTH_COMPONENT, GL2.GL_UNSIGNED_BYTE, false); + setFormat(Format.Depth16, GL2.GL_DEPTH_COMPONENT16, GL2.GL_DEPTH_COMPONENT, GL2.GL_UNSIGNED_SHORT, false); + setFormat(Format.Depth24, GL2.GL_DEPTH_COMPONENT24, GL2.GL_DEPTH_COMPONENT, GL2.GL_UNSIGNED_INT, false); + setFormat(Format.Depth32, GL2.GL_DEPTH_COMPONENT32, GL2.GL_DEPTH_COMPONENT, GL2.GL_UNSIGNED_INT, false); + setFormat(Format.Depth32F, GL2.GL_DEPTH_COMPONENT32F, GL2.GL_DEPTH_COMPONENT, GL2.GL_FLOAT, false); + + // Depth stencil formats + setFormat(Format.Depth24Stencil8, GL3.GL_DEPTH24_STENCIL8, GL3.GL_DEPTH_STENCIL, GL3.GL_UNSIGNED_INT_24_8, false); + + // RGB formats + setFormat(Format.BGR8, GL.GL_RGB8, GL2.GL_BGR, GL.GL_UNSIGNED_BYTE, false); + setFormat(Format.RGB8, GL.GL_RGB8, GL.GL_RGB, GL.GL_UNSIGNED_BYTE, false); +// setFormat(Format.RGB10, GL11.GL_RGB10, GL11.GL_RGB, GL12.GL_UNSIGNED_INT_10_10_10_2, false); + setFormat(Format.RGB16, GL2.GL_RGB16, GL.GL_RGB, GL.GL_UNSIGNED_SHORT, false); + setFormat(Format.RGB16F, GL2ES2.GL_RGB16F, GL.GL_RGB, GL.GL_HALF_FLOAT, false); + setFormat(Format.RGB32F, GL.GL_RGB32F, GL.GL_RGB, GL.GL_FLOAT, false); + + // Special RGB formats + setFormat(Format.RGB111110F, GL2.GL_R11F_G11F_B10F, GL.GL_RGB, GL.GL_UNSIGNED_INT_10F_11F_11F_REV, false); + 
setFormat(Format.RGB9E5, GL2.GL_RGB9_E5, GL.GL_RGB, GL2.GL_UNSIGNED_INT_5_9_9_9_REV, false); + setFormat(Format.RGB16F_to_RGB111110F, GL2.GL_R11F_G11F_B10F, GL.GL_RGB, GL.GL_HALF_FLOAT, false); + setFormat(Format.RGB16F_to_RGB9E5, GL2.GL_RGB9_E5, GL.GL_RGB, GL.GL_HALF_FLOAT, false); + + // RGBA formats + setFormat(Format.ABGR8, GL.GL_RGBA8, GL2.GL_ABGR_EXT, GL.GL_UNSIGNED_BYTE, false); + setFormat(Format.RGB5A1, GL.GL_RGB5_A1, GL.GL_RGBA, GL.GL_UNSIGNED_SHORT_5_5_5_1, false); + setFormat(Format.ARGB4444, GL.GL_RGBA4, GL2.GL_ABGR_EXT, GL.GL_UNSIGNED_SHORT_4_4_4_4, false); + setFormat(Format.RGBA8, GL.GL_RGBA8, GL.GL_RGBA, GL.GL_UNSIGNED_BYTE, false); + setFormat(Format.RGBA16, GL2.GL_RGBA16, GL.GL_RGBA, GL.GL_UNSIGNED_SHORT, false); // might be incorrect + setFormat(Format.RGBA16F, GL2.GL_RGBA16F, GL.GL_RGBA, GL.GL_HALF_FLOAT, false); + setFormat(Format.RGBA32F, GL.GL_RGBA32F, GL.GL_RGBA, GL.GL_FLOAT, false); + + // DXT formats + setFormat(Format.DXT1, GL.GL_COMPRESSED_RGB_S3TC_DXT1_EXT, GL.GL_RGB, GL2.GL_UNSIGNED_BYTE, true); + setFormat(Format.DXT1A, GL.GL_COMPRESSED_RGBA_S3TC_DXT1_EXT, GL.GL_RGBA, GL2.GL_UNSIGNED_BYTE, true); + setFormat(Format.DXT3, GL.GL_COMPRESSED_RGBA_S3TC_DXT3_EXT, GL.GL_RGBA, GL2.GL_UNSIGNED_BYTE, true); + setFormat(Format.DXT5, GL.GL_COMPRESSED_RGBA_S3TC_DXT5_EXT, GL.GL_RGBA, GL2.GL_UNSIGNED_BYTE, true); + + // LTC/LATC/3Dc formats + setFormat(Format.LTC, GL2.GL_COMPRESSED_LUMINANCE_LATC1_EXT, GL.GL_LUMINANCE, GL.GL_UNSIGNED_BYTE, true); + setFormat(Format.LATC, GL2.GL_COMPRESSED_LUMINANCE_ALPHA_LATC2_EXT, GL.GL_LUMINANCE_ALPHA, GL.GL_UNSIGNED_BYTE, true); + } + + public static GLImageFormat getImageFormat(Format fmt){ + GL gl = GLContext.getCurrentGL(); + switch (fmt){ + case ABGR8: + if (!gl.isExtensionAvailable("GL_EXT_abgr")){ + return null; + } + break; + case BGR8: + if (!gl.isExtensionAvailable("GL_VERSION_1_2") && !gl.isExtensionAvailable("EXT_bgra")){ + return null; + } + break; + case DXT1: + case DXT1A: + case DXT3: + case DXT5: 
+ if (!gl.isExtensionAvailable("GL_EXT_texture_compression_s3tc")) { + return null; + } + break; + case Depth: + case Depth16: + case Depth24: + case Depth32: + if (!gl.isExtensionAvailable("GL_VERSION_1_4") && !gl.isExtensionAvailable("ARB_depth_texture")){ + return null; + } + break; + case Depth24Stencil8: + if (!gl.isExtensionAvailable("GL_VERSION_3_0")){ + return null; + } + break; + case Luminance16F: + case Luminance16FAlpha16F: + case Luminance32F: + if (!gl.isExtensionAvailable("GL_ARB_texture_float")){ + return null; + } + break; + case RGB16F: + case RGB32F: + case RGBA16F: + case RGBA32F: + if (!gl.isExtensionAvailable("GL_VERSION_3_0") && !gl.isExtensionAvailable("GL_ARB_texture_float")){ + return null; + } + break; + case Depth32F: + if (!gl.isExtensionAvailable("GL_VERSION_3_0") && !gl.isExtensionAvailable("GL_NV_depth_buffer_float")){ + return null; + } + break; + case LATC: + case LTC: + if (!gl.isExtensionAvailable("GL_EXT_texture_compression_latc")){ + return null; + } + break; + case RGB9E5: + case RGB16F_to_RGB9E5: + if (!gl.isExtensionAvailable("GL_VERSION_3_0") && !gl.isExtensionAvailable("GL_EXT_texture_shared_exponent")){ + return null; + } + break; + case RGB111110F: + case RGB16F_to_RGB111110F: + if (!gl.isExtensionAvailable("GL_VERSION_3_0") && !gl.isExtensionAvailable("GL_EXT_packed_float")){ + return null; + } + break; + } + return formatToGL[fmt.ordinal()]; + } + + public static GLImageFormat getImageFormatWithError(Format fmt) { + GLImageFormat glFmt = getImageFormat(fmt); + if (glFmt == null) { + throw new RendererException("Image format '" + fmt + "' is unsupported by the video hardware."); + } + return glFmt; + } + + public static void uploadTexture(Image image, + int target, + int index, + int border){ + GL gl = GLContext.getCurrentGL(); + Image.Format fmt = image.getFormat(); + GLImageFormat glFmt = getImageFormatWithError(fmt); + + ByteBuffer data; + if (index >= 0 && image.getData() != null && image.getData().size() > 0){ + 
data = image.getData(index); + }else{ + data = null; + } + + int width = image.getWidth(); + int height = image.getHeight(); + int depth = image.getDepth(); + + if (data != null) { + gl.glPixelStorei(GL.GL_UNPACK_ALIGNMENT, 1); + } + + int[] mipSizes = image.getMipMapSizes(); + int pos = 0; + // TODO: Remove unneccessary allocation + if (mipSizes == null){ + if (data != null) + mipSizes = new int[]{ data.capacity() }; + else + mipSizes = new int[]{ width * height * fmt.getBitsPerPixel() / 8 }; + } + + boolean subtex = false; + int samples = image.getMultiSamples(); + + for (int i = 0; i < mipSizes.length; i++){ + int mipWidth = Math.max(1, width >> i); + int mipHeight = Math.max(1, height >> i); + int mipDepth = Math.max(1, depth >> i); + + if (data != null){ + data.position(pos); + data.limit(pos + mipSizes[i]); + } + + if (glFmt.compressed && data != null){ + if (target == GL2.GL_TEXTURE_3D){ + gl.getGL2().glCompressedTexImage3D(target, + i, + glFmt.internalFormat, + mipWidth, + mipHeight, + mipDepth, + 0, + border, + data); + }else{ + //all other targets use 2D: array, cubemap, 2d + gl.getGL2().glCompressedTexImage2D(target, + i, + glFmt.internalFormat, + mipWidth, + mipHeight, + 0, + border, + data); + } + }else{ + if (target == GL2.GL_TEXTURE_3D){ + gl.getGL2().glTexImage3D(target, + i, + glFmt.internalFormat, + mipWidth, + mipHeight, + mipDepth, + border, + glFmt.format, + glFmt.dataType, + data); + }else if (target == GL.GL_TEXTURE_2D_ARRAY){ + // prepare data for 2D array + // or upload slice + if (index == -1){ + gl.getGL2().glTexImage3D(target, + 0, + glFmt.internalFormat, + mipWidth, + mipHeight, + image.getData().size(), //# of slices + border, + glFmt.format, + glFmt.dataType, + data); + }else{ + gl.getGL2().glTexSubImage3D(target, + i, // level + 0, // xoffset + 0, // yoffset + index, // zoffset + width, // width + height, // height + 1, // depth + glFmt.format, + glFmt.dataType, + data); + } + }else{ + if (subtex){ + if (samples > 1){ + throw new 
IllegalStateException("Cannot update multisample textures"); + } + + gl.getGL2().glTexSubImage2D(target, + i, + 0, 0, + mipWidth, mipHeight, + glFmt.format, + glFmt.dataType, + data); + }else{ + if (samples > 1){ + gl.getGL2().glTexImage2DMultisample(target, + samples, + glFmt.internalFormat, + mipWidth, + mipHeight, + true); + }else{ + gl.getGL2().glTexImage2D(target, + i, + glFmt.internalFormat, + mipWidth, + mipHeight, + border, + glFmt.format, + glFmt.dataType, + data); + } + } + } + } + + pos += mipSizes[i]; + } + } +} diff --git a/engine/src/jogl/com/jme3/system/jogl/JoglAbstractDisplay.java b/engine/src/jogl/com/jme3/system/jogl/JoglAbstractDisplay.java new file mode 100644 index 000000000..a96a6a2c2 --- /dev/null +++ b/engine/src/jogl/com/jme3/system/jogl/JoglAbstractDisplay.java @@ -0,0 +1,182 @@ +/* + * Copyright (c) 2009-2012 jMonkeyEngine + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * + * * Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * + * * Neither the name of 'jMonkeyEngine' nor the names of its contributors + * may be used to endorse or promote products derived from this software + * without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED + * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + * PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF + * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package com.jme3.system.jogl; + +import com.jme3.input.KeyInput; +import com.jme3.input.MouseInput; +import com.jme3.input.TouchInput; +import com.jme3.input.awt.AwtKeyInput; +import com.jme3.input.awt.AwtMouseInput; +import com.jme3.renderer.jogl.JoglRenderer; +import com.jogamp.opengl.util.Animator; +import com.jogamp.opengl.util.AnimatorBase; +import com.jogamp.opengl.util.FPSAnimator; +import java.awt.GraphicsDevice; +import java.awt.GraphicsEnvironment; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.logging.Logger; +import javax.media.opengl.GLAutoDrawable; +import javax.media.opengl.GLCapabilities; +import javax.media.opengl.GLEventListener; +import javax.media.opengl.GLProfile; +import javax.media.opengl.awt.GLCanvas; + +public abstract class JoglAbstractDisplay extends JoglContext implements GLEventListener { + + private static final Logger logger = Logger.getLogger(JoglAbstractDisplay.class.getName()); + + protected GraphicsDevice device; + + protected GLCanvas canvas; + + protected AnimatorBase animator; + + protected AtomicBoolean active = new AtomicBoolean(false); + + protected boolean wasActive = false; + + protected int frameRate; + + protected boolean useAwt = true; + + protected AtomicBoolean autoFlush = new AtomicBoolean(true); + + protected boolean wasAnimating = false; + + protected void initGLCanvas() { + device = 
GraphicsEnvironment.getLocalGraphicsEnvironment().getDefaultScreenDevice(); + + GLCapabilities caps = new GLCapabilities(GLProfile.getDefault()); + caps.setHardwareAccelerated(true); + caps.setDoubleBuffered(true); + caps.setStencilBits(settings.getStencilBits()); + caps.setDepthBits(settings.getDepthBits()); + + if (settings.getSamples() > 1) { + caps.setSampleBuffers(true); + caps.setNumSamples(settings.getSamples()); + } + + canvas = new GLCanvas(caps) { + @Override + public void addNotify() { + super.addNotify(); + onCanvasAdded(); + } + + @Override + public void removeNotify() { + onCanvasRemoved(); + super.removeNotify(); + } + }; + if (settings.isVSync()) { + // FIXME: it is too early to get the GL instance from the canvas + canvas.getGL().setSwapInterval(1); + } + canvas.setFocusable(true); + canvas.setIgnoreRepaint(true); + canvas.addGLEventListener(this); + + // N.B: it is too early to get the GL instance from the canvas + // if (false){ + // trace mode + // jME already uses err stream, use out instead + // gl = new TraceGL(gl, System.out); + // }else if (false){ + // debug mode + // gl = new DebugGL(gl); + // }else{ + // production mode + // } + renderer = new JoglRenderer(); + } + + protected void startGLCanvas() { + if (frameRate > 0) { + animator = new FPSAnimator(canvas, frameRate); + // ((FPSAnimator)animator).setRunAsFastAsPossible(true); + } + else { + animator = new Animator(canvas); + ((Animator) animator).setRunAsFastAsPossible(true); + } + + animator.start(); + wasAnimating = true; + } + + protected void onCanvasAdded() { + } + + protected void onCanvasRemoved() { + } + + @Override + public KeyInput getKeyInput() { + return new AwtKeyInput(/*canvas*/); + } + + @Override + public MouseInput getMouseInput() { + return new AwtMouseInput(/*canvas*/); + } + + public TouchInput getTouchInput() { + return null; + } + + public void setAutoFlushFrames(boolean enabled) { + autoFlush.set(enabled); + } + + /** + * Callback. 
+ */ + public void reshape(GLAutoDrawable drawable, int x, int y, int width, int height) { + listener.reshape(width, height); + } + + /** + * Callback. + */ + public void displayChanged(GLAutoDrawable drawable, boolean modeChanged, boolean deviceChanged) { + } + + /** + * Callback + */ + public void dispose(GLAutoDrawable drawable) { + + } +} diff --git a/engine/src/jogl/com/jme3/system/jogl/JoglCanvas.java b/engine/src/jogl/com/jme3/system/jogl/JoglCanvas.java new file mode 100644 index 000000000..e626cbefd --- /dev/null +++ b/engine/src/jogl/com/jme3/system/jogl/JoglCanvas.java @@ -0,0 +1,129 @@ +/* + * Copyright (c) 2009-2012 jMonkeyEngine + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * + * * Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * + * * Neither the name of 'jMonkeyEngine' nor the names of its contributors + * may be used to endorse or promote products derived from this software + * without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED + * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + * PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF + * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package com.jme3.system.jogl; + +import com.jme3.system.JmeCanvasContext; +import java.awt.Canvas; +import java.util.logging.Logger; +import javax.media.opengl.GLAutoDrawable; + +public class JoglCanvas extends JoglAbstractDisplay implements JmeCanvasContext { + + private static final Logger logger = Logger.getLogger(JoglCanvas.class.getName()); + private int width, height; + + public JoglCanvas(){ + super(); + initGLCanvas(); + } + + public Type getType() { + return Type.Canvas; + } + + public void setTitle(String title) { + } + + public void restart() { + } + + public void create(boolean waitFor){ + if (waitFor) + waitFor(true); + } + + public void destroy(boolean waitFor){ + if (waitFor) + waitFor(false); + } + + @Override + protected void onCanvasRemoved(){ + super.onCanvasRemoved(); + created.set(false); + waitFor(false); + } + + @Override + protected void onCanvasAdded(){ + startGLCanvas(); + } + + public void init(GLAutoDrawable drawable) { + canvas.requestFocus(); + + super.internalCreate(); + logger.info("Display created."); + + renderer.initialize(); + listener.initialize(); + } + + public void display(GLAutoDrawable glad) { + if (!created.get() && renderer != null){ + listener.destroy(); + logger.info("Canvas destroyed."); + super.internalDestroy(); + return; + } + + if (width != canvas.getWidth() || height != canvas.getHeight()){ + width = canvas.getWidth(); + height = canvas.getHeight(); + if (listener != null) + 
listener.reshape(width, height); + } + + boolean flush = autoFlush.get(); + if (flush && !wasAnimating){ + animator.start(); + wasAnimating = true; + }else if (!flush && wasAnimating){ + animator.stop(); + wasAnimating = false; + } + + listener.update(); + renderer.onFrame(); + + } + + public Canvas getCanvas() { + return canvas; + } + + @Override + public void dispose(GLAutoDrawable arg0) { + } + +} diff --git a/engine/src/jogl/com/jme3/system/jogl/JoglContext.java b/engine/src/jogl/com/jme3/system/jogl/JoglContext.java new file mode 100644 index 000000000..ae43213c7 --- /dev/null +++ b/engine/src/jogl/com/jme3/system/jogl/JoglContext.java @@ -0,0 +1,146 @@ +/* + * Copyright (c) 2009-2012 jMonkeyEngine + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * + * * Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * + * * Neither the name of 'jMonkeyEngine' nor the names of its contributors + * may be used to endorse or promote products derived from this software + * without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED + * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + * PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF + * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package com.jme3.system.jogl; + +import com.jme3.input.JoyInput; +import com.jme3.input.KeyInput; +import com.jme3.input.MouseInput; +import com.jme3.input.awt.AwtKeyInput; +import com.jme3.input.awt.AwtMouseInput; +import com.jme3.renderer.Renderer; +import com.jme3.renderer.jogl.JoglRenderer; +import com.jme3.system.AppSettings; +import com.jme3.system.JmeContext; +import com.jme3.system.NanoTimer; +import com.jme3.system.SystemListener; +import com.jme3.system.Timer; +import java.util.concurrent.atomic.AtomicBoolean; + +public abstract class JoglContext implements JmeContext { + + protected AtomicBoolean created = new AtomicBoolean(false); + protected AtomicBoolean renderable = new AtomicBoolean(false); + protected final Object createdLock = new Object(); + + protected AppSettings settings = new AppSettings(true); + protected JoglRenderer renderer; + protected Timer timer; + protected SystemListener listener; + + protected AwtKeyInput keyInput; + protected AwtMouseInput mouseInput; + + public void setSystemListener(SystemListener listener){ + this.listener = listener; + } + + public void setSettings(AppSettings settings) { + this.settings.copyFrom(settings); + } + + public boolean isRenderable(){ + return renderable.get(); + } + + public AppSettings getSettings() { + return settings; + } + + public Renderer getRenderer() { + return renderer; + } + + public MouseInput getMouseInput() { + return mouseInput; + } + + public 
KeyInput getKeyInput() { + return keyInput; + } + + public JoyInput getJoyInput() { + return null; + } + + public Timer getTimer() { + return timer; + } + + public boolean isCreated() { + return created.get(); + } + + public void create(){ + create(false); + } + + public void destroy(){ + destroy(false); + } + + protected void waitFor(boolean createdVal){ + synchronized (createdLock){ + while (created.get() != createdVal){ + try { + createdLock.wait(); + } catch (InterruptedException ex) { + } + } + } + } + + public void internalCreate() { + timer = new NanoTimer(); + synchronized (createdLock){ + created.set(true); + createdLock.notifyAll(); + } + // renderer initialization must happen in subclass. + /*if (renderable.get()){ + initContextFirstTime(); + }else{ + assert getType() == Type.Canvas; + }*/ + } + + protected void internalDestroy() { + renderer = null; + timer = null; + renderable.set(false); + synchronized (createdLock){ + created.set(false); + createdLock.notifyAll(); + } + } + +} diff --git a/engine/src/jogl/com/jme3/system/jogl/JoglDisplay.java b/engine/src/jogl/com/jme3/system/jogl/JoglDisplay.java new file mode 100644 index 000000000..7a8eb3465 --- /dev/null +++ b/engine/src/jogl/com/jme3/system/jogl/JoglDisplay.java @@ -0,0 +1,307 @@ +/* + * Copyright (c) 2009-2012 jMonkeyEngine + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * + * * Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. 
+ * + * * Neither the name of 'jMonkeyEngine' nor the names of its contributors + * may be used to endorse or promote products derived from this software + * without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED + * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF + * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ + +package com.jme3.system.jogl; + +import com.jme3.system.AppSettings; +import java.awt.BorderLayout; +import java.awt.Container; +import java.awt.Dimension; +import java.awt.DisplayMode; +import java.awt.Frame; +import java.awt.Toolkit; +import java.awt.event.WindowAdapter; +import java.awt.event.WindowEvent; +import java.lang.reflect.InvocationTargetException; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.logging.Level; +import java.util.logging.Logger; +import javax.media.opengl.GLAutoDrawable; +import javax.swing.JFrame; +import javax.swing.SwingUtilities; + +public class JoglDisplay extends JoglAbstractDisplay { + + private static final Logger logger = Logger.getLogger(JoglDisplay.class.getName()); + + protected AtomicBoolean windowCloseRequest = new AtomicBoolean(false); + protected AtomicBoolean needClose = new AtomicBoolean(false); + protected AtomicBoolean needRestart = new AtomicBoolean(false); + protected boolean wasInited = false; + protected Frame frame; + + public Type getType() { + return Type.Display; + } + + protected DisplayMode getFullscreenDisplayMode(DisplayMode[] modes, int width, int height, int bpp, int freq){ + for (DisplayMode mode : modes){ + if (mode.getWidth() == width + && mode.getHeight() == height + && (mode.getBitDepth() == DisplayMode.BIT_DEPTH_MULTI + || mode.getBitDepth() == bpp + || (mode.getBitDepth() == 32 && bpp==24)) + && mode.getRefreshRate() == freq){ + return mode; + } + } + return null; + } + + protected void createGLFrame(){ + Container contentPane; + if (useAwt){ + frame = new Frame(settings.getTitle()); + contentPane = frame; + }else{ + frame = new JFrame(settings.getTitle()); + contentPane = ((JFrame)frame).getContentPane(); + } + + contentPane.setLayout(new BorderLayout()); + + applySettings(settings); + + frame.setResizable(false); + frame.setFocusable(true); + + // only add canvas after frame is visible + contentPane.add(canvas, BorderLayout.CENTER); + //frame.pack(); +// 
frame.setSize(contentPane.getPreferredSize()); + frame.setSize(settings.getWidth(),settings.getHeight()); + + if (device.getFullScreenWindow() == null){ + // now that canvas is attached, + // determine optimal size to contain it + + Dimension screenSize = Toolkit.getDefaultToolkit().getScreenSize(); + frame.setLocation((screenSize.width - frame.getWidth()) / 2, + (screenSize.height - frame.getHeight()) / 2); + } + + frame.addWindowListener(new WindowAdapter() { + @Override + public void windowClosing(WindowEvent evt) { + windowCloseRequest.set(true); + } + @Override + public void windowActivated(WindowEvent evt) { + active.set(true); + } + + @Override + public void windowDeactivated(WindowEvent evt) { + active.set(false); + } + }); + } + + protected void applySettings(AppSettings settings){ + DisplayMode displayMode; + if (settings.getWidth() <= 0 || settings.getHeight() <= 0){ + displayMode = device.getDisplayMode(); + settings.setResolution(displayMode.getWidth(), displayMode.getHeight()); + }else if (settings.isFullscreen()){ + displayMode = getFullscreenDisplayMode(device.getDisplayModes(), + settings.getWidth(), settings.getHeight(), + settings.getBitsPerPixel(), settings.getFrequency()); + if (displayMode == null) + throw new RuntimeException("Unable to find fullscreen display mode matching settings"); + }else{ + displayMode = new DisplayMode(settings.getWidth(), settings.getHeight(), DisplayMode.BIT_DEPTH_MULTI, DisplayMode.REFRESH_RATE_UNKNOWN); + } + + // FIXME: seems to return false even though + // it is supported.. 
+// if (!device.isDisplayChangeSupported()){ +// // must use current device mode if display mode change not supported +// displayMode = device.getDisplayMode(); +// settings.setResolution(displayMode.getWidth(), displayMode.getHeight()); +// } + + frameRate = settings.getFrameRate(); + logger.log(Level.INFO, "Selected display mode: {0}x{1}x{2} @{3}", + new Object[]{displayMode.getWidth(), + displayMode.getHeight(), + displayMode.getBitDepth(), + displayMode.getRefreshRate()}); + + canvas.setSize(displayMode.getWidth(), displayMode.getHeight()); + + DisplayMode prevDisplayMode = device.getDisplayMode(); + + if (settings.isFullscreen() && device.isFullScreenSupported()){ + frame.setUndecorated(true); + + try{ + device.setFullScreenWindow(frame); + if (!prevDisplayMode.equals(displayMode) + && device.isDisplayChangeSupported()){ + device.setDisplayMode(displayMode); + } + } catch (Throwable t){ + logger.log(Level.SEVERE, "Failed to enter fullscreen mode", t); + device.setFullScreenWindow(null); + } + }else{ + if (!device.isFullScreenSupported()){ + logger.warning("Fullscreen not supported."); + }else{ + frame.setUndecorated(false); + device.setFullScreenWindow(null); + } + + frame.setVisible(true); + } + } + + private void initInEDT(){ + initGLCanvas(); + + createGLFrame(); + + startGLCanvas(); + } + + public void init(GLAutoDrawable drawable){ + // prevent initializing twice on restart + if (!wasInited){ + canvas.requestFocus(); + + super.internalCreate(); + logger.info("Display created."); + + renderer.initialize(); + listener.initialize(); + + wasInited = true; + } + } + + public void create(boolean waitFor){ + try { + if (waitFor){ + try{ + SwingUtilities.invokeAndWait(new Runnable() { + public void run() { + initInEDT(); + } + }); + } catch (InterruptedException ex) { + listener.handleError("Interrupted", ex); + } + }else{ + SwingUtilities.invokeLater(new Runnable() { + public void run() { + initInEDT(); + } + }); + } + } catch (InvocationTargetException ex) { + 
throw new AssertionError(); // can never happen + } + } + + public void destroy(boolean waitFor){ + needClose.set(true); + if (waitFor){ + waitFor(false); + } + } + + public void restart() { + if (created.get()){ + needRestart.set(true); + }else{ + throw new IllegalStateException("Display not started yet. Cannot restart"); + } + } + + public void setTitle(String title){ + if (frame != null) + frame.setTitle(title); + } + + /** + * Callback. + */ + public void display(GLAutoDrawable drawable) { + if (needClose.get()) { + listener.destroy(); + animator.stop(); + if (settings.isFullscreen()) { + device.setFullScreenWindow(null); + } + frame.dispose(); + logger.info("Display destroyed."); + super.internalDestroy(); + return; + } + + if (windowCloseRequest.get()){ + listener.requestClose(false); + windowCloseRequest.set(false); + } + + if (needRestart.getAndSet(false)){ + // for restarting contexts + if (frame.isVisible()){ + animator.stop(); + frame.dispose(); + createGLFrame(); + startGLCanvas(); + } + } + +// boolean flush = autoFlush.get(); +// if (animator.isAnimating() != flush){ +// if (flush) +// animator.stop(); +// else +// animator.start(); +// } + + if (wasActive != active.get()){ + if (!wasActive){ + listener.gainFocus(); + wasActive = true; + }else{ + listener.loseFocus(); + wasActive = false; + } + } + + listener.update(); + renderer.onFrame(); + } +}