* Don't depend on ContextCapabilities for determining OGL / GLSL versions

experimental
shadowislord 10 years ago
parent e225e6ff89
commit 4d7dcc17bb
1 changed file with 81 changes:
jme3-lwjgl/src/main/java/com/jme3/renderer/lwjgl/LwjglRenderer.java

@@ -148,18 +148,56 @@ public class LwjglRenderer implements Renderer {
         return caps;
     }
+
+    private static int extractVersion(String prefixStr, String versionStr) {
+        if (versionStr != null) {
+            int spaceIdx = versionStr.indexOf(" ", prefixStr.length());
+            if (spaceIdx >= 1) {
+                versionStr = versionStr.substring(prefixStr.length(), spaceIdx).trim();
+            } else {
+                versionStr = versionStr.substring(prefixStr.length()).trim();
+            }
+            // Some devices have ":" at the end of the version string.
+            versionStr = versionStr.replaceAll("\\:", "");
+            // Pivot on the first point.
+            int firstPoint = versionStr.indexOf(".");
+            // Remove everything after the second point.
+            int secondPoint = versionStr.indexOf(".", firstPoint + 1);
+            if (secondPoint != -1) {
+                versionStr = versionStr.substring(0, secondPoint);
+            }
+            String majorVerStr = versionStr.substring(0, firstPoint);
+            String minorVerStr = versionStr.substring(firstPoint + 1);
+            // Normalize e.g. "3.30" to "3.3" so the arithmetic below stays consistent.
+            if (minorVerStr.endsWith("0") && minorVerStr.length() > 1) {
+                minorVerStr = minorVerStr.substring(0, minorVerStr.length() - 1);
+            }
+            int majorVer = Integer.parseInt(majorVerStr);
+            int minorVer = Integer.parseInt(minorVerStr);
+            // e.g. major 2, minor 1 -> 210
+            return majorVer * 100 + minorVer * 10;
+        } else {
+            return -1;
+        }
+    }
     @SuppressWarnings("fallthrough")
     public void initialize() {
-        ctxCaps = GLContext.getCapabilities();
-        if (ctxCaps.OpenGL20) {
+        int oglVer = extractVersion("", glGetString(GL_VERSION));
+        if (oglVer >= 200) {
             caps.add(Caps.OpenGL20);
-            if (ctxCaps.OpenGL21) {
+            if (oglVer >= 210) {
                 caps.add(Caps.OpenGL21);
-                if (ctxCaps.OpenGL30) {
+                if (oglVer >= 300) {
                     caps.add(Caps.OpenGL30);
-                    if (ctxCaps.OpenGL31) {
+                    if (oglVer >= 310) {
                         caps.add(Caps.OpenGL31);
-                        if (ctxCaps.OpenGL32) {
+                        if (oglVer >= 320) {
                             caps.add(Caps.OpenGL32);
                         }
                     }
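For a concrete sense of what the new parser returns, here is a minimal standalone sketch. The driver strings are hypothetical examples, and since extractVersion is private to LwjglRenderer, the demo carries its own copy of the logic:

import java.util.Arrays;

public class VersionParseDemo {
    public static void main(String[] args) {
        System.out.println(parse("", "4.4.0 NVIDIA 331.113")); // 440 (second point dropped)
        System.out.println(parse("", "2.1 Mesa 10.1.0"));      // 210
        System.out.println(parse("", "3.0:"));                 // 300 (trailing ':' stripped)
        System.out.println(parse("", null));                   // -1 (no version available)
    }

    // Same logic as LwjglRenderer.extractVersion, copied here because the
    // original is private.
    static int parse(String prefixStr, String versionStr) {
        if (versionStr == null) {
            return -1;
        }
        int spaceIdx = versionStr.indexOf(" ", prefixStr.length());
        if (spaceIdx >= 1) {
            versionStr = versionStr.substring(prefixStr.length(), spaceIdx).trim();
        } else {
            versionStr = versionStr.substring(prefixStr.length()).trim();
        }
        versionStr = versionStr.replaceAll("\\:", "");
        int firstPoint = versionStr.indexOf(".");
        int secondPoint = versionStr.indexOf(".", firstPoint + 1);
        if (secondPoint != -1) {
            versionStr = versionStr.substring(0, secondPoint);
        }
        String majorVerStr = versionStr.substring(0, firstPoint);
        String minorVerStr = versionStr.substring(firstPoint + 1);
        if (minorVerStr.endsWith("0") && minorVerStr.length() > 1) {
            minorVerStr = minorVerStr.substring(0, minorVerStr.length() - 1);
        }
        return Integer.parseInt(majorVerStr) * 100 + Integer.parseInt(minorVerStr) * 10;
    }
}

The encoding is majorVer * 100 + minorVer * 10, so "2.1" maps to 210; the nested checks in the hunk above are then plain threshold comparisons against that value instead of queries of ContextCapabilities flags.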
@@ -167,20 +205,6 @@ public class LwjglRenderer implements Renderer {
                 }
             }
-        //workaround, always assume we support GLSL100
-        //some cards just don't report this correctly
-        caps.add(Caps.GLSL100);
-        String versionStr = null;
-        if (ctxCaps.OpenGL20) {
-            versionStr = glGetString(GL_SHADING_LANGUAGE_VERSION);
-        }
-        if (versionStr == null || versionStr.equals("")) {
-            throw new UnsupportedOperationException("GLSL and OpenGL2 is "
-                    + "required for the LWJGL "
-                    + "renderer!");
-        }
         // Fix issue in TestRenderToMemory when GL_FRONT is the main
         // buffer being used.
         context.initialDrawBuf = glGetInteger(GL_DRAW_BUFFER);
@@ -192,22 +216,14 @@ public class LwjglRenderer implements Renderer {
         // initialDrawBuf = GL_BACK;
         // initialReadBuf = GL_BACK;
-        int spaceIdx = versionStr.indexOf(" ");
-        if (spaceIdx >= 1) {
-            versionStr = versionStr.substring(0, spaceIdx);
-        }
-        float version = Float.parseFloat(versionStr);
-        int glslVer = (int) (version * 100);
+        int glslVer = extractVersion("", glGetString(GL_SHADING_LANGUAGE_VERSION));
         switch (glslVer) {
             default:
                 if (glslVer < 400) {
                     break;
                 }
             // so that future OpenGL revisions won't break jme3
             // fall through intentional
             case 400:
             case 330:
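Since case 400: and case 330: carry no statements of their own, they fall straight through into the lower cases, so an exactly matched high version picks up every lower GLSL cap as well; the default case routes unknown future versions (>= 400) into the same path, while unknown lower values add nothing. A minimal, self-contained illustration of that fall-through pattern, using generic level numbers rather than the renderer's actual case list (which the hunk elides):

import java.util.ArrayList;
import java.util.List;

public class FallthroughDemo {
    static List<Integer> supportedLevels(int ver) {
        List<Integer> out = new ArrayList<>();
        switch (ver) {
            default:
                if (ver < 400) {
                    break;          // unknown low version: nothing added
                }
                // future revisions (>= 400) fall through, treated as 400
            case 400:               // no statement: falls into the next label
            case 330:
            case 150:
                out.add(150);       // and keeps falling, collecting lower levels
            case 140:
                out.add(140);
            case 100:
                out.add(100);
                break;
        }
        return out;
    }

    public static void main(String[] args) {
        System.out.println(supportedLevels(330)); // [150, 140, 100]
        System.out.println(supportedLevels(500)); // [150, 140, 100] (future-proofing)
        System.out.println(supportedLevels(310)); // []
    }
}

In the sketch, supportedLevels(330) and supportedLevels(500) both yield [150, 140, 100], while supportedLevels(310) yields an empty list, which is exactly the gap the GLSL100 guard in the next hunk papers over.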
@@ -226,10 +242,11 @@ public class LwjglRenderer implements Renderer {
                 break;
         }
+        if (!caps.contains(Caps.GLSL100)) {
+            logger.log(Level.WARNING, "Force-adding GLSL100 support, since OpenGL2 is supported.");
+            // Workaround: always assume we support GLSL100.
+            // Some cards just don't report this correctly.
+            caps.add(Caps.GLSL100);
+        }
+        ctxCaps = GLContext.getCapabilities();
         glGetInteger(GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS, intBuf16);
         vertexTextureUnits = intBuf16.get(0);
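The limit queries at the bottom use LWJGL 2's buffer-based glGetInteger overload, which requires a direct IntBuffer with at least 16 ints remaining (hence the intBuf16 name in the renderer). A minimal sketch of the two query styles seen in this file; the class and method names are hypothetical, and it assumes LWJGL 2 with a current GL context on the calling thread:

import java.nio.IntBuffer;
import org.lwjgl.BufferUtils;
import static org.lwjgl.opengl.GL11.*;
import static org.lwjgl.opengl.GL20.GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS;

public class GlQueryDemo {
    // Assumes a current OpenGL context (e.g. created via Display.create()).
    public static void queryLimits() {
        // Buffer overload: writes the value(s) into a direct IntBuffer.
        IntBuffer intBuf16 = BufferUtils.createIntBuffer(16);
        glGetInteger(GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS, intBuf16);
        int vertexTextureUnits = intBuf16.get(0);

        // Single-value convenience overload, as used for GL_DRAW_BUFFER
        // earlier in initialize().
        int initialDrawBuf = glGetInteger(GL_DRAW_BUFFER);

        System.out.println(vertexTextureUnits + " / " + initialDrawBuf);
    }
}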
