Using one UV buffer for all types of texture mapping (the textures are cast to different UVs when needed). Added support for using generated textures as normal maps (looks best when a large generatedTexturePPU is used and on large faces).

git-svn-id: https://jmonkeyengine.googlecode.com/svn/trunk@9576 75d07b2b-3a1a-0410-a2c5-0572b91ccdca
3.0
Kae..pl 13 years ago
parent 22f0410e78
commit 3a5b8bb3d6
  1. engine/src/blender/com/jme3/scene/plugins/blender/materials/MaterialContext.java (91 changes)
  2. engine/src/blender/com/jme3/scene/plugins/blender/textures/CombinedTexture.java (169 changes)
  3. engine/src/blender/com/jme3/scene/plugins/blender/textures/TextureHelper.java (114 changes)
  4. engine/src/blender/com/jme3/scene/plugins/blender/textures/TriangulatedTexture.java (161 changes)

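The core of the change: all textures applied to a material now share a single UV set, which is stored in the mesh's primary TexCoord buffer instead of one buffer per mapping type. A minimal sketch of that binding step (the class name SharedUvBufferSketch is ours; the mesh and the shared UV list are assumed to be supplied by the caller, and the calls mirror the MaterialContext code in the diff below):

import java.util.List;

import com.jme3.math.Vector2f;
import com.jme3.scene.Mesh;
import com.jme3.scene.VertexBuffer;
import com.jme3.scene.VertexBuffer.Format;
import com.jme3.scene.VertexBuffer.Usage;
import com.jme3.util.BufferUtils;

public class SharedUvBufferSketch {

    /** Stores one shared UV set in the mesh's primary TexCoord buffer. */
    public static void applySharedUvs(Mesh mesh, List<Vector2f> sharedUvs) {
        VertexBuffer uvCoordsBuffer = new VertexBuffer(VertexBuffer.Type.TexCoord);
        uvCoordsBuffer.setupData(Usage.Static, 2, Format.Float,
                BufferUtils.createFloatBuffer(sharedUvs.toArray(new Vector2f[sharedUvs.size()])));
        mesh.setBuffer(uvCoordsBuffer);
    }
}
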
@@ -1,10 +1,5 @@
package com.jme3.scene.plugins.blender.materials;
import java.awt.Color;
import java.awt.Graphics;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@@ -12,10 +7,6 @@ import java.util.Map;
import java.util.Map.Entry;
import java.util.logging.Logger;
import javax.imageio.ImageIO;
import jme3tools.converters.ImageToAwt;
import com.jme3.material.Material;
import com.jme3.material.RenderState.BlendMode;
import com.jme3.material.RenderState.FaceCullMode;
@@ -37,7 +28,6 @@ import com.jme3.scene.plugins.blender.textures.CombinedTexture;
import com.jme3.scene.plugins.blender.textures.TextureHelper;
import com.jme3.scene.plugins.blender.textures.blending.TextureBlender;
import com.jme3.scene.plugins.blender.textures.blending.TextureBlenderFactory;
import com.jme3.texture.Image;
import com.jme3.texture.Texture;
import com.jme3.util.BufferUtils;
@@ -136,7 +126,7 @@ public final class MaterialContext {
TextureHelper textureHelper = blenderContext.getHelper(TextureHelper.class);
for(Entry<Number, List<TextureData>> entry : textureDataMap.entrySet()) {
if(entry.getValue().size()>0) {
CombinedTexture combinedTexture = new CombinedTexture();
CombinedTexture combinedTexture = new CombinedTexture(entry.getKey().intValue());
for(TextureData textureData : entry.getValue()) {
int texflag = ((Number) textureData.mtex.getFieldValue("texflag")).intValue();
boolean negateTexture = (texflag & 0x04) != 0;
@@ -207,45 +197,26 @@ public final class MaterialContext {
//applying textures
if(loadedTextures != null && loadedTextures.size() > 0) {
Entry<Number, CombinedTexture> basicUVSOwner = null;
for(Entry<Number, CombinedTexture> entry : loadedTextures.entrySet()) {
CombinedTexture combinedTexture = entry.getValue();
combinedTexture.flatten(geometry, geometriesOMA, userDefinedUVCoordinates, blenderContext);
VertexBuffer.Type uvCoordinatesType = null;
switch(entry.getKey().intValue()) {
case MTEX_COL:
uvCoordinatesType = VertexBuffer.Type.TexCoord;
material.setTexture(shadeless ? MaterialHelper.TEXTURE_TYPE_COLOR : MaterialHelper.TEXTURE_TYPE_DIFFUSE,
combinedTexture.getResultTexture());
break;
case MTEX_NOR:
uvCoordinatesType = VertexBuffer.Type.TexCoord2;
material.setTexture(MaterialHelper.TEXTURE_TYPE_NORMAL, combinedTexture.getResultTexture());
break;
case MTEX_SPEC:
uvCoordinatesType = VertexBuffer.Type.TexCoord3;
material.setTexture(MaterialHelper.TEXTURE_TYPE_SPECULAR, combinedTexture.getResultTexture());
break;
case MTEX_EMIT:
uvCoordinatesType = VertexBuffer.Type.TexCoord4;
material.setTexture(MaterialHelper.TEXTURE_TYPE_GLOW, combinedTexture.getResultTexture());
break;
case MTEX_ALPHA:
uvCoordinatesType = VertexBuffer.Type.TexCoord5;
material.setTexture(MaterialHelper.TEXTURE_TYPE_ALPHA, combinedTexture.getResultTexture());
break;
default:
LOGGER.severe("Unknown mapping type: " + entry.getKey().intValue());
}
//applying texture coordinates
if(uvCoordinatesType != null) {
VertexBuffer uvCoordsBuffer = new VertexBuffer(uvCoordinatesType);
uvCoordsBuffer.setupData(Usage.Static, 2, Format.Float,
BufferUtils.createFloatBuffer(combinedTexture.getResultUVS().toArray(new Vector2f[combinedTexture.getResultUVS().size()])));
geometry.getMesh().setBuffer(uvCoordsBuffer);
if(basicUVSOwner == null) {
basicUVSOwner = entry;
} else {
combinedTexture.castToUVS(basicUVSOwner.getValue(), blenderContext);
this.setTexture(material, entry.getKey().intValue(), combinedTexture.getResultTexture());
}
}
if(basicUVSOwner != null) {
this.setTexture(material, basicUVSOwner.getKey().intValue(), basicUVSOwner.getValue().getResultTexture());
List<Vector2f> basicUVS = basicUVSOwner.getValue().getResultUVS();
VertexBuffer uvCoordsBuffer = new VertexBuffer(VertexBuffer.Type.TexCoord);
uvCoordsBuffer.setupData(Usage.Static, 2, Format.Float, BufferUtils.createFloatBuffer(basicUVS.toArray(new Vector2f[basicUVS.size()])));
geometry.getMesh().setBuffer(uvCoordsBuffer);
}
} else if(userDefinedUVCoordinates != null && userDefinedUVCoordinates.size() > 0) {
VertexBuffer uvCoordsBuffer = new VertexBuffer(VertexBuffer.Type.TexCoord);
uvCoordsBuffer.setupData(Usage.Static, 2, Format.Float,
@@ -272,6 +243,38 @@ public final class MaterialContext {
geometry.setMaterial(material);
}
/**
* Sets the given texture on the given material.
*
* @param material
* the material to add the texture to
* @param mapTo
* the texture mapping type
* @param texture
* the texture to add
*/
private void setTexture(Material material, int mapTo, Texture texture) {
switch (mapTo) {
case MTEX_COL:
material.setTexture(shadeless ? MaterialHelper.TEXTURE_TYPE_COLOR : MaterialHelper.TEXTURE_TYPE_DIFFUSE, texture);
break;
case MTEX_NOR:
material.setTexture(MaterialHelper.TEXTURE_TYPE_NORMAL, texture);
break;
case MTEX_SPEC:
material.setTexture(MaterialHelper.TEXTURE_TYPE_SPECULAR, texture);
break;
case MTEX_EMIT:
material.setTexture(MaterialHelper.TEXTURE_TYPE_GLOW, texture);
break;
case MTEX_ALPHA:
material.setTexture(MaterialHelper.TEXTURE_TYPE_ALPHA, texture);
break;
default:
LOGGER.severe("Unknown mapping type: " + mapTo);
}
}
/**
* @return <b>true</b> if the material has at least one generated texture and <b>false</b> otherwise
*/

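In the MaterialContext change above, the first loaded CombinedTexture becomes the UV owner and every later one is re-rendered onto its UV layout before being assigned to the material. A rough sketch of that selection pattern; UvTexture and castToUvsOf are hypothetical stand-ins for CombinedTexture and its castToUVS method:

import java.util.Map;
import java.util.Map.Entry;

/** Hypothetical stand-in for CombinedTexture, reduced to the single call used below. */
interface UvTexture {
    /** Re-renders this texture so that it can be sampled with the owner's UV coordinates. */
    void castToUvsOf(UvTexture owner);
}

public class UvOwnerSelectionSketch {

    /** Returns the UV owner; every other texture has been cast onto its UVs. */
    public static UvTexture selectOwnerAndCast(Map<Integer, UvTexture> loadedTextures) {
        UvTexture owner = null;
        for (Entry<Integer, UvTexture> entry : loadedTextures.entrySet()) {
            if (owner == null) {
                owner = entry.getValue();            // the first texture keeps its own UVs
            } else {
                entry.getValue().castToUvsOf(owner); // later textures adopt the owner's UVs
            }
        }
        return owner;
    }
}
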
@@ -1,11 +1,23 @@
package com.jme3.scene.plugins.blender.textures;
import java.awt.Graphics2D;
import java.awt.RenderingHints;
import java.awt.geom.AffineTransform;
import java.awt.image.BufferedImage;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import jme3tools.converters.ImageToAwt;
import com.jme3.math.Vector2f;
import com.jme3.scene.Geometry;
import com.jme3.scene.Mesh;
import com.jme3.scene.plugins.blender.BlenderContext;
import com.jme3.scene.plugins.blender.BlenderContext.LoadedFeatureDataType;
import com.jme3.scene.plugins.blender.file.Structure;
import com.jme3.scene.plugins.blender.materials.MaterialContext;
import com.jme3.scene.plugins.blender.textures.TriangulatedTexture.TriangleTextureElement;
import com.jme3.scene.plugins.blender.textures.UVCoordinatesGenerator.UVCoordinatesType;
import com.jme3.scene.plugins.blender.textures.UVProjectionGenerator.UVProjectionType;
import com.jme3.scene.plugins.blender.textures.blending.TextureBlender;
@@ -18,12 +30,6 @@ import com.jme3.texture.Texture.MagFilter;
import com.jme3.texture.Texture.MinFilter;
import com.jme3.texture.Texture.WrapMode;
import com.jme3.texture.Texture2D;
import java.awt.Graphics2D;
import java.awt.RenderingHints;
import java.awt.image.BufferedImage;
import java.util.ArrayList;
import java.util.List;
import jme3tools.converters.ImageToAwt;
/**
* This class represents a texture that is defined for the material. It can be
@@ -33,14 +39,27 @@ import jme3tools.converters.ImageToAwt;
* @author Marcin Roguski (Kaelthas)
*/
public class CombinedTexture {
/** The mapping type of the texture. Defined by MaterialContext.MTEX_COL, MTEX_NOR, etc. */
private final int mappingType;
/** The data for each of the textures. */
private List<TextureData> textureDatas = new ArrayList<TextureData>();
/** Indicates whether the texture has already been triangulated. */
private boolean wasTriangulated;
/** The result texture. */
private Texture resultTexture;
/** The UV values for the result texture. */
private List<Vector2f> resultUVS;
/**
* Constructor. Stores the texture mapping type (i.e. color map, normal map).
*
* @param mappingType
* texture mapping type
*/
public CombinedTexture(int mappingType) {
this.mappingType = mappingType;
}
/**
* This method adds texture data to the resulting texture.
*
@@ -70,8 +89,7 @@ public class CombinedTexture {
textureData.textureStructure = textureStructure;
if (this.isWithoutAlpha(textureData, blenderContext)) {
textureDatas.clear();// clear previous textures, they will be
// covered anyway
textureDatas.clear();// clear previous textures, they will be covered anyway
}
textureDatas.add(textureData);
}
@@ -161,10 +179,17 @@
}
if (resultTexture instanceof TriangulatedTexture) {
if(mappingType == MaterialContext.MTEX_NOR) {
for(int i=0;i<((TriangulatedTexture) resultTexture).getFaceTextureCount();++i) {
TriangleTextureElement triangleTextureElement = ((TriangulatedTexture) resultTexture).getFaceTextureElement(i);
triangleTextureElement.image = textureHelper.convertToNormalMapTexture(triangleTextureElement.image, 1);//TODO: get proper strength factor
}
}
resultUVS = ((TriangulatedTexture) resultTexture).getResultUVS();
resultTexture = ((TriangulatedTexture) resultTexture).getResultTexture();
wasTriangulated = true;
}
// setting additional data
resultTexture.setWrap(WrapMode.Repeat);
// the filters are required if generated textures are used because
@@ -194,6 +219,130 @@
}
}
/**
* This method casts the current image onto the UV coordinates of the given
* basic UV owner.
*
* @param basicUVSOwner
* the owner of the UVs we cast onto
* @param blenderContext
* the blender context
*/
public void castToUVS(CombinedTexture basicUVSOwner, BlenderContext blenderContext) {
if (!basicUVSOwner.wasTriangulated) {
throw new IllegalStateException("The given texture must be triangulated!");
}
if (!this.wasTriangulated) {
resultTexture = new TriangulatedTexture((Texture2D) resultTexture, resultUVS, blenderContext);
resultUVS = ((TriangulatedTexture) resultTexture).getResultUVS();
resultTexture = ((TriangulatedTexture) resultTexture).getResultTexture();
}
if (resultUVS.size() != basicUVSOwner.resultUVS.size()) {
throw new IllegalStateException("The number of UV coordinates must be equal in order to cast one UV set onto another!");
}
if (!resultUVS.equals(basicUVSOwner.resultUVS)) {
// casting algorithm
TextureHelper textureHelper = blenderContext.getHelper(TextureHelper.class);
ImageLoader imageLoader = new ImageLoader();
List<TriangleTextureElement> faceTextures = new ArrayList<TriangleTextureElement>();
List<Vector2f> basicUVS = basicUVSOwner.getResultUVS();
int[] imageRectangle = new int[4];// minX, minY, maxX, maxY
int[] sourceSize = new int[2], targetSize = new int[2];// width,
// height
Vector2f[] destinationUVS = new Vector2f[3];
Vector2f[] sourceUVS = new Vector2f[3];
List<Vector2f> partImageUVS = Arrays.asList(new Vector2f(), new Vector2f(), new Vector2f());
int faceIndex = 0;
for (int i = 0; i < basicUVS.size(); i += 3) {
// destination size and UVs
destinationUVS[0] = basicUVS.get(i);
destinationUVS[1] = basicUVS.get(i + 1);
destinationUVS[2] = basicUVS.get(i + 2);
this.computeImageRectangle(destinationUVS, imageRectangle, basicUVSOwner.resultTexture.getImage().getWidth(), basicUVSOwner.resultTexture.getImage().getHeight(), blenderContext);
targetSize[0] = imageRectangle[2] - imageRectangle[0];
targetSize[1] = imageRectangle[3] - imageRectangle[1];
for (int j = 0; j < 3; ++j) {
partImageUVS.get(j).set((basicUVSOwner.resultTexture.getImage().getWidth() * destinationUVS[j].x - imageRectangle[0]) / targetSize[0],
(basicUVSOwner.resultTexture.getImage().getHeight() * destinationUVS[j].y - imageRectangle[1]) / targetSize[1]);
}
// source size and UVS (translate UVS to (0,0) and stretch it to
// the borders of the image)
sourceUVS[0] = resultUVS.get(i);
sourceUVS[1] = resultUVS.get(i + 1);
sourceUVS[2] = resultUVS.get(i + 2);
this.computeImageRectangle(sourceUVS, imageRectangle, resultTexture.getImage().getWidth(), resultTexture.getImage().getHeight(), blenderContext);
sourceSize[0] = imageRectangle[2] - imageRectangle[0];
sourceSize[1] = imageRectangle[3] - imageRectangle[1];
float xTranslateFactor = imageRectangle[0] / (float) resultTexture.getImage().getWidth();
float xStreachFactor = resultTexture.getImage().getWidth() / (float) sourceSize[0];
float yTranslateFactor = imageRectangle[1] / (float) resultTexture.getImage().getHeight();
float yStreachFactor = resultTexture.getImage().getHeight() / (float) sourceSize[1];
for (int j = 0; j < 3; ++j) {
sourceUVS[j].x = (sourceUVS[j].x - xTranslateFactor) * xStreachFactor;
sourceUVS[j].y = (sourceUVS[j].y - yTranslateFactor) * yStreachFactor;
}
AffineTransform affineTransform = textureHelper.createAffineTransform(sourceUVS, partImageUVS.toArray(new Vector2f[3]), sourceSize, targetSize);
Image image = textureHelper.getSubimage(resultTexture.getImage(), imageRectangle[0], imageRectangle[1], imageRectangle[2], imageRectangle[3]);
// compute the result texture
BufferedImage sourceImage = ImageToAwt.convert(image, false, true, 0);
BufferedImage targetImage = new BufferedImage(targetSize[0], targetSize[1], sourceImage.getType());
Graphics2D g = targetImage.createGraphics();
g.setRenderingHint(RenderingHints.KEY_INTERPOLATION, RenderingHints.VALUE_INTERPOLATION_BILINEAR);
g.drawImage(sourceImage, affineTransform, null);
g.dispose();
Image output = imageLoader.load(targetImage, false);
faceTextures.add(new TriangleTextureElement(faceIndex++, output, partImageUVS, false, blenderContext));
}
TriangulatedTexture triangulatedTexture = new TriangulatedTexture(faceTextures, blenderContext);
triangulatedTexture.setKeepIdenticalTextures(false);
resultTexture = triangulatedTexture.getResultTexture();
resultUVS = basicUVS;
}
}
/**
* This method computes the bounding rectangle of the image area constrained
* by the triangle's UV coordinates.
*
* @param triangleVertices
* the triangle UV coordinates
* @param result
* the array where the result is stored
* @param totalImageWidth
* the total image width
* @param totalImageHeight
* the total image height
* @param blenderContext
* the blender context
*/
private void computeImageRectangle(Vector2f[] triangleVertices, int[] result, int totalImageWidth, int totalImageHeight, BlenderContext blenderContext) {
TextureHelper textureHelper = blenderContext.getHelper(TextureHelper.class);
float minX = Math.min(triangleVertices[0].x, triangleVertices[1].x);
minX = Math.min(minX, triangleVertices[2].x);
float maxX = Math.max(triangleVertices[0].x, triangleVertices[1].x);
maxX = Math.max(maxX, triangleVertices[2].x);
float minY = Math.min(triangleVertices[0].y, triangleVertices[1].y);
minY = Math.min(minY, triangleVertices[2].y);
float maxY = Math.max(triangleVertices[0].y, triangleVertices[1].y);
maxY = Math.max(maxY, triangleVertices[2].y);
result[0] = textureHelper.getPixelPosition(minX, totalImageWidth);
result[1] = textureHelper.getPixelPosition(minY, totalImageHeight);
result[2] = textureHelper.getPixelPosition(maxX, totalImageWidth);
result[3] = textureHelper.getPixelPosition(maxY, totalImageHeight);
}
/**
* @return the result texture
*/

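The castToUVS code above works triangle by triangle: for each face it computes the pixel-space bounding rectangle of the face's UV triangle (computeImageRectangle) and then redraws the source face into that rectangle with an affine transform. A small standalone sketch of the bounding-rectangle step; plain Math.round is used here for brevity, whereas the real helper, TextureHelper.getPixelPosition, additionally compensates for floating-point error:

import com.jme3.math.Vector2f;

public class UvBoundsSketch {

    /**
     * Computes the pixel-space bounding rectangle {minX, minY, maxX, maxY}
     * of a UV triangle on an image of the given size.
     */
    public static int[] uvTriangleBounds(Vector2f[] uv, int imageWidth, int imageHeight) {
        float minU = Math.min(uv[0].x, Math.min(uv[1].x, uv[2].x));
        float maxU = Math.max(uv[0].x, Math.max(uv[1].x, uv[2].x));
        float minV = Math.min(uv[0].y, Math.min(uv[1].y, uv[2].y));
        float maxV = Math.max(uv[0].y, Math.max(uv[1].y, uv[2].y));
        return new int[] {
                Math.round(minU * imageWidth), Math.round(minV * imageHeight),
                Math.round(maxU * imageWidth), Math.round(maxV * imageHeight) };
    }
}
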
@@ -33,7 +33,6 @@ package com.jme3.scene.plugins.blender.textures;
import java.awt.color.ColorSpace;
import java.awt.geom.AffineTransform;
import java.awt.image.AffineTransformOp;
import java.awt.image.BufferedImage;
import java.awt.image.ColorConvertOp;
import java.nio.ByteBuffer;
@@ -51,6 +50,7 @@ import com.jme3.asset.BlenderKey;
import com.jme3.asset.BlenderKey.FeaturesToLoad;
import com.jme3.asset.GeneratedTextureKey;
import com.jme3.asset.TextureKey;
import com.jme3.math.Vector2f;
import com.jme3.math.Vector3f;
import com.jme3.scene.plugins.blender.AbstractBlenderHelper;
import com.jme3.scene.plugins.blender.BlenderContext;
@@ -197,12 +197,6 @@ public class TextureHelper extends AbstractBlenderHelper {
public Image convertToNormalMapTexture(Image source, float strengthFactor) {
BufferedImage sourceImage = ImageToAwt.convert(source, false, false, 0);
// flip the image because the result image is upside-down without this operation
AffineTransform tx = AffineTransform.getScaleInstance(1, -1);
tx.translate(0, -sourceImage.getHeight(null));
AffineTransformOp op = new AffineTransformOp(tx, AffineTransformOp.TYPE_NEAREST_NEIGHBOR);
sourceImage = op.filter(sourceImage, null);
BufferedImage heightMap = new BufferedImage(sourceImage.getWidth(), sourceImage.getHeight(), BufferedImage.TYPE_INT_ARGB);
BufferedImage bumpMap = new BufferedImage(sourceImage.getWidth(), sourceImage.getHeight(), BufferedImage.TYPE_INT_ARGB);
ColorConvertOp gscale = new ColorConvertOp(ColorSpace.getInstance(ColorSpace.CS_GRAY), null);
@@ -513,7 +507,113 @@
}
return result;
}
/**
* This method creates the affine transform that maps a triangle defined by
* one set of UV coordinates onto a triangle defined by a different set of
* UV coordinates.
*
* @param source
* source UV coordinates
* @param dest
* target UV coordinates
* @param sourceSize
* the width and height of the source image
* @param targetSize
* the width and height of the target image
* @return affine transform to transform one triangle to another
*/
public AffineTransform createAffineTransform(Vector2f[] source, Vector2f[] dest, int[] sourceSize, int[] targetSize) {
float x11 = source[0].getX() * sourceSize[0];
float x12 = source[0].getY() * sourceSize[1];
float x21 = source[1].getX() * sourceSize[0];
float x22 = source[1].getY() * sourceSize[1];
float x31 = source[2].getX() * sourceSize[0];
float x32 = source[2].getY() * sourceSize[1];
float y11 = dest[0].getX() * targetSize[0];
float y12 = dest[0].getY() * targetSize[1];
float y21 = dest[1].getX() * targetSize[0];
float y22 = dest[1].getY() * targetSize[1];
float y31 = dest[2].getX() * targetSize[0];
float y32 = dest[2].getY() * targetSize[1];
float a1 = ((y11 - y21) * (x12 - x32) - (y11 - y31) * (x12 - x22)) / ((x11 - x21) * (x12 - x32) - (x11 - x31) * (x12 - x22));
float a2 = ((y11 - y21) * (x11 - x31) - (y11 - y31) * (x11 - x21)) / ((x12 - x22) * (x11 - x31) - (x12 - x32) * (x11 - x21));
float a3 = y11 - a1 * x11 - a2 * x12;
float a4 = ((y12 - y22) * (x12 - x32) - (y12 - y32) * (x12 - x22)) / ((x11 - x21) * (x12 - x32) - (x11 - x31) * (x12 - x22));
float a5 = ((y12 - y22) * (x11 - x31) - (y12 - y32) * (x11 - x21)) / ((x12 - x22) * (x11 - x31) - (x12 - x32) * (x11 - x21));
float a6 = y12 - a4 * x11 - a5 * x12;
return new AffineTransform(a1, a4, a2, a5, a3, a6);
}
/**
* This method returns the proper pixel position on the image.
*
* @param pos
* the relative position (a value in the range <0, 1>, both inclusive)
* @param size
* the size of the line the pixel lies on (width, height or
* depth)
* @return the integer index of the pixel on the line of the specified width
*/
public int getPixelPosition(float pos, int size) {
float pixelWidth = 1 / (float) size;
pos *= size;
int result = (int) pos;
// here is where we repair floating point operations errors :)
if (Math.abs(result - pos) > pixelWidth) {
++result;
}
return result;
}
/**
* This method returns a subimage of the given image. The subimage is
* constrained by the given rectangle coordinates. The source image is unchanged.
*
* @param image
* the image to take the subimage from
* @param minX
* minimum X position
* @param minY
* minimum Y position
* @param maxX
* maximum X position
* @param maxY
* maximum Y position
* @return a part of the given image
*/
public Image getSubimage(Image image, int minX, int minY, int maxX, int maxY) {
if (minY > maxY) {
throw new IllegalArgumentException("Minimum Y value is higher than maximum Y value!");
}
if (minX > maxX) {
throw new IllegalArgumentException("Minimum X value is higher than maximum X value!");
}
if (image.getData().size() > 1) {
throw new IllegalArgumentException("Only flat images are allowed for subimage operation!");
}
if (image.getMipMapSizes() != null) {
LOGGER.warning("Subimaging image with mipmaps is not yet supported!");
}
int width = maxX - minX;
int height = maxY - minY;
ByteBuffer data = BufferUtils.createByteBuffer(width * height * (image.getFormat().getBitsPerPixel() >> 3));
Image result = new Image(image.getFormat(), width, height, data);
PixelInputOutput pixelIO = PixelIOFactory.getPixelIO(image.getFormat());
TexturePixel pixel = new TexturePixel();
for (int x = minX; x < maxX; ++x) {
for (int y = minY; y < maxY; ++y) {
pixelIO.read(image, 0, pixel, x, y);
pixelIO.write(result, 0, pixel, x - minX, y - minY);
}
}
return result;
}
/**
* This method applies the colorband and color factors to image type
* textures. If there is no colorband defined for the texture or the color

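For reference, createAffineTransform above solves, in pixel coordinates, the six-unknown linear system that maps the three source triangle vertices (x_i, y_i) onto the three destination vertices (u_i, v_i) (the indexing here is ours, not the code's):

\[
u_i = a_1 x_i + a_2 y_i + a_3, \qquad v_i = a_4 x_i + a_5 y_i + a_6, \qquad i = 1, 2, 3.
\]

The closed-form expressions for a1..a6 in the code are the elimination solution of this system; they are passed to new AffineTransform(a1, a4, a2, a5, a3, a6), whose constructor takes the matrix entries in (m00, m10, m01, m11, m02, m12) order.
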
@@ -1,18 +1,5 @@
package com.jme3.scene.plugins.blender.textures;
import com.jme3.bounding.BoundingBox;
import com.jme3.math.FastMath;
import com.jme3.math.Vector2f;
import com.jme3.math.Vector3f;
import com.jme3.scene.plugins.blender.BlenderContext;
import com.jme3.scene.plugins.blender.textures.blending.TextureBlender;
import com.jme3.scene.plugins.blender.textures.io.PixelIOFactory;
import com.jme3.scene.plugins.blender.textures.io.PixelInputOutput;
import com.jme3.texture.Image;
import com.jme3.texture.Image.Format;
import com.jme3.texture.Texture;
import com.jme3.texture.Texture2D;
import com.jme3.util.BufferUtils;
import java.awt.Graphics2D;
import java.awt.RenderingHints;
import java.awt.geom.AffineTransform;
@@ -29,8 +16,23 @@ import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeSet;
import jme3tools.converters.ImageToAwt;
import com.jme3.bounding.BoundingBox;
import com.jme3.math.FastMath;
import com.jme3.math.Vector2f;
import com.jme3.math.Vector3f;
import com.jme3.scene.plugins.blender.BlenderContext;
import com.jme3.scene.plugins.blender.textures.blending.TextureBlender;
import com.jme3.scene.plugins.blender.textures.io.PixelIOFactory;
import com.jme3.scene.plugins.blender.textures.io.PixelInputOutput;
import com.jme3.texture.Image;
import com.jme3.texture.Image.Format;
import com.jme3.texture.Texture;
import com.jme3.texture.Texture2D;
import com.jme3.util.BufferUtils;
/**
* This texture holds a set of images for each face in the specified mesh. It
* helps to flatten 3D texture, merge 3D and 2D textures and merge 2D textures
@@ -38,7 +40,7 @@ import jme3tools.converters.ImageToAwt;
*
* @author Marcin Roguski (Kaelthas)
*/
/* package */class TriangulatedTexture extends Texture {
/* package */class TriangulatedTexture extends Texture2D {
/** The result image format. */
private Format format;
/** The collection of images for each face. */
@@ -48,12 +50,13 @@ import jme3tools.converters.ImageToAwt;
* key.
*/
private int maxTextureSize;
/** A variable that can prevent removing identical textures. */
private boolean keepIdenticalTextures = false;
/** The result texture. */
private Texture2D resultTexture;
/** The result texture's UV coordinates. */
private List<Vector2f> resultUVS;
/**
* This method triangulates the given flat texture. The given texture is not
* changed.
@@ -72,7 +75,7 @@ import jme3tools.converters.ImageToAwt;
});
int facesCount = uvs.size() / 3;
for (int i = 0; i < facesCount; ++i) {
faceTextures.add(new TriangleTextureElement(i, texture2d, uvs));
faceTextures.add(new TriangleTextureElement(i, texture2d.getImage(), uvs, true, blenderContext));
}
this.format = texture2d.getImage().getFormat();
}
@@ -134,6 +137,7 @@ import jme3tools.converters.ImageToAwt;
public void castToUVS(TriangulatedTexture targetTexture, BlenderContext blenderContext) {
int[] sourceSize = new int[2], targetSize = new int[2];
ImageLoader imageLoader = new ImageLoader();
TextureHelper textureHelper = blenderContext.getHelper(TextureHelper.class);
for (TriangleTextureElement entry : faceTextures) {
TriangleTextureElement targetFaceTextureElement = targetTexture.getFaceTextureElement(entry.faceIndex);
Vector2f[] dest = targetFaceTextureElement.uv;
@@ -145,7 +149,7 @@ import jme3tools.converters.ImageToAwt;
targetSize[1] = targetFaceTextureElement.image.getHeight();
// create triangle transformation
AffineTransform affineTransform = this.createTransform(entry.uv, dest, sourceSize, targetSize);
AffineTransform affineTransform = textureHelper.createAffineTransform(entry.uv, dest, sourceSize, targetSize);
// compute the result texture
BufferedImage sourceImage = ImageToAwt.convert(entry.image, false, true, 0);
@@ -227,11 +231,12 @@ import jme3tools.converters.ImageToAwt;
Integer[] currentPositions = new Integer[] { currentXPos, currentYPos };
imageLayoutData.put(currentElement, currentPositions);
// removing identical images
for (int i = 0; i < list.size(); ++i) {
if (currentElement.image.equals(list.get(i).image)) {
duplicatedFaceIndexes.add(list.get(i).faceIndex);
imageLayoutData.put(list.remove(i--), currentPositions);
if(keepIdenticalTextures) {// removing identical images
for (int i = 0; i < list.size(); ++i) {
if (currentElement.image.equals(list.get(i).image)) {
duplicatedFaceIndexes.add(list.get(i).faceIndex);
imageLayoutData.put(list.remove(i--), currentPositions);
}
}
}
@@ -271,6 +276,12 @@ import jme3tools.converters.ImageToAwt;
this.draw(resultImage, entry.getKey().image, entry.getValue()[0], entry.getValue()[1]);
}
}
// setting additional data
resultTexture.setWrap(WrapAxis.S, this.getWrap(WrapAxis.S));
resultTexture.setWrap(WrapAxis.T, this.getWrap(WrapAxis.T));
resultTexture.setMagFilter(this.getMagFilter());
resultTexture.setMinFilter(this.getMinFilter());
}
return resultTexture;
}
@@ -300,7 +311,7 @@ import jme3tools.converters.ImageToAwt;
* this exception is thrown if the current image set does not
* contain an image for the given face index
*/
private TriangleTextureElement getFaceTextureElement(int faceIndex) {
public TriangleTextureElement getFaceTextureElement(int faceIndex) {
for (TriangleTextureElement textureElement : faceTextures) {
if (textureElement.faceIndex == faceIndex) {
return textureElement;
@@ -308,6 +319,23 @@ import jme3tools.converters.ImageToAwt;
}
throw new IllegalStateException("No face texture element found for index: " + faceIndex);
}
/**
* @return the number of face textures
*/
public int getFaceTextureCount() {
return faceTextures.size();
}
/**
* Tells the object whether to keep identical face textures or merge them.
*
* @param keepIdenticalTextures
* determines whether identical face textures are kept or merged
*/
public void setKeepIdenticalTextures(boolean keepIdenticalTextures) {
this.keepIdenticalTextures = keepIdenticalTextures;
}
/**
* This method draws the source image on the target image starting with the
@@ -335,44 +363,6 @@ import jme3tools.converters.ImageToAwt;
}
}
/**
* This method creates the affine transform that is used to transform a
* triangle defined by one UV coordinates into a triangle defined by
* different UV's.
*
* @param source
* source UV coordinates
* @param dest
* target UV coordinates
* @param sourceSize
* the width and height of the source image
* @param targetSize
* the width and height of the target image
* @return affine transform to transform one triangle to another
*/
private AffineTransform createTransform(Vector2f[] source, Vector2f[] dest, int[] sourceSize, int[] targetSize) {
float x11 = source[0].getX() * sourceSize[0];
float x12 = source[0].getY() * sourceSize[1];
float x21 = source[1].getX() * sourceSize[0];
float x22 = source[1].getY() * sourceSize[1];
float x31 = source[2].getX() * sourceSize[0];
float x32 = source[2].getY() * sourceSize[1];
float y11 = dest[0].getX() * targetSize[0];
float y12 = dest[0].getY() * targetSize[1];
float y21 = dest[1].getX() * targetSize[0];
float y22 = dest[1].getY() * targetSize[1];
float y31 = dest[2].getX() * targetSize[0];
float y32 = dest[2].getY() * targetSize[1];
float a1 = ((y11 - y21) * (x12 - x32) - (y11 - y31) * (x12 - x22)) / ((x11 - x21) * (x12 - x32) - (x11 - x31) * (x12 - x22));
float a2 = ((y11 - y21) * (x11 - x31) - (y11 - y31) * (x11 - x21)) / ((x12 - x22) * (x11 - x31) - (x12 - x32) * (x11 - x21));
float a3 = y11 - a1 * x11 - a2 * x12;
float a4 = ((y12 - y22) * (x12 - x32) - (y12 - y32) * (x12 - x22)) / ((x11 - x21) * (x12 - x32) - (x11 - x31) * (x12 - x22));
float a5 = ((y12 - y22) * (x11 - x31) - (y12 - y32) * (x11 - x21)) / ((x12 - x22) * (x11 - x31) - (x12 - x32) * (x11 - x21));
float a6 = y12 - a4 * x11 - a5 * x12;
return new AffineTransform(a1, a4, a2, a5, a3, a6);
}
/**
* A class that represents an image for a single face of the mesh.
*
@@ -395,36 +385,25 @@ import jme3tools.converters.ImageToAwt;
*
* @param faceIndex
* the index of mesh's face this image refers to
* @param texture
* the source texture
* @param sourceImage
* the source image
* @param uvCoordinates
* the UV coordinates that define the image
* @param wholeUVList
* indicates whether the given list holds UV coordinates for the whole mesh
* (indexed by face) or only the three coordinates of this face
* @param blenderContext
* the blender context
*/
public TriangleTextureElement(int faceIndex, Texture2D texture, List<Vector2f> uvCoordinates) {
public TriangleTextureElement(int faceIndex, Image sourceImage, List<Vector2f> uvCoordinates, boolean wholeUVList, BlenderContext blenderContext) {
TextureHelper textureHelper = blenderContext.getHelper(TextureHelper.class);
this.faceIndex = faceIndex;
Image sourceImage = texture.getImage();
uv = new Vector2f[] { uvCoordinates.get(faceIndex * 3).clone(), uvCoordinates.get(faceIndex * 3 + 1).clone(), uvCoordinates.get(faceIndex * 3 + 2).clone() };
float pixelWidth = 1 / (float) sourceImage.getWidth();
float pixelHeight = 1 / (float) sourceImage.getHeight();
uv = wholeUVList ?
new Vector2f[] { uvCoordinates.get(faceIndex * 3).clone(), uvCoordinates.get(faceIndex * 3 + 1).clone(), uvCoordinates.get(faceIndex * 3 + 2).clone() } :
new Vector2f[] { uvCoordinates.get(0).clone(), uvCoordinates.get(1).clone(), uvCoordinates.get(2).clone() };
// be careful here, floating point operations might cause the
// texture positions to be inappropriate
int[][] texturePosition = new int[3][2];
for (int i = 0; i < texturePosition.length; ++i) {
float x = uv[i].x * sourceImage.getWidth();
float y = uv[i].y * sourceImage.getHeight();
// here is where errors may occur
texturePosition[i][0] = (int) x;
texturePosition[i][1] = (int) y;
// here is where we repair errors :)
if (Math.abs(texturePosition[i][0] - x) > pixelWidth) {
++texturePosition[i][0];
}
if (Math.abs(texturePosition[i][1] - y) > pixelHeight) {
++texturePosition[i][1];
}
texturePosition[i][0] = textureHelper.getPixelPosition(uv[i].x, sourceImage.getWidth());
texturePosition[i][1] = textureHelper.getPixelPosition(uv[i].y, sourceImage.getHeight());
}
// calculating the extent of the texture
@@ -697,24 +676,6 @@ import jme3tools.converters.ImageToAwt;
}
}
@Override
public void setWrap(WrapAxis axis, WrapMode mode) {
}
@Override
public void setWrap(WrapMode mode) {
}
@Override
public WrapMode getWrap(WrapAxis axis) {
return null;
}
@Override
public Type getType() {
return Type.TwoDimensional;
}
@Override
public Texture createSimpleClone() {
return null;
