/** Returns the number of elements, delegating to the backing list. */
@Override
public int size() {
    final int elementCount = list.size();
    return elementCount;
}
@Override public MeshData load(ResourceUrn urn, List<AssetDataFile> inputs) throws IOException { try (BufferedReader reader = new BufferedReader(new InputStreamReader(inputs.get(0).openStream()))) { List<Vector3f> rawVertices = Lists.newArrayList(); List<Vector3f> rawNormals = Lists.newArrayList(); List<Vector2f> rawTexCoords = Lists.newArrayList(); List<Vector3i[]> rawIndices = Lists.newArrayList(); // Gather data readMeshData(reader, rawVertices, rawNormals, rawTexCoords, rawIndices); // Determine face format; if (rawIndices.size() == 0) { throw new IOException("No index data"); } MeshData data = processData(rawVertices, rawNormals, rawTexCoords, rawIndices); if (data.getVertices() == null) { throw new IOException("No vertices define"); } if (!data.getNormals().isEmpty() && data.getNormals().size() != data.getVertices().size()) { throw new IOException("The number of normals does not match the number of vertices."); } if (!data.getTexCoord0().isEmpty() && data.getTexCoord0().size() / 2 != data.getVertices().size() / 3) { throw new IOException("The number of tex coords does not match the number of vertices."); } return data; } }
// NOTE(review): fragment of a larger method — this excerpt begins inside an argument
// list (an interleaved-buffer size computation) and ends inside a constructor call;
// `cTex` is declared on a line outside this excerpt. Code kept byte-identical.
// The capacity sums one slot per component: POSITION (3/vertex), TEX0 and TEX1
// (2/vertex each — TEX1 presumably holds lighting data per the inline comment),
// COLOR (4/vertex) and NORMALS (3/vertex) — TODO confirm against the enclosing class.
elements.vertices.size() + /* POSITION */ elements.tex.size() + /* TEX0 (UV0 and flags) */ elements.tex.size() + /* TEX1 (lighting data) */ elements.color.size() + /* COLOR */ elements.normals.size() /* NORMALS */ ); int cColor = 0; int cFlags = 0; for (int i = 0; i < elements.vertices.size(); i += 3, cTex += 2, cColor += 4, cFlags++) { Vector3f vertexPos = new Vector3f( elements.vertices.get(i),
// NOTE(review): fragment of a larger loader method — the closing braces for the
// `if` blocks below fall outside this excerpt, so the braces here are unbalanced
// by construction. Code kept byte-identical.
// Validation invariants visible here: either colors or tex coords must be present;
// tex coords are 2 floats per vertex, colors are 4 floats per vertex, positions 3.
for (int i = 0; i < loader.getVertices().size(); i++) { float originalVertexValue = loader.getVertices().get(i); float adjustedVertexValue = (float) (originalVertexValue * loader.getUnitsPerMeter()); if (((null == data.getColors()) || (0 == data.getColors().size())) && ((null == data.getTexCoord0()) || (0 == data.getTexCoord0().size()))) { throw new IOException("There must be either texture coordinates or vertex colors provided."); if ((null != data.getTexCoord0()) && (0 != data.getTexCoord0().size())) { if (data.getTexCoord0().size() / 2 != data.getVertices().size() / 3) { throw new IOException("The number of tex coords (" + data.getTexCoord0().size() / 2 + ") does not match the number of vertices (" + data.getVertices().size() / 3 + ")."); if ((null != data.getColors()) && (0 != data.getColors().size())) { if (data.getColors().size() / 4 != data.getVertices().size() / 3) { throw new IOException("The number of vertex colors (" + data.getColors().size() / 4 + ") does not match the number of vertices (" + data.getVertices().size() / 3 + ").");
/**
 * Creates a new AABB that contains the vertices as represented by a {@link TFloatList}.
 *
 * @param vertices The vertices to encompass. It is assumed that the X, Y, Z components of each
 *                 vertex are stored consecutively in the {@link TFloatList}.
 *
 *                 For the {@code i}th vertex in the list, the X, Y, and Z components
 *                 are stored at indices {@code 3 * i}, {@code 3 * i + 1}, and
 *                 {@code 3 * i + 2} respectively.
 *
 * @return The created AABB.
 */
// NOTE(review): method name misspells "Encompassing"; kept as-is for API compatibility.
public static AABB createEncompasing(TFloatList vertices) {
    int vertexCount = vertices.size() / 3;
    if (vertexCount == 0) {
        return AABB.createEmpty();
    }
    // Seed min/max with the first vertex, then fold in the rest.
    Vector3f min = new Vector3f(vertices.get(0), vertices.get(1), vertices.get(2));
    Vector3f max = new Vector3f(vertices.get(0), vertices.get(1), vertices.get(2));
    for (int index = 1; index < vertexCount; ++index) {
        // Fetch each component once per iteration (the original read every component twice).
        int base = 3 * index;
        float x = vertices.get(base);
        float y = vertices.get(base + 1);
        float z = vertices.get(base + 2);
        min.x = Math.min(min.x, x);
        max.x = Math.max(max.x, x);
        min.y = Math.min(min.y, y);
        max.y = Math.max(max.y, y);
        min.z = Math.min(min.z, z);
        max.z = Math.max(max.z, z);
    }
    return AABB.createMinMax(min, max);
}
// NOTE(review): fragment of a larger method — the closing braces for the `if`
// blocks below fall outside this excerpt, so braces here are unbalanced by
// construction. Code kept byte-identical.
// Each optional attribute stream is appended only when its element count matches
// the vertex count (sizes are in floats; *_SIZE constants are components per vertex).
int vertexCount = newData.getVertices().size() / VERTEX_SIZE; int vertexSize = VERTEX_SIZE; parts.add(newData.getVertices().iterator()); partSizes.add(VERTEX_SIZE); if (newData.getTexCoord0() != null && newData.getTexCoord0().size() / TEX_COORD_0_SIZE == vertexCount) { parts.add(newData.getTexCoord0().iterator()); partSizes.add(TEX_COORD_0_SIZE); hasTexCoord0 = true; if (newData.getTexCoord1() != null && newData.getTexCoord1().size() / TEX_COORD_1_SIZE == vertexCount) { parts.add(newData.getTexCoord1().iterator()); partSizes.add(TEX_COORD_1_SIZE); hasTexCoord1 = true; if (newData.getNormals() != null && newData.getNormals().size() / NORMAL_SIZE == vertexCount) { parts.add(newData.getNormals().iterator()); partSizes.add(NORMAL_SIZE); hasNormal = true; if (newData.getColors() != null && newData.getColors().size() / COLOR_SIZE == vertexCount) { parts.add(newData.getColors().iterator()); partSizes.add(COLOR_SIZE);
public SkeletalMeshDataBuilder addMesh(Bone bone, MeshData data) { TFloatList meshVertices = data.getVertices(); TIntList meshIndices = data.getIndices(); TFloatList texCoord0 = data.getTexCoord0(); int weightsStart = weights.size(); addBone(bone); for (int i = 0; i < meshVertices.size() / 3; i++) { float x = meshVertices.get(i * 3); float y = meshVertices.get(i * 3 + 1); float z = meshVertices.get(i * 3 + 2); Vector3f pos = new Vector3f(x, y, z); BoneWeight weight = new BoneWeight(pos, 1, bone.getIndex()); // TODO Meshes may contain normal vectors and we may copy them to the weight here // - but they are recalculated later on in either case. needs some rework addWeight(weight); vertexStartWeights.add(weightsStart + i); vertexWeightCounts.add(1); uvs.add(new Vector2f(texCoord0.get(i * 2), texCoord0.get(i * 2 + 1))); } for (int i = 0; i < meshIndices.size(); i++) { indices.add(meshIndices.get(i) + weightsStart); } return this; }
/** Size of this view — simply the size of the wrapped list. */
@Override
public int size() {
    final int count = list.size();
    return count;
}
/** Delegates the element count to the underlying list. */
@Override
public int size() {
    final int n = list.size();
    return n;
}
/** Reports how many elements the backing list currently holds. */
@Override
public int size() {
    final int backingSize = list.size();
    return backingSize;
}
/** Returns the current element count of the delegate list. */
@Override
public int size() {
    final int total = list.size();
    return total;
}
/**
 * Replaces the current buffer data with the floats in the given {@link gnu.trove.list.TFloatList}.
 * This method arbitrarily creates data for the ByteBuffer regardless of the data type of the
 * vertex attribute.
 *
 * @param list the float values to copy into the freshly allocated buffer
 */
public void setData(TFloatList list) {
    // Allocate exactly list.size() * sizeof(float) bytes, then copy every value in order.
    this.buffer = CausticUtil.createByteBuffer(list.size() * DataType.FLOAT.getByteSize());
    final TFloatIterator iterator = list.iterator();
    while (iterator.hasNext()) {
        buffer.putFloat(iterator.next());
    }
}
/**
 * Stores a vector under {@code vecid}, unless that id is already registered,
 * in which case the call is silently ignored. Listeners are notified after the
 * vector has been appended to the backing storage.
 */
@Override
public void add(int vecid, float[] vector) {
    if (indexer.containsKey(vecid)) {
        return; // id already present — original behavior is a silent no-op
    }
    int offset = data.size();
    indexer.put(vecid, offset);
    for (int i = 0; i < vector.length; i++) {
        data.add(vector[i]);
    }
    lengths.put(vecid, vector.length);
    if (listening) {
        for (VectorSetListener listener : listeners) {
            listener.onVectorAdded(this, vecid, vector);
        }
    }
}
/** {@inheritDoc} */
@Override
public boolean equals(Object other) {
    if (other == this) {
        return true;
    }
    if (!(other instanceof TFloatList)) {
        return false;
    }
    // Note: elements are compared with primitive != , so NaN entries never
    // compare equal (NaN != NaN is true in Java).
    if (other instanceof TFloatArrayList) {
        // Fast path: compare backing arrays directly, avoiding virtual get() calls.
        TFloatArrayList array = (TFloatArrayList) other;
        if (array.size() != this.size()) {
            return false;
        }
        for (int i = 0; i < _pos; i++) {
            if (this._data[i] != array._data[i]) {
                return false;
            }
        }
        return true;
    }
    // Generic path: any other TFloatList implementation, element by element.
    TFloatList list = (TFloatList) other;
    if (list.size() != this.size()) {
        return false;
    }
    for (int i = 0; i < _pos; i++) {
        if (this._data[i] != list.get(i)) {
            return false;
        }
    }
    return true;
}
/**
 * Stores a sparse vector (index/value pairs) under {@code vecid} after validating
 * the arguments; a duplicate id is silently ignored. Listeners are notified after
 * the pairs have been appended to the backing storage.
 */
@Override
public void _add(int vecid, int[] pairs) {
    validateParams(vecid, pairs);
    if (indexer.containsKey(vecid)) {
        return; // id already present — original behavior is a silent no-op
    }
    indexer.put(vecid, data.size());
    lengths.put(vecid, pairs.length);
    for (int value : pairs) {
        data.add(value);
    }
    if (listening) {
        for (VectorSetListener listener : listeners) {
            listener.onVectorAdded(this, vecid, pairs);
        }
    }
}
/**
 * De-indexes per-semantic attribute streams: walks the interleaved raw index
 * tuples and scatters texture coordinates and normals so they line up with the
 * position indices written to {@code indices}.
 *
 * NOTE(review): the {@code == -1} checks assume the {@code offsets} map was
 * configured with a no-entry value of -1 (Trove's default no-entry value for
 * int is 0) — verify how {@code offsets} is constructed.
 */
private static void loadIndices(int[] rawIndices, TObjectIntMap<String> offsets, TIntList indices, TFloatList rawTextureCoords, TFloatList textureCoords, TFloatList rawNormals, TFloatList normals) {
    final int positionOffset = offsets.get(SEMANTIC_VERTEX);
    final int texCoordsOffset = offsets.get(SEMANTIC_TEXCOORD);
    final int normalOffset = offsets.get(SEMANTIC_NORMAL);
    // NOTE(review): this yields 3 components whenever tex coords are present,
    // even if normals are absent — confirm that layout never occurs in practice.
    final int components = texCoordsOffset == -1 ? normalOffset == -1 ? 1 : 2 : 3;
    // Pre-fill the output streams with zeros so scattered writes via set() are in range.
    if (textureCoords != null && texCoordsOffset != -1) {
        textureCoords.fill(0, rawTextureCoords.size(), 0);
    }
    if (normals != null && normalOffset != -1) {
        normals.fill(0, rawNormals.size(), 0);
    }
    for (int i = 0; i < rawIndices.length; i += components) {
        final int positionIndex = rawIndices[i + positionOffset];
        indices.add(positionIndex);
        // Re-home each attribute at the slot of its position index (last write wins
        // if a position is referenced with differing attribute indices).
        if (textureCoords != null && texCoordsOffset != -1) {
            final int texCoordsIndex = rawIndices[i + texCoordsOffset] * STEP_TEXCOORD;
            for (int s = 0; s < STEP_TEXCOORD; s++) {
                textureCoords.set(positionIndex * STEP_TEXCOORD + s, rawTextureCoords.get(texCoordsIndex + s));
            }
        }
        if (normals != null && normalOffset != -1) {
            final int normalIndex = rawIndices[i + normalOffset] * STEP_NORMAL;
            for (int s = 0; s < STEP_NORMAL; s++) {
                normals.set(positionIndex * STEP_NORMAL + s, rawNormals.get(normalIndex + s));
            }
        }
    }
}
/**
 * Compacts the backing storage: rebuilds {@code data} and {@code indexer} so that
 * all live vectors are stored contiguously, dropping any dead gaps left by
 * previous mutations. Vector contents and lengths are unchanged.
 */
@Override
public void clean() {
    TFloatList previousData = data;
    TIntIntMap previousIndexer = indexer;
    data = new TFloatArrayList(previousData.size());
    indexer = new TIntIntHashMap(previousIndexer.size());
    int writePos = 0;
    TIntIntIterator entries = previousIndexer.iterator();
    while (entries.hasNext()) {
        entries.advance();
        int vecid = entries.key();
        int start = entries.value();
        int length = lengths.get(vecid);
        indexer.put(vecid, writePos);
        // Copy this vector's floats to the new, contiguous position.
        for (int offset = 0; offset < length; offset++) {
            data.add(previousData.get(start + offset));
        }
        writePos += length;
    }
}
/**
 * Defragments the vector storage: allocates fresh {@code data} and {@code indexer}
 * structures and copies every registered vector into them back-to-back, so stale
 * regions from earlier updates are discarded.
 */
@Override
public void clean() {
    TFloatList staleData = data;
    TIntIntMap staleIndexer = indexer;
    data = new TFloatArrayList(staleData.size());
    indexer = new TIntIntHashMap(staleIndexer.size());
    int nextSlot = 0;
    TIntIntIterator it = staleIndexer.iterator();
    while (it.hasNext()) {
        it.advance();
        int vecid = it.key();
        int oldStart = it.value();
        int length = lengths.get(vecid);
        indexer.put(vecid, nextSlot);
        // Append the vector's values at the new compacted location.
        for (int k = 0; k < length; k++) {
            data.add(staleData.get(oldStart + k));
        }
        nextSlot += length;
    }
}
/**
 * Serializes a {@link DenseVectorSet}. Wire layout (order matters and must stay
 * byte-compatible): key string, accumuFactor, sparseFactor, index entry count,
 * then (vecid, start, length) per entry, then the float count and every float
 * in storage order.
 */
@Override
public void write(Kryo kryo, Output output, DenseVectorSet vectorSet) {
    // Header.
    output.writeString(vectorSet.key());
    kryo.writeObject(output, vectorSet.accumuFactor);
    kryo.writeObject(output, vectorSet.sparseFactor);
    // Index table.
    TIntIntMap indexer = vectorSet.indexer;
    TIntIntIterator entries = indexer.iterator();
    kryo.writeObject(output, indexer.size());
    while (entries.hasNext()) {
        entries.advance();
        int vecid = entries.key();
        kryo.writeObject(output, vecid);
        kryo.writeObject(output, entries.value());
        kryo.writeObject(output, vectorSet.length(vecid));
    }
    // Payload.
    int elementCount = vectorSet.data.size();
    kryo.writeObject(output, elementCount);
    for (int i = 0; i < elementCount; i++) {
        kryo.writeObject(output, vectorSet.data.get(i));
    }
}
/**
 * Serializes a {@link SparseVectorSet}. Wire layout (order matters and must stay
 * byte-compatible): key string, accumuFactor, sparseFactor, index entry count,
 * then (vecid, start, length) per entry, then the float count and every float
 * in storage order.
 */
@Override
public void write(Kryo kryo, Output output, SparseVectorSet vectorSet) {
    // Header.
    output.writeString(vectorSet.key());
    kryo.writeObject(output, vectorSet.accumuFactor);
    kryo.writeObject(output, vectorSet.sparseFactor);
    // Index table.
    TIntIntMap indexer = vectorSet.indexer;
    TIntIntIterator entries = indexer.iterator();
    kryo.writeObject(output, indexer.size());
    while (entries.hasNext()) {
        entries.advance();
        int vecid = entries.key();
        kryo.writeObject(output, vecid);
        kryo.writeObject(output, entries.value());
        kryo.writeObject(output, vectorSet.length(vecid));
    }
    // Payload.
    int elementCount = vectorSet.data.size();
    kryo.writeObject(output, elementCount);
    for (int i = 0; i < elementCount; i++) {
        kryo.writeObject(output, vectorSet.data.get(i));
    }
}
}