/**
 * Reads a little-endian fixed-width 64-bit long.
 *
 * <p>The value is stored as two fixed 32-bit words, low word first.
 *
 * @return the assembled 64-bit value
 * @throws IOException if the underlying read fails
 */
public long readFixed64() throws IOException {
  // First word is the low half; mask to defeat sign extension on widening.
  long low = readFixed32() & 0xFFFFFFFFL;
  long high = readFixed32();
  return low | (high << 32);
}
/**
 * Deserializes a block descriptor from the buffer.
 *
 * <p>The on-disk field order is fixed: rowCount, uncompressedSize,
 * compressedSize — each a fixed 32-bit value.
 *
 * @param in the buffer positioned at a serialized descriptor
 * @return a freshly populated descriptor
 * @throws IOException if the underlying reads fail
 */
public static BlockDescriptor read(InputBuffer in) throws IOException {
  BlockDescriptor descriptor = new BlockDescriptor();
  descriptor.rowCount = in.readFixed32();
  descriptor.uncompressedSize = in.readFixed32();
  descriptor.compressedSize = in.readFixed32();
  return descriptor;
}
/**
 * Reads a 32-bit float stored as its raw IEEE-754 bit pattern.
 *
 * @return the decoded float
 * @throws IOException if the underlying read fails
 */
public float readFloat() throws IOException {
  int bits = readFixed32();
  return Float.intBitsToFloat(bits);
}
/**
 * Reads one value of the given {@link ValueType} from the buffer and casts
 * it to the caller's expected comparable type (unchecked cast).
 *
 * @param type the wire type of the value to read
 * @return the boxed value read, or null for {@code NULL}
 * @throws IOException if the underlying read fails
 * @throws TrevniRuntimeException for a type this reader does not handle
 */
public <T extends Comparable> T readValue(ValueType type) throws IOException {
  // Decode into a single Object, then cast once on the way out.
  final Object value;
  switch (type) {
  case NULL:    value = null;                           break;
  case BOOLEAN: value = Boolean.valueOf(readBoolean()); break;
  case INT:     value = Integer.valueOf(readInt());     break;
  case LONG:    value = Long.valueOf(readLong());       break;
  case FIXED32: value = Integer.valueOf(readFixed32()); break;
  case FIXED64: value = Long.valueOf(readFixed64());    break;
  case FLOAT:   value = Float.valueOf(readFloat());     break;
  case DOUBLE:  value = Double.valueOf(readDouble());   break;
  case STRING:  value = readString();                   break;
  case BYTES:   value = readBytes(null);                break;
  default: throw new TrevniRuntimeException("Unknown value type: "+type);
  }
  return (T)value;
}
/**
 * Reads the file header at offset 0: magic bytes, total row count, column
 * count, file-level metadata, then per-column metadata and start offsets.
 * Populates rowCount, columnCount, metaData, columnsByName and columns.
 *
 * @throws IOException if the file cannot be read
 */
private void readHeader() throws IOException {
  // The header always lives at the very start of the file.
  InputBuffer in = new InputBuffer(file, 0);
  readMagic(in);
  this.rowCount = in.readFixed64();
  this.columnCount = in.readFixed32();
  this.metaData = ColumnFileMetaData.read(in);
  // Presize the lookup map; presumably filled in readColumnMetaData — confirm.
  this.columnsByName = new HashMap<>(columnCount);
  columns = new ColumnDescriptor[columnCount];
  // Read order matters: column metadata precedes column start offsets on disk.
  readColumnMetaData(in);
  readColumnStarts(in);
}
/**
 * Round-trips COUNT random ints through writeFixed32/readFixed32, replaying
 * the same seeded Random so the read side knows the expected sequence.
 */
@Test
public void testFixed32() throws Exception {
  OutputBuffer out = new OutputBuffer();
  Random random = TestUtil.createRandom();
  for (int i = 0; i < COUNT; i++) {
    out.writeFixed32(random.nextInt());
  }
  // Re-create the generator with the identical seed before reading back.
  random = TestUtil.createRandom();
  InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
  for (int i = 0; i < COUNT; i++) {
    Assert.assertEquals(random.nextInt(), in.readFixed32());
  }
}
/**
 * Lazily loads this column's block index: reads every block descriptor (and,
 * when the column is indexed, the first value of each block), then derives the
 * absolute start offset and first row number of every block. No-op once
 * {@code blocks} is set.
 *
 * @throws IOException if the descriptors cannot be read
 */
public void ensureBlocksRead() throws IOException {
  if (blocks != null) return;
  // read block descriptors
  InputBuffer in = new InputBuffer(file, start);
  int blockCount = in.readFixed32();
  // Local shadows the field; published only after everything is computed.
  BlockDescriptor[] blocks = new BlockDescriptor[blockCount];
  if (metaData.hasIndexValues())
    firstValues = (T[])new Comparable[blockCount];
  for (int i = 0; i < blockCount; i++) {
    blocks[i] = BlockDescriptor.read(in);
    // When present, each descriptor is followed by the block's first value.
    if (metaData.hasIndexValues())
      firstValues[i] = in.readValue(metaData.getType());
  }
  // Data blocks begin immediately after the descriptor section.
  dataStart = in.tell();
  // compute blockStarts and firstRows
  Checksum checksum = Checksum.get(metaData);
  blockStarts = new long[blocks.length];
  firstRows = new long[blocks.length];
  long startPosition = dataStart;
  long row = 0;
  for (int i = 0; i < blockCount; i++) {
    BlockDescriptor b = blocks[i];
    blockStarts[i] = startPosition;
    firstRows[i] = row;
    // On disk each compressed block is followed by its checksum bytes.
    startPosition += b.compressedSize + checksum.size();
    row += b.rowCount;
  }
  // Assign the field last, after the index arrays are fully built.
  this.blocks = blocks;
}
/** Reads a little-endian fixed 64-bit long: the first fixed32 is the low
 *  word (masked to prevent sign extension), the second is the high word. */
public long readFixed64() throws IOException { return (readFixed32() & 0xFFFFFFFFL) | (((long)readFixed32()) << 32); }
/** Deserializes a BlockDescriptor: three fixed 32-bit fields in on-disk
 *  order — rowCount, uncompressedSize, compressedSize. */
public static BlockDescriptor read(InputBuffer in) throws IOException {
  BlockDescriptor result = new BlockDescriptor();
  result.rowCount = in.readFixed32();
  result.uncompressedSize = in.readFixed32();
  result.compressedSize = in.readFixed32();
  return result;
}
/** Reads a float stored as its raw 32-bit IEEE-754 bit pattern. */
public float readFloat() throws IOException { return Float.intBitsToFloat(readFixed32()); }
/**
 * Reads one value of the given {@link ValueType} from the buffer and casts
 * it to the caller's expected comparable type (unchecked cast).
 *
 * <p>Fix: the {@code BOOLEAN} case was missing, so reading a boolean value
 * fell through to the default branch and threw; it is now handled via
 * {@code readBoolean()} like every other member of the type enum.
 *
 * @param type the wire type of the value to read
 * @return the boxed value read, or null for {@code NULL}
 * @throws IOException if the underlying read fails
 * @throws TrevniRuntimeException for a type this reader does not handle
 */
public <T extends Comparable> T readValue(ValueType type) throws IOException {
  switch (type) {
  case NULL: return (T)null;
  case BOOLEAN: return (T)Boolean.valueOf(readBoolean());
  case INT: return (T)Integer.valueOf(readInt());
  case LONG: return (T)Long.valueOf(readLong());
  case FIXED32: return (T)Integer.valueOf(readFixed32());
  case FIXED64: return (T)Long.valueOf(readFixed64());
  case FLOAT: return (T)Float.valueOf(readFloat());
  case DOUBLE: return (T)Double.valueOf(readDouble());
  case STRING: return (T)readString();
  case BYTES: return (T)readBytes(null);
  default: throw new TrevniRuntimeException("Unknown value type: "+type);
  }
}
/**
 * Reads the file header at offset 0: magic bytes, total row count, column
 * count, file-level metadata, then per-column metadata and start offsets.
 * Populates rowCount, columnCount, metaData, columnsByName and columns.
 *
 * @throws IOException if the file cannot be read
 */
private void readHeader() throws IOException {
  // The header always lives at the very start of the file.
  InputBuffer in = new InputBuffer(file, 0);
  readMagic(in);
  this.rowCount = in.readFixed64();
  this.columnCount = in.readFixed32();
  this.metaData = ColumnFileMetaData.read(in);
  // Diamond operator (type args inferred from the field declaration),
  // matching the style used elsewhere in this codebase. Presized to
  // columnCount; presumably filled in readColumnMetaData — confirm.
  this.columnsByName = new HashMap<>(columnCount);
  columns = new ColumnDescriptor[columnCount];
  // Read order matters: column metadata precedes column start offsets on disk.
  readColumnMetaData(in);
  readColumnStarts(in);
}
/** Writes COUNT random ints with writeFixed32, then verifies readFixed32
 *  returns the identical sequence by replaying the same seeded Random. */
@Test public void testFixed32() throws Exception {
  Random random = TestUtil.createRandom();
  OutputBuffer out = new OutputBuffer();
  for (int i = 0; i < COUNT; i++)
    out.writeFixed32(random.nextInt());
  InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
  // Re-seed so the read loop sees exactly the values that were written.
  random = TestUtil.createRandom();
  for (int i = 0; i < COUNT; i++)
    Assert.assertEquals(random.nextInt(), in.readFixed32());
}
/**
 * Lazily loads this column's block index: reads every block descriptor (and,
 * when the column is indexed, the first value of each block), then derives the
 * absolute start offset and first row number of every block. No-op once
 * {@code blocks} is set.
 *
 * @throws IOException if the descriptors cannot be read
 */
public void ensureBlocksRead() throws IOException {
  if (blocks != null) return;
  // read block descriptors
  InputBuffer in = new InputBuffer(file, start);
  int blockCount = in.readFixed32();
  // Local shadows the field; published only after everything is computed.
  BlockDescriptor[] blocks = new BlockDescriptor[blockCount];
  if (metaData.hasIndexValues())
    firstValues = (T[])new Comparable[blockCount];
  for (int i = 0; i < blockCount; i++) {
    blocks[i] = BlockDescriptor.read(in);
    // When present, each descriptor is followed by the block's first value.
    if (metaData.hasIndexValues())
      firstValues[i] = in.<T>readValue(metaData.getType());
  }
  // Data blocks begin immediately after the descriptor section.
  dataStart = in.tell();
  // compute blockStarts and firstRows
  Checksum checksum = Checksum.get(metaData);
  blockStarts = new long[blocks.length];
  firstRows = new long[blocks.length];
  long startPosition = dataStart;
  long row = 0;
  for (int i = 0; i < blockCount; i++) {
    BlockDescriptor b = blocks[i];
    blockStarts[i] = startPosition;
    firstRows[i] = row;
    // On disk each compressed block is followed by its checksum bytes.
    startPosition += b.compressedSize + checksum.size();
    row += b.rowCount;
  }
  // Assign the field last, after the index arrays are fully built.
  this.blocks = blocks;
}