/**
 * Copies the entire contents of {@code bytes} into a new {@code ByteString}.
 *
 * @param bytes the source array; later mutation of the array does not affect
 *     the returned value
 * @return a {@code ByteString} containing a copy of {@code bytes}
 */
public static ByteString copyFrom(final byte[] bytes) {
  // Delegate to the ranged overload, covering the whole array.
  return copyFrom(bytes, 0, bytes.length);
}
/**
 * Write a length-delimited {@code bytes} field to the stream, without a field tag.
 *
 * @param value the bytes to write
 * @throws IOException if the underlying stream fails
 */
public void writeBytesNoTag(final ByteString value) throws IOException {
  final byte[] raw = value.toByteArray();
  // Varint length prefix first, then the raw payload.
  writeRawVarint32(raw.length);
  writeRawBytes(raw);
}
/**
 * Write a {@code bytes} field by delegating to the byte-array writer.
 *
 * @param fieldNumber the protobuf field number
 * @param value the bytes to write
 * @param repeated whether the field is repeated
 * @throws IOException if the underlying stream fails
 */
@Override
public void writeBytes(int fieldNumber, ByteString value, boolean repeated) throws IOException {
  final byte[] data = value.getBytes();
  writeByteArray(fieldNumber, data, repeated);
}
/** * Read a {@code bytes} field value from the stream. */ @Override public ByteString readBytes() throws IOException { final int size = readRawVarint32(); if (size == 0) { return ByteString.EMPTY; } if (size <= (bufferSize - bufferPos) && size > 0) { // Fast path: We already have the bytes in a contiguous buffer, so // just copy directly from it. final ByteString result = ByteString.copyFrom(buffer, bufferPos, size); bufferPos += size; return result; } else { // Slow path: Build a byte array first then copy it. // return ByteString.copyFrom(readRawBytes(size)); return ByteString.wrap(readRawBytes(size)); } }
/**
 * Read a length-delimited {@code bytes} field value from the stream.
 *
 * @return the bytes read, wrapped without an extra copy
 * @throws IOException if the underlying stream fails
 */
@Override
public ByteString readBytes() throws IOException {
  final byte[] raw = readByteArray();
  // raw is freshly allocated by readByteArray, so wrapping it is safe.
  return ByteString.wrap(raw);
}
/**
 * Converts a fully-serialized {@code ParquetDatasetSplitXAttr} into the slimmer
 * scan-time form ({@code ParquetDatasetSplitScanXAttr}), re-serialized as a
 * {@link ByteString}.
 *
 * @param xattrFullSerialized the serialized full split xattr
 * @return the serialized scan-time xattr
 */
private ByteString convertToScanXAttr(ByteString xattrFullSerialized) {
  // Fixed: stray double semicolon (empty statement) after this call.
  ParquetDatasetSplitXAttr fullXAttr =
      ParquetDatasetXAttrSerDe.PARQUET_DATASET_SPLIT_XATTR_SERIALIZER.revert(xattrFullSerialized.toByteArray());
  ParquetDatasetSplitScanXAttr scanXAttr = new ParquetDatasetSplitScanXAttr();
  scanXAttr.setPath(fullXAttr.getPath());
  scanXAttr.setFileLength(fullXAttr.getUpdateKey().getLength());
  scanXAttr.setStart(fullXAttr.getStart());
  scanXAttr.setLength(fullXAttr.getLength());
  scanXAttr.setRowGroupIndex(fullXAttr.getRowGroupIndex());
  return ByteString.copyFrom(
      ParquetDatasetXAttrSerDe.PARQUET_DATASET_SPLIT_SCAN_XATTR_SERIALIZER.serialize(scanXAttr));
}
/**
 * Rewrites the record schema of {@code datasetConfig} from the legacy
 * {@code OldSchema} flatbuffer layout to the current layout via
 * {@code convertFromOldSchema}.
 *
 * @param datasetConfig the config to upgrade; may be null
 * @return the updated config, or null when there is nothing to update or conversion fails
 */
private DatasetConfig update(DatasetConfig datasetConfig) {
  if (datasetConfig == null) {
    return null;
  }
  final io.protostuff.ByteString schemaBytes = DatasetHelper.getSchemaBytes(datasetConfig);
  if (schemaBytes == null) {
    // No stored schema: nothing to convert.
    return null;
  }
  try {
    OldSchema oldSchema = OldSchema.getRootAsOldSchema(schemaBytes.asReadOnlyByteBuffer());
    byte[] newschemaBytes = convertFromOldSchema(oldSchema);
    datasetConfig.setRecordSchema(ByteString.copyFrom(newschemaBytes));
    return datasetConfig;
  } catch (Exception e) {
    // NOTE(review): reports via System.out instead of a logger, and catches Exception
    // broadly while returning null — presumably this runs in an upgrade/CLI context
    // where best-effort conversion is intended; confirm, otherwise route through the
    // project's logging facility and narrow the catch.
    System.out.println("Unable to update Arrow Schema for: " + PathUtils
        .constructFullPath(Optional.ofNullable(datasetConfig.getFullPathList()).orElse(Lists.newArrayList())));
    e.printStackTrace(System.out);
    return null;
  }
}
/** Shorthand: builds a {@code ByteString} from the UTF-8 encoding of {@code text}. */
static ByteString bs(String text) { return ByteString.copyFromUtf8(text); }
/**
 * Exposes the dataset's extended (Hive) properties as a single shared-data entry
 * keyed by {@code HIVE_ATTRIBUTE_KEY}.
 *
 * @return a singleton list holding the Hive attribute entry
 */
// NOTE(review): assumes dataset.getReadDefinition() and getExtendedProperty() are
// non-null — an NPE is thrown otherwise; confirm callers guarantee this.
@Override
public List<Entry<String, ByteString>> getSharedData() {
  return Collections.singletonList(
      new SimpleEntry<>(
          HIVE_ATTRIBUTE_KEY,
          // copyFrom snapshots the read-only buffer's contents into the ByteString.
          ByteString.copyFrom(dataset.getReadDefinition().getExtendedProperty().asReadOnlyByteBuffer())
      )
  );
}
/**
 * Read a length-delimited {@code bytes} field value from the stream.
 *
 * @return the bytes read, wrapped without an additional copy
 * @throws IOException if the underlying stream fails
 */
@Override
public ByteString readBytes() throws IOException {
  // readByteArray returns a fresh array, so handing it to wrap() is safe.
  final byte[] payload = readByteArray();
  return ByteString.wrap(payload);
}
new Double[] { null, 1.1d, null, 2.2d, null }, new String[] { null, "a", null, "b", null }, new ByteString[] { null, ByteString.copyFromUtf8("a"), null, ByteString.copyFromUtf8("b"), null }, new byte[][] { null, new byte[] { 'a' }, null, new byte[] { 'b' }, null }, new BigDecimal[] { null, new BigDecimal(1.1d), null, new BigDecimal(2.2d), null },
public static String[] getColumnsLowerCase(DatasetConfig datasetConfig) { final ByteString schemaBytes = DatasetHelper.getSchemaBytes(datasetConfig); if (schemaBytes != null) { Schema schema = Schema.getRootAsSchema(schemaBytes.asReadOnlyByteBuffer()); org.apache.arrow.vector.types.pojo.Schema s = org.apache.arrow.vector.types.pojo.Schema.convertSchema(schema); return s.getFields().stream().map(input -> input.getName().toLowerCase()).toArray(String[]::new); } else { // If virtual dataset was created with view fields if (datasetConfig.getType() == DatasetType.VIRTUAL_DATASET) { final List<ViewFieldType> viewFieldTypes = datasetConfig.getVirtualDataset().getSqlFieldsList(); if (notEmpty(viewFieldTypes)) { return viewFieldTypes.stream().map(input -> input.getName().toLowerCase()).toArray(String[]::new); } } } return new String[0]; }
/**
 * Verifies that null elements inside repeated-field JSON arrays are skipped on merge:
 * each repeated field receives {@code [null, value]} and only the non-null value
 * must survive in the merged {@code Foo}.
 */
public void testFooNullFieldsButLast() throws Exception {
  Foo b = new Foo();
  JsonIOUtil.mergeFrom(JsonIOUtil.DEFAULT_JSON_FACTORY.createJsonParser(
      "{\"someInt\":[null,1]" +
      ",\"someString\":[null,\"string\"]" +
      ",\"someBar\":[null,{}]" +
      ",\"someEnum\":[null,1]" +
      ",\"someBytes\":[null,\"fw==\"]" + // base64 "fw==" decodes to the single byte 0x7f
      ",\"someBoolean\":[null,true]" +
      ",\"someFloat\":[null,10.01]" +
      ",\"someDouble\":[null,100.001]" +
      ",\"someLong\":[null,1000]}"), b, b.cachedSchema(), false);
  // Each repeated field should contain only the non-null second element.
  assertEquals(b.getSomeInt(), Arrays.asList(new Integer(1)));
  assertEquals(b.getSomeString(), Arrays.asList("string"));
  assertEquals(b.getSomeBar(), Arrays.asList(new Bar()));
  assertEquals(b.getSomeEnum(), Arrays.asList(Foo.EnumSample.TYPE1));
  assertEquals(b.getSomeBytes(), Arrays.asList(ByteString.copyFrom(new byte[] { 0x7f })));
  assertEquals(b.getSomeBoolean(), Arrays.asList(Boolean.TRUE));
  assertEquals(b.getSomeFloat(), Arrays.asList(new Float(10.01f)));
  assertEquals(b.getSomeDouble(), Arrays.asList(new Double(100.001d)));
  assertEquals(b.getSomeLong(), Arrays.asList(new Long(1000l)));
}
/**
 * Deserializes an {@code AccelerationDetails} from its serialized form.
 *
 * @param bytes the serialized details; may be null
 * @return the deserialized details, or null when {@code bytes} is null
 */
public static AccelerationDetails deserialize(ByteString bytes) {
  return bytes == null ? null : SERIALIZER.revert(bytes.toByteArray());
}
/**
 * Write a {@code bytes} field, forwarding the underlying array to the
 * byte-array writer.
 *
 * @param fieldNumber the protobuf field number
 * @param value the bytes to write
 * @param repeated whether the field is repeated
 * @throws IOException if the underlying stream fails
 */
@Override
public void writeBytes(int fieldNumber, ByteString value, boolean repeated) throws IOException {
  final byte[] payload = value.getBytes();
  writeByteArray(fieldNumber, payload, repeated);
}
/**
 * Read a length-delimited {@code bytes} field value from the stream.
 *
 * @return the bytes read, wrapped rather than re-copied
 * @throws IOException if the underlying stream fails
 */
@Override
public ByteString readBytes() throws IOException {
  // The array from readByteArray is newly allocated, so wrap() avoids one copy.
  final byte[] data = readByteArray();
  return ByteString.wrap(data);
}
new Double[] { 1.1d, null, 2.2d }, new String[] { "a", null, "b" }, new ByteString[] { ByteString.copyFromUtf8("a"), null, ByteString.copyFromUtf8("b") }, new byte[][] { new byte[] { 'a' }, null, new byte[] { 'b' } }, new BigDecimal[] { new BigDecimal(1.1d), null, new BigDecimal(2.2d) },
/**
 * Deserializes a flatbuffer-encoded Arrow schema into a {@code BatchSchema}
 * with {@code SelectionVectorMode.NONE}.
 *
 * @param bytes the serialized schema
 * @return the reconstructed batch schema
 */
public static BatchSchema deserialize(ByteString bytes) {
  final Schema flatSchema = Schema.getRootAsSchema(bytes.asReadOnlyByteBuffer());
  final org.apache.arrow.vector.types.pojo.Schema arrowSchema =
      org.apache.arrow.vector.types.pojo.Schema.convertSchema(flatSchema);
  return new BatchSchema(SelectionVectorMode.NONE, arrowSchema.getFields());
}