/**
 * Writes a length-delimited {@code bytes} field to the stream without a field tag:
 * the byte count as a varint followed by the raw bytes.
 */
public void writeBytesNoTag(final ByteString value) throws IOException {
  final byte[] raw = value.toByteArray();
  writeRawVarint32(raw.length);
  writeRawBytes(raw);
}
/**
 * Deserializes an {@code AccelerationDetails} from its serialized form.
 *
 * @param bytes serialized details; may be {@code null}
 * @return the deserialized details, or {@code null} when {@code bytes} is {@code null}
 */
public static AccelerationDetails deserialize(ByteString bytes) {
  return bytes == null ? null : SERIALIZER.revert(bytes.toByteArray());
}
/** Writes the {@code ByteString} value as JSON binary content. */
@Override
public void serialize(ByteString value, JsonGenerator gen, SerializerProvider provider) throws IOException {
  final byte[] raw = value.toByteArray();
  gen.writeBinary(raw);
}
/**
 * Deserializes {@code JoinDependencyProperties} from its serialized form.
 *
 * @param bytes serialized properties; may be {@code null}
 * @return the deserialized properties, or {@code null} when {@code bytes} is {@code null}
 */
public static JoinDependencyProperties deserializeJoinDependencyProperties(ByteString bytes) {
  // Null-guard added for consistency with the other deserialize helpers in this
  // codebase, which return null instead of throwing NPE on null input.
  if (bytes == null) {
    return null;
  }
  return JOIN_DEPENDENCY_PROPERTIES_SERIALIZER.revert(bytes.toByteArray());
} }
/**
 * Builds a connection configuration instance of the requested source type from its
 * serialized protobuf bytes.
 *
 * @param typeName registered name of the source type
 * @param bytesS   serialized configuration bytes
 * @throws IllegalStateException if no schema is registered for {@code typeName}
 */
@SuppressWarnings("unchecked")
@Override
public <T extends AbstractConnectionConf> T getConnectionConf(String typeName, ByteString bytesS) {
  final Schema<T> schema = (Schema<T>) schemaByName.get(typeName);
  if (schema == null) {
    throw new IllegalStateException(String.format("Unable to find handler for source of type [%s].", typeName));
  }
  final T conf = schema.newMessage();
  ProtobufIOUtil.mergeFrom(bytesS.toByteArray(), conf, schema);
  return conf;
}
/**
 * Creates an id for a storage plugin instance.
 *
 * <p>The hash code is precomputed from config and capabilities only; the connection is
 * excluded because it is derived from the config (checked by the assertion below).
 */
public StoragePluginId(
    SourceConfig config,
    ConnectionConf<?, ?> connection,
    SourceCapabilities capabilities
    ) {
  this.config = Preconditions.checkNotNull(config);
  this.connection = connection;
  this.capabilities = capabilities;
  this.hashCode = Objects.hashCode(config, capabilities);
  // Sanity check (only active when assertions are enabled with -ea): the connection
  // must serialize to exactly the bytes stored in the source config.
  assert Arrays.equals(connection.toBytes(), config.getConfig().toByteArray());
}
/**
 * Extracts the single refresh decision recorded on a refresh job attempt.
 *
 * @throws IllegalStateException when the attempt has no extra info, or when the number
 *         of recorded refresh decisions is not exactly one
 */
public static RefreshDecision getRefreshDecision(final JobAttempt jobAttempt) {
  final List<ExtraInfo> infos = jobAttempt.getExtraInfoList();
  if (infos == null || infos.isEmpty()) {
    throw new IllegalStateException("No refresh decision found in refresh job.");
  }
  final List<ExtraInfo> decisions = infos.stream()
      .filter(info -> RefreshHandler.DECISION_NAME.equals(info.getName()))
      .collect(Collectors.toList());
  if (decisions.size() != 1) {
    throw new IllegalStateException(String.format("Expected to have one refresh decision, saw: %d.", decisions.size()));
  }
  return RefreshHandler.SERIALIZER.revert(decisions.get(0).getData().toByteArray());
}
/** Returns the binary partition value as a byte array, or {@code null} when absent. */
private static byte[] getByteArray(PartitionValue partitionValue) {
  final ByteString binary = partitionValue.getBinaryValue();
  return binary == null ? null : binary.toByteArray();
} }
// Pairs a dataset split with its deserialized easy-format extended attributes.
public SplitAndExtended(DatasetSplit split) {
  super();
  this.split = split;
  // Eagerly deserialize the split's extended-property bytes into the easy-dataset
  // xattr form. NOTE(review): assumes getExtendedProperty() is non-null — verify callers.
  this.extended = EasyDatasetXAttrSerDe.EASY_DATASET_SPLIT_XATTR_SERIALIZER.revert(split.getExtendedProperty().toByteArray());
}
public DatasetSplit getSplit() {
/**
 * Wraps a dataset split and eagerly deserializes its parquet scan xattr from the
 * split's extended-property bytes.
 */
ParquetDatasetSplit(DatasetSplit datasetSplit) {
  this.datasetSplit = datasetSplit;
  // Fixed: removed a stray empty statement (double semicolon) after this call.
  this.splitXAttr = ParquetDatasetXAttrSerDe.PARQUET_DATASET_SPLIT_SCAN_XATTR_SERIALIZER
      .revert(datasetSplit.getExtendedProperty().toByteArray());
}
/**
 * Lazily parses and caches the elastic table extended attributes from the table
 * metadata's read definition.
 *
 * @throws RuntimeException wrapping {@code InvalidProtocolBufferException} on parse failure
 */
public ElasticTableXattr getExtendedAttributes(){
  if (extendedAttributes == null) {
    try {
      extendedAttributes = ElasticTableXattr.parseFrom(tableMetadata.getReadDefinition().getExtendedProperty().toByteArray());
    } catch (InvalidProtocolBufferException e) {
      // Throwables.propagate is deprecated in Guava; wrapping the checked exception
      // directly is behaviorally identical (propagate also wraps checked exceptions
      // in RuntimeException).
      throw new RuntimeException(e);
    }
  }
  return extendedAttributes;
}
/**
 * Lazily parses and caches the hive table extended attributes from the table
 * metadata's read definition.
 *
 * @throws RuntimeException wrapping {@code InvalidProtocolBufferException} on parse failure
 */
private HiveTableXattr getExtended(){
  if (extended == null) {
    try {
      extended = HiveTableXattr.parseFrom(getTableMetadata().getReadDefinition().getExtendedProperty().toByteArray());
    } catch (InvalidProtocolBufferException e) {
      // Throwables.propagate is deprecated in Guava; wrapping the checked exception
      // directly is behaviorally identical.
      throw new RuntimeException(e);
    }
  }
  return extended;
}
/**
 * Matches only unfiltered hive scans whose table input format is ORC. Any failure to
 * deserialize the table attributes is logged and treated as a non-match.
 */
@Override
public boolean matches(RelOptRuleCall call) {
  final HiveScanDrel scan = call.rel(1);
  // A scan that already carries a filter is not eligible.
  if (scan.getFilter() != null) {
    return false;
  }
  try {
    final HiveTableXattr xattr =
        HiveTableXattr.parseFrom(scan.getTableMetadata().getReadDefinition().getExtendedProperty().toByteArray());
    final Optional<String> format = HiveReaderProtoUtil.getTableInputFormat(xattr);
    return format.isPresent() && format.get().equals(OrcInputFormat.class.getCanonicalName());
  } catch (InvalidProtocolBufferException e) {
    logger.warn("Failure while attempting to deserialize hive table attributes.", e);
    return false;
  }
}
/**
 * Builds the HBase key range for a dataset split from its serialized split xattr.
 *
 * @throws RuntimeException wrapping {@code InvalidProtocolBufferException} on parse failure
 */
public static KeyRange fromSplit(DatasetSplit input) {
  final byte[] prop = input.getExtendedProperty().toByteArray();
  try {
    final HBaseSplitXattr split = HBaseSplitXattr.parseFrom(prop);
    return getRange(split.getStart(), split.getStop());
  } catch (InvalidProtocolBufferException e) {
    // Throwables.propagate is deprecated in Guava; wrapping the checked exception
    // directly is behaviorally identical.
    throw new RuntimeException(e);
  }
}
/**
 * Reads the cluster identity from the configuration store, falling back to a
 * legacy-store upgrade when no entry exists.
 *
 * @return the identity, or {@code Optional.empty()} when deserialization fails
 */
private static Optional<ClusterIdentity> getClusterIdentityFromStore(ConfigurationStore store, KVStoreProvider provider) {
  final ConfigurationEntry entry = store.get(SupportService.CLUSTER_ID);
  if (entry == null) {
    // Nothing stored yet; attempt migration from the legacy support store.
    return upgradeToNewSupportStore(provider);
  }
  try {
    final ClusterIdentity identity = ClusterIdentity.getSchema().newMessage();
    ProtostuffIOUtil.mergeFrom(entry.getValue().toByteArray(), identity, ClusterIdentity.getSchema());
    return Optional.ofNullable(identity);
  } catch (Exception e) {
    // Best-effort read: log and report absence rather than failing the caller.
    logger.info("failed to get cluster identity", e);
    return Optional.empty();
  }
}
/**
 * Loads the search configuration from the store, or returns a fresh configuration
 * with a zero last-wakeup time when none has been persisted yet.
 */
SearchConfiguration getSearchConfig() {
  final SearchConfiguration config = new SearchConfiguration();
  final ConfigurationEntry entry = configurationStore.get(CONFIG_KEY);
  if (entry == null) {
    config.setLastWakeupTime(0L);
  } else {
    ProtostuffIOUtil.mergeFrom(entry.getValue().toByteArray(), config, SearchConfiguration.getSchema());
  }
  return config;
}
// Wraps a dataset split, deserializing the hive split attributes and the underlying
// Hadoop FileSplit eagerly so failures surface at construction time.
HiveParquetSplit(DatasetSplit datasetSplit) {
  this.datasetSplit = datasetSplit;
  try {
    final HiveSplitXattr splitAttr = HiveSplitXattr.parseFrom(datasetSplit.getExtendedProperty().toByteArray());
    // NOTE(review): assumes the serialized input split is always a FileSplit — the
    // cast will throw ClassCastException otherwise; confirm against writers.
    final FileSplit fullFileSplit = (FileSplit) HiveUtilities.deserializeInputSplit(splitAttr.getInputSplit());
    // make a copy of file split, we only need file path, start and length, throw away hosts
    this.fileSplit = new FileSplit(fullFileSplit.getPath(), fullFileSplit.getStart(), fullFileSplit.getLength(), (String[])null);
    this.partitionId = splitAttr.getPartitionId();
  } catch (IOException | ReflectiveOperationException e) {
    // Wrap with the split key for easier diagnosis; original cause is preserved.
    throw new RuntimeException("Failed to parse dataset split for " + datasetSplit.getSplitKey(), e);
  }
}
private List<FileSystemCachedEntity> getCachedEntities(DatasetConfig datasetConfig) throws Exception{ final HiveReadSignature readSignature = HiveReadSignature.parseFrom(datasetConfig.getReadDefinition().getReadSignature().toByteArray()); // for now we only support fs based read signatures if (readSignature.getType() == HiveReadSignatureType.FILESYSTEM) { List<FileSystemCachedEntity> cachedEntities = Lists.newArrayList(); for (FileSystemPartitionUpdateKey updateKey: readSignature.getFsPartitionUpdateKeysList()) { cachedEntities.addAll(updateKey.getCachedEntitiesList()); } return cachedEntities; } return null; }
/**
 * Persists the given last-wakeup timestamp into the stored search configuration,
 * creating the configuration entry if it does not exist yet.
 */
private void setLastWakeupTime(long lastWakeupTime) {
  ConfigurationEntry configurationEntry = configurationStore.get(CONFIG_KEY);
  // BUG FIX: this previously mutated SearchConfiguration.getDefaultInstance() via
  // mergeFrom/setLastWakeupTime. getDefaultInstance() returns a shared singleton in
  // protostuff-generated code, so mutating it corrupts the default seen by every other
  // reader. Use a fresh instance, matching getSearchConfig().
  final SearchConfiguration searchConfiguration = new SearchConfiguration();
  if (configurationEntry != null) {
    ProtostuffIOUtil.mergeFrom(configurationEntry.getValue().toByteArray(), searchConfiguration, SearchConfiguration.getSchema());
  } else {
    configurationEntry = new ConfigurationEntry();
  }
  searchConfiguration.setLastWakeupTime(lastWakeupTime);
  configurationEntry.setValue(convertSearchConfigurationToByteString(searchConfiguration));
  configurationStore.put(CONFIG_KEY, configurationEntry);
}
/**
 * Converts a fully-serialized parquet split xattr into the slimmer scan-time form,
 * keeping only the path, file length, start, length and row-group index.
 */
private ByteString convertToScanXAttr(ByteString xattrFullSerialized) {
  // Fixed: removed a stray empty statement (double semicolon) after this call.
  final ParquetDatasetSplitXAttr fullXAttr =
      ParquetDatasetXAttrSerDe.PARQUET_DATASET_SPLIT_XATTR_SERIALIZER.revert(xattrFullSerialized.toByteArray());
  final ParquetDatasetSplitScanXAttr scanXAttr = new ParquetDatasetSplitScanXAttr();
  scanXAttr.setPath(fullXAttr.getPath());
  scanXAttr.setFileLength(fullXAttr.getUpdateKey().getLength());
  scanXAttr.setStart(fullXAttr.getStart());
  scanXAttr.setLength(fullXAttr.getLength());
  scanXAttr.setRowGroupIndex(fullXAttr.getRowGroupIndex());
  return ByteString.copyFrom(ParquetDatasetXAttrSerDe.PARQUET_DATASET_SPLIT_SCAN_XATTR_SERIALIZER.serialize(scanXAttr));
}