/**
 * Builds the full {@link PartitionStatistics} for one partition by combining its basic
 * statistics with any column statistics computed for that partition's key values.
 *
 * @param session the connector session used when decoding computed statistics
 * @param basicStatistics basic statistics; must contain a row count
 * @param partitionValues the partition key values identifying the partition
 * @param columnTypes type of each column, needed to interpret the computed blocks
 * @param partitionComputedStatistics computed statistics keyed by partition values
 * @throws IllegalArgumentException if {@code basicStatistics} has no row count
 */
private PartitionStatistics createPartitionStatistics(
        ConnectorSession session,
        HiveBasicStatistics basicStatistics,
        List<String> partitionValues,
        Map<String, Type> columnTypes,
        Map<List<String>, ComputedStatistics> partitionComputedStatistics)
{
    // A partition with no computed statistics simply contributes an empty map.
    ComputedStatistics computed = partitionComputedStatistics.get(partitionValues);
    Map<ColumnStatisticMetadata, Block> computedColumnStatistics =
            (computed == null) ? ImmutableMap.of() : computed.getColumnStatistics();

    // The row count is mandatory: per-column statistics are derived relative to it.
    long rowCount = basicStatistics.getRowCount()
            .orElseThrow(() -> new IllegalArgumentException("rowCount not present"));
    Map<String, HiveColumnStatistics> columnStatistics = fromComputedStatistics(
            session,
            timeZone,
            computedColumnStatistics,
            columnTypes,
            rowCount);
    return new PartitionStatistics(basicStatistics, columnStatistics);
}
// Validates per-partition statistics that are known to carry both a row count and a
// per-column null count, accumulating the total row count across them.
// NOTE(review): this excerpt is truncated — the loop body continues past the visible
// source (presumably accumulating into totalRowCount), so the closing braces and the
// remainder of the loop are not shown here.
long totalRowCount = 0;
for (PartitionStatistics statistics : statisticsWithKnownRowCountAndNullsCount) {
    // Every entry in this list is expected to have a row count; absence is a programming error.
    long rowCount = statistics.getBasicStatistics().getRowCount().orElseThrow(() -> new VerifyException("rowCount is not present"));
    verify(rowCount >= 0, "rowCount must be greater than or equal to zero");
    HiveColumnStatistics columnStatistics = statistics.getColumnStatistics().get(column);
    verify(columnStatistics != null, "columnStatistics is null");
    // The null count must be present, non-negative, and can never exceed the row count.
    long nullsCount = columnStatistics.getNullsCount().orElseThrow(() -> new VerifyException("nullsCount is not present"));
    verify(nullsCount >= 0, "nullsCount must be greater than or equal to zero");
    verify(nullsCount <= rowCount, "nullsCount must be less than or equal to rowCount. nullsCount: %s. rowCount: %s.", nullsCount, rowCount);
// Validates per-partition statistics that are known to carry both a row count and a
// total-data-size for the column, accumulating row count (and presumably data size).
// NOTE(review): this excerpt is truncated — knownRowCount is declared before the visible
// source and the loop continues past it (likely knownDataSize += dataSize), so the
// closing braces are not shown here.
long knownDataSize = 0;
for (PartitionStatistics statistics : statisticsWithKnownRowCountAndDataSize) {
    // Every entry in this list is expected to have a row count; absence is a programming error.
    long rowCount = statistics.getBasicStatistics().getRowCount().orElseThrow(() -> new VerifyException("rowCount is not present"));
    verify(rowCount >= 0, "rowCount must be greater than or equal to zero");
    HiveColumnStatistics columnStatistics = statistics.getColumnStatistics().get(column);
    verify(columnStatistics != null, "columnStatistics is null");
    // Data size must be present and non-negative before it can contribute to the estimate.
    long dataSize = columnStatistics.getTotalSizeInBytes().orElseThrow(() -> new VerifyException("totalSizeInBytes is not present"));
    verify(dataSize >= 0, "dataSize must be greater than or equal to zero");
    knownRowCount += rowCount;
@Override public void write(OptionalLong value, TProtocolWriter protocol) throws Exception { requireNonNull(value, "value is null"); requireNonNull(protocol, "protocol is null"); // write can not be called with a missing value, and instead the write should be skipped // after check the result from isNull protocol.writeI64(value.orElseThrow(() -> new IllegalArgumentException("value is not present"))); }
/**
 * Returns the duration in milliseconds of the active request that started the longest
 * time ago, or zero if there are no active requests.
 *
 * @throws AssertionError if there are no workers at all (the worker list is expected
 *         to be non-empty for the lifetime of this object)
 */
public long getCurrentRequestMillis()
{
    // The per-worker value is in nanoseconds; take the oldest (largest) and convert once.
    long oldestRequestNanos = workers.stream()
            .mapToLong(ElasticsearchWorker::getCurrentRequestNanos)
            .max()
            .orElseThrow(() -> new AssertionError("There should be at least one worker."));
    return NANOSECONDS.toMillis(oldestRequestNanos);
}
/**
 * Echoes the contained long back to the caller.
 *
 * @throws EmptyOptionalException if {@code value} is empty
 */
@Override
public long echoOptionalLong(OptionalLong value)
        throws EmptyOptionalException
{
    if (!value.isPresent()) {
        throw new EmptyOptionalException();
    }
    return value.getAsLong();
}
/**
 * Picks one quote uniformly at random from the repository.
 *
 * @return a random quote, or {@link Optional#empty()} when the repository is empty
 */
public Optional<QuoteOfTheDay> random() {
    long count = repository.count();
    if (count == 0) {
        return Optional.empty();
    }
    Random random = new Random();
    // nextInt only accepts an int bound; fall back to a bounded long stream for huge counts.
    long randomIndex;
    if (count <= Integer.MAX_VALUE) {
        randomIndex = random.nextInt((int) count);
    }
    else {
        randomIndex = random.longs(1, 0, count).findFirst().orElseThrow(AssertionError::new);
    }
    // Stream the repository contents and take the element at the chosen position.
    return StreamSupport.stream(repository.findAll().spliterator(), false)
            .skip(randomIndex)
            .findFirst();
}
/**
 * Returns this field's value as a primitive long.
 *
 * <p>Reads the raw LONG_VALUE persistent field of this object and converts it via
 * {@code Convert.toLong}; a failed conversion surfaces as {@link NumberFormatException},
 * consistent with the standard numeric-accessor contract.
 *
 * @throws NumberFormatException if the stored value cannot be converted to a long
 */
@Override public long getAsLong() { return Convert.toLong(PersistentFieldDefinition.LONG_VALUE.getField(this)) .orElseThrow(NumberFormatException::new); }
/**
 * Returns this value as a primitive long.
 *
 * <p>Converts the result of {@code getValue()} via {@code Convert.toLong}; a failed
 * conversion surfaces as {@link NumberFormatException}, consistent with the standard
 * numeric-accessor contract.
 *
 * @throws NumberFormatException if the value cannot be converted to a long
 */
@Override public long getAsLong() { return Convert.toLong(getValue()).orElseThrow(NumberFormatException::new); }
/**
 * Builds the full {@link PartitionStatistics} from basic statistics and already-computed
 * per-column statistic blocks.
 *
 * @param session the connector session used when decoding computed statistics
 * @param basicStatistics basic statistics; must contain a row count
 * @param columnTypes type of each column, needed to interpret the computed blocks
 * @param computedColumnStatistics computed statistic blocks keyed by statistic metadata
 * @throws IllegalArgumentException if {@code basicStatistics} has no row count
 */
private PartitionStatistics createPartitionStatistics(
        ConnectorSession session,
        HiveBasicStatistics basicStatistics,
        Map<String, Type> columnTypes,
        Map<ColumnStatisticMetadata, Block> computedColumnStatistics)
{
    // The row count is mandatory: per-column statistics are derived relative to it.
    long rowCount = basicStatistics.getRowCount()
            .orElseThrow(() -> new IllegalArgumentException("rowCount not present"));
    Map<String, HiveColumnStatistics> columnStatistics =
            fromComputedStatistics(session, timeZone, computedColumnStatistics, columnTypes, rowCount);
    return new PartitionStatistics(basicStatistics, columnStatistics);
}
/**
 * Computes the cluster capacity required to run the given plan stages: the maximum
 * per-stage memory requirement and the maximum per-stage core requirement.
 *
 * @param stages the plan stages; must be non-empty
 * @param computationLocations number of locations computation may run on
 * @param sortFrameLimit frame budget for sort operators
 * @param groupFrameLimit frame budget for group-by operators
 * @param joinFrameLimit frame budget for join operators
 * @param textSearchFrameLimit frame budget for text-search operators
 * @param frameSize size of a single frame in bytes
 * @return the aggregated required capacity
 * @throws IllegalStateException if {@code stages} is empty
 */
public static IClusterCapacity getStageBasedRequiredCapacity(List<PlanStage> stages, int computationLocations,
        int sortFrameLimit, int groupFrameLimit, int joinFrameLimit, int textSearchFrameLimit, int frameSize) {
    final OperatorResourcesComputer computer = new OperatorResourcesComputer(computationLocations, sortFrameLimit,
            groupFrameLimit, joinFrameLimit, textSearchFrameLimit, frameSize);
    final IClusterCapacity clusterCapacity = new ClusterCapacity();
    // Use primitives: mapToLong/mapToInt max() already yield primitive optionals, so
    // receiving them into boxed Long/Integer only adds needless autoboxing.
    final long maxRequiredMemory = stages.stream().mapToLong(stage -> stage.getRequiredMemory(computer)).max()
            .orElseThrow(() -> new IllegalStateException("no stages to compute required memory from"));
    clusterCapacity.setAggregatedMemoryByteSize(maxRequiredMemory);
    final int maxRequiredCores = stages.stream().mapToInt(stage -> stage.getRequiredCores(computer)).max()
            .orElseThrow(() -> new IllegalStateException("no stages to compute required cores from"));
    clusterCapacity.setAggregatedCores(maxRequiredCores);
    return clusterCapacity;
}
// closes the enclosing class (its declaration is outside this excerpt)
}
// Validates per-partition statistics that are known to carry both a row count and a
// total-data-size for the column, accumulating row count (and presumably data size).
// NOTE(review): this excerpt is truncated — knownRowCount is declared before the visible
// source and the loop continues past it (likely knownDataSize += dataSize), so the
// closing braces are not shown here.
long knownDataSize = 0;
for (PartitionStatistics statistics : statisticsWithKnownRowCountAndDataSize) {
    // Every entry in this list is expected to have a row count; absence is a programming error.
    long rowCount = statistics.getBasicStatistics().getRowCount().orElseThrow(() -> new VerifyException("rowCount is not present"));
    verify(rowCount >= 0, "rowCount must be greater than or equal to zero");
    HiveColumnStatistics columnStatistics = statistics.getColumnStatistics().get(column);
    verify(columnStatistics != null, "columnStatistics is null");
    // Data size must be present and non-negative before it can contribute to the estimate.
    long dataSize = columnStatistics.getTotalSizeInBytes().orElseThrow(() -> new VerifyException("totalSizeInBytes is not present"));
    verify(dataSize >= 0, "dataSize must be greater than or equal to zero");
    knownRowCount += rowCount;
// Validates per-partition statistics that are known to carry both a row count and a
// per-column null count, accumulating the total row count across them.
// NOTE(review): this excerpt is truncated — the loop body continues past the visible
// source (presumably accumulating into totalRowCount), so the closing braces and the
// remainder of the loop are not shown here.
long totalRowCount = 0;
for (PartitionStatistics statistics : statisticsWithKnownRowCountAndNullsCount) {
    // Every entry in this list is expected to have a row count; absence is a programming error.
    long rowCount = statistics.getBasicStatistics().getRowCount().orElseThrow(() -> new VerifyException("rowCount is not present"));
    verify(rowCount >= 0, "rowCount must be greater than or equal to zero");
    HiveColumnStatistics columnStatistics = statistics.getColumnStatistics().get(column);
    verify(columnStatistics != null, "columnStatistics is null");
    // The null count must be present, non-negative, and can never exceed the row count.
    long nullsCount = columnStatistics.getNullsCount().orElseThrow(() -> new VerifyException("nullsCount is not present"));
    verify(nullsCount >= 0, "nullsCount must be greater than or equal to zero");
    verify(nullsCount <= rowCount, "nullsCount must be less than or equal to rowCount. nullsCount: %s. rowCount: %s.", nullsCount, rowCount);