/**
 * Builds the column-definition list for a CREATE TABLE statement: one
 * {@code col_<index> <insertType>} entry per input, joined with ",\n".
 */
private String columnDefinitions(List<DataTypeTest.Input<?>> inputs)
{
    List<String> insertTypes = inputs.stream()
            .map(DataTypeTest.Input::getInsertType)
            .collect(toList());
    // Pair each insert type with its positional column name before joining.
    return Joiner.on(",\n").join(
            range(0, insertTypes.size())
                    .mapToObj(index -> format("col_%d %s", index, insertTypes.get(index)))
                    .iterator());
}
}
/**
 * Builds a detailed invocation-error message: the given text, the endpoint bean
 * type, the bridged method signature, and a one-line-per-argument dump of the
 * actual argument types and values ("[null]" for null arguments).
 */
protected String formatInvokeError(String text, Object[] args) {
    String formattedArgs = IntStream.range(0, args.length)
            .mapToObj(index -> {
                Object arg = args[index];
                if (arg == null) {
                    return "[" + index + "] [null]";
                }
                return "[" + index + "] [type=" + arg.getClass().getName() + "] [value=" + arg + "]";
            })
            .collect(Collectors.joining(",\n", " ", " "));
    return text + "\n" +
            "Endpoint [" + getBeanType().getName() + "]\n" +
            "Method [" + getBridgedMethod().toGenericString() + "] " +
            "with argument values:\n" + formattedArgs;
}
/**
 * Persists a "property set" value: stores the comma-separated list of 1-based field
 * indexes under the property key, then one child property per field entry under
 * {@code <key>.<index>.<fieldKey>}. Any pre-existing settings for the key are deleted first.
 *
 * @param component the component to scope the properties to; empty means a global setting
 *                  (null resource id)
 * @return the inlined field indexes stored as the parent property's value (e.g. "1,2,3")
 */
private String doHandlePropertySet(DbSession dbSession, SetRequest request, @Nullable PropertyDefinition definition, Optional<ComponentDto> component) {
    validatePropertySet(request, definition);
    // 1-based index per submitted field value; inlined as the parent property's value.
    int[] fieldIds = IntStream.rangeClosed(1, request.getFieldValues().size()).toArray();
    String inlinedFieldKeys = IntStream.of(fieldIds).mapToObj(String::valueOf).collect(COMMA_JOINER);
    String key = persistedKey(request);
    Long componentId = component.isPresent() ? component.get().getId() : null;
    // Replace any previous value(s) for this key before saving the new set.
    deleteSettings(dbSession, component, key);
    // Parent property holds the index list; the per-field values are saved below.
    dbClient.propertiesDao().saveProperty(dbSession, new PropertyDto().setKey(key).setValue(inlinedFieldKeys).setResourceId(componentId));
    List<String> fieldValues = request.getFieldValues();
    // fieldIds are 1-based, hence "i - 1" when reading the submitted values;
    // each entry becomes a property keyed "<key>.<i>.<fieldKey>".
    IntStream.of(fieldIds).boxed()
      .flatMap(i -> readOneFieldValues(fieldValues.get(i - 1), request.getKey()).entrySet().stream()
        .map(entry -> new KeyValue(key + "." + i + "." + entry.getKey(), entry.getValue())))
      .forEach(keyValue -> dbClient.propertiesDao().saveProperty(dbSession, toFieldProperty(keyValue, componentId)));
    return inlinedFieldKeys;
}
@Test
public void testAllFieldSerializer() {
    // Collect the serializer of every registered field (f0 of each tuple).
    @SuppressWarnings("unchecked")
    TypeSerializer<Object>[] fieldSerializers = TEST_FIELD_SERIALIZERS.stream()
        .map(tuple -> (TypeSerializer<Object>) tuple.f0)
        .toArray((IntFunction<TypeSerializer<Object>[]>) TypeSerializer[]::new);
    // Every field declares the same number of test instances; use the first as reference.
    int testCaseCount = TEST_FIELD_SERIALIZERS.get(0).f1.length;
    List<Object>[] instances = IntStream.range(0, testCaseCount)
        .mapToObj(CompositeSerializerTest::getTestCase)
        .toArray((IntFunction<List<Object>[]>) List[]::new);
    runTests(getLength(fieldSerializers), fieldSerializers, instances);
}
@Test
public void does_not_fail_on_100_projects() {
    // Exactly at the authorized maximum of 100 project keys: the call must succeed.
    List<String> keys = IntStream.rangeClosed(1, 100)
        .mapToObj(i -> db.components().insertPrivateProject().getDbKey())
        .collect(Collectors.toList());
    MetricDto metric = db.measures().insertMetric();
    call(keys, singletonList(metric.getKey()));
}
public void testForEachPair_parallel() { Stream<String> streamA = IntStream.range(0, 100000).mapToObj(String::valueOf).parallel(); Stream<Integer> streamB = IntStream.range(0, 100000).mapToObj(i -> i).parallel(); AtomicInteger count = new AtomicInteger(0); Streams.forEachPair( streamA, streamB, (a, b) -> { count.incrementAndGet(); Truth.assertThat(a.equals(String.valueOf(b))).isTrue(); }); Truth.assertThat(count.get()).isEqualTo(100000); // of course, this test doesn't prove that anything actually happened in parallel... }
/**
 * Creates a test table whose columns are the given inputs rendered as explicit
 * casts: {@code CREATE TABLE ... AS SELECT <literal> col_<index>, ...}.
 */
@Override
public TestTable setupTestTable(List<DataTypeTest.Input<?>> inputs)
{
    List<String> literals = inputs.stream()
            .map(this::literalInExplicitCast)
            .collect(toList());
    // Alias each literal with its positional column name.
    String selectBody = Joiner.on(",\n").join(
            range(0, literals.size())
                    .mapToObj(index -> format("%s col_%d", literals.get(index), index))
                    .iterator());
    String ddlTemplate = "CREATE TABLE {TABLE_NAME} AS SELECT\n" + selectBody;
    return new TestTable(sqlExecutor, tableNamePrefix, ddlTemplate);
}
/**
 * Verifies that creating unassign procedures for a merge fails when one of the
 * parent regions already has a procedure attached (is in transition), and that
 * the failure leaves no replica of the other region stuck in transition.
 */
@Test
public void testCreateUnassignProceduresForMergeFail() throws IOException {
    List<RegionInfo> regions = getPrimaryRegions();
    RegionInfo regionA = regions.get(0);
    RegionInfo regionB = regions.get(1);
    // Attach a TRSP to regionB so the merge-unassign creation below must fail.
    AM.getRegionStates().getRegionStateNode(regionB)
      .setProcedure(TransitRegionStateProcedure.unassign(ENV, regionB));
    try {
      AssignmentManagerUtil.createUnassignProceduresForSplitOrMerge(ENV,
        Stream.of(regionA, regionB), REGION_REPLICATION);
      fail("Should fail as the region is in transition");
    } catch (HBaseIOException e) {
      // expected
    }
    // The failed creation must have rolled back: every replica of regionA
    // should have its procedure unset again.
    IntStream.range(0, REGION_REPLICATION)
      .mapToObj(i -> RegionReplicaUtil.getRegionInfoForReplica(regionA, i))
      .map(AM.getRegionStates()::getRegionStateNode)
      .forEachOrdered(
        rn -> assertFalse("Should have unset the proc for " + rn, rn.isInTransition()));
  }
}
/**
 * Builds a detailed invocation-error message: the given text, the controller bean
 * type, the bridged method signature, and a one-line-per-argument dump of the
 * actual argument types and values ("[null]" for null arguments).
 */
protected String formatInvokeError(String text, Object[] args) {
    String formattedArgs = IntStream.range(0, args.length)
            .mapToObj(index -> args[index] == null
                    ? "[" + index + "] [null]"
                    : "[" + index + "] [type=" + args[index].getClass().getName()
                            + "] [value=" + args[index] + "]")
            .collect(Collectors.joining(",\n", " ", " "));
    return new StringBuilder(text).append("\n")
            .append("Controller [").append(getBeanType().getName()).append("]\n")
            .append("Method [").append(getBridgedMethod().toGenericString()).append("] ")
            .append("with argument values:\n")
            .append(formattedArgs)
            .toString();
}
@Test
public void fail_if_more_than_100_project_keys() {
    // One key over the authorized maximum of 100: the call must be rejected.
    List<String> keys = IntStream.rangeClosed(1, 101)
        .mapToObj(i -> db.components().insertPrivateProject().getDbKey())
        .collect(Collectors.toList());
    MetricDto metric = db.measures().insertMetric();
    expectedException.expect(IllegalArgumentException.class);
    expectedException.expectMessage("101 projects provided, more than maximum authorized (100)");
    call(keys, singletonList(metric.getKey()));
}
/**
 * Verifies every token type's short description ends with a period, collecting
 * all offending descriptions so a failure reports them at once.
 */
@Test
public void testAllDescriptionsEndsWithPeriod() {
    final Set<String> badDescriptions = Arrays.stream(TokenUtil.getAllTokenIds())
        .mapToObj(TokenUtil::getTokenName)
        .map(TokenUtil::getShortDescription)
        // endsWith is safe for an (unexpected) empty description and reports it as
        // malformed, whereas charAt(length - 1) would throw StringIndexOutOfBoundsException.
        .filter(desc -> !desc.endsWith("."))
        .collect(Collectors.toSet());
    assertEquals("Malformed TokenType descriptions", Collections.emptySet(), badDescriptions);
}
@Test
public void checkNode_returns_causes_of_all_NodeHealthCheck_whichever_their_status() {
    // Between 1 and 20 checks, each with 0 to 2 random causes.
    int checkCount = 1 + random.nextInt(20);
    NodeHealthCheck[] nodeHealthChecks = IntStream.range(0, checkCount)
      .mapToObj(unused -> {
        String[] causes = IntStream.range(0, random.nextInt(3))
          .mapToObj(i -> randomAlphanumeric(3))
          .toArray(String[]::new);
        return (NodeHealthCheck) new HardcodedHealthNodeCheck(causes);
      })
      .toArray(NodeHealthCheck[]::new);
    // Expected = the union of every check's causes, regardless of its status.
    String[] expected = Arrays.stream(nodeHealthChecks)
      .map(NodeHealthCheck::check)
      .flatMap(result -> result.getCauses().stream())
      .toArray(String[]::new);
    HealthCheckerImpl underTest = new HealthCheckerImpl(webServer, nodeHealthChecks);
    assertThat(underTest.checkNode().getCauses()).containsOnly(expected);
}
@Test
public void testAscending() {
    // Keys 0..INPUT_SIZE-1 with values "0", "2", "4", ... (key * 2 as a string).
    // With the MAX comparator, the top OUTPUT_SIZE values are the last keys, in order.
    test(IntStream.range(0, INPUT_SIZE),
        IntStream.range(0, INPUT_SIZE).mapToObj(key -> Integer.toString(key * 2)),
        MAX_ELEMENTS_COMPARATOR,
        IntStream.range(INPUT_SIZE - OUTPUT_SIZE, INPUT_SIZE)
            .mapToObj(key -> Integer.toString(key * 2))
            .iterator());
    // With the MIN comparator, the bottom OUTPUT_SIZE values come out in
    // descending key order: OUTPUT_SIZE-1 down to 0.
    test(IntStream.range(0, INPUT_SIZE),
        IntStream.range(0, INPUT_SIZE).mapToObj(key -> Integer.toString(key * 2)),
        MIN_ELEMENTS_COMPARATOR,
        IntStream.range(0, OUTPUT_SIZE)
            .map(offset -> OUTPUT_SIZE - 1 - offset)
            .mapToObj(key -> Integer.toString(key * 2))
            .iterator());
}