/**
 * Expands one (SystemStream -> metadata) entry into a stream of the individual
 * {@link SystemStreamPartition}s it describes, one per partition key in the metadata map.
 */
private Stream<SystemStreamPartition> mapSSMToSSP(Map.Entry<SystemStream, SystemStreamMetadata> ssMs) {
  SystemStream systemStream = ssMs.getKey();
  return ssMs.getValue()
      .getSystemStreamPartitionMetadata()
      .keySet()
      .stream()
      .map(p -> new SystemStreamPartition(systemStream, p));
}
/**
 * Deserializes a map key of the form {@code "<system>.<stream>.<partition>"} into a
 * {@link SystemStreamPartition}. The stream name may itself contain dots, so the system
 * is taken up to the first dot and the partition after the last dot.
 *
 * @throws IllegalArgumentException if the string does not contain at least two dots
 * @throws NumberFormatException if the partition component is not an integer
 */
@Override
public Object deserializeKey(String sspString, DeserializationContext ctxt) throws IOException {
  int idx = sspString.indexOf('.');
  int lastIdx = sspString.lastIndexOf('.');
  // Need at least two distinct dots to carry all three components; previously a
  // single-dot input fell through to a confusing NumberFormatException. The original
  // message was also missing its closing quote.
  if (idx < 0 || lastIdx < 0 || idx == lastIdx) {
    throw new IllegalArgumentException("System stream partition expected in format 'system.stream.partition'");
  }
  return new SystemStreamPartition(
      new SystemStream(sspString.substring(0, idx), sspString.substring(idx + 1, lastIdx)),
      new Partition(Integer.parseInt(sspString.substring(lastIdx + 1))));
}
}
/**
 * Maps a partition of an expanded stream back to the partition it originated from,
 * assuming the partition count grew by an integer factor: the previous partition id is
 * the current id modulo the previous partition count.
 *
 * @throws NullPointerException if the SSP or its partition is null
 * @throws IllegalArgumentException if the new count is not a multiple of the old count
 */
@Override
public SystemStreamPartition getPreviousSSP(SystemStreamPartition currentSystemStreamPartition,
    int previousPartitionCount, int currentPartitionCount) {
  Preconditions.checkNotNull(currentSystemStreamPartition);
  Preconditions.checkArgument(currentPartitionCount % previousPartitionCount == 0,
      String.format("New partition count: %d should be a multiple of previous partition count: %d.",
          currentPartitionCount, previousPartitionCount));
  Partition currentPartition = currentSystemStreamPartition.getPartition();
  Preconditions.checkNotNull(currentPartition,
      String.format("SystemStreamPartition: %s cannot have null partition", currentSystemStreamPartition));
  int previousPartitionId = currentPartition.getPartitionId() % previousPartitionCount;
  return new SystemStreamPartition(currentSystemStreamPartition.getSystemStream(), new Partition(previousPartitionId));
}
}
/**
 * Creates a consumer over the coordinator stream. Coordinator stream messages are
 * always read from partition 0 of the given stream.
 */
public CoordinatorStreamSystemConsumer(SystemStream coordinatorSystemStream, SystemConsumer systemConsumer,
    SystemAdmin systemAdmin) {
  this.systemConsumer = systemConsumer;
  this.systemAdmin = systemAdmin;
  this.coordinatorSystemStreamPartition = new SystemStreamPartition(coordinatorSystemStream, new Partition(0));
  // Keys and messages on the coordinator stream are JSON-serialized.
  this.keySerde = new JsonSerde<>();
  this.messageSerde = new JsonSerde<>();
  // Config cache starts empty; it is filled on bootstrap.
  this.configMap = new HashMap<>();
  this.isBootstrapped = false;
}
/**
 * Deserializes a JSON object of the form {"system": ..., "stream": ..., "partition": ...}
 * into a {@link SystemStreamPartition}.
 */
@Override
public SystemStreamPartition deserialize(JsonParser jsonParser, DeserializationContext context)
    throws IOException, JsonProcessingException {
  JsonNode node = jsonParser.getCodec().readTree(jsonParser);
  return new SystemStreamPartition(
      node.get("system").getTextValue(),
      node.get("stream").getTextValue(),
      new Partition(node.get("partition").getIntValue()));
}
}
/**
 * Converts a Kafka {@code TopicPartition} into a Samza {@link SystemStreamPartition}
 * scoped to this admin's system name.
 *
 * @param topicPartition the Kafka topic-partition to convert
 * @return the equivalent SystemStreamPartition
 */
private SystemStreamPartition toSystemStreamPartition(TopicPartition topicPartition) {
  return new SystemStreamPartition(systemName, topicPartition.topic(), new Partition(topicPartition.partition()));
}
/**
 * Verifies getSSPMetadata when the end (upcoming) offset is 0: the expected metadata
 * is oldest="0", newest=null, upcoming="0".
 */
@Test
public void testGetSSPMetadataZeroUpcomingOffset() {
  SystemStreamPartition ssp = new SystemStreamPartition(TEST_SYSTEM, VALID_TOPIC, new Partition(0));
  TopicPartition topicPartition = new TopicPartition(VALID_TOPIC, 0);
  when(mockKafkaConsumer.beginningOffsets(ImmutableList.of(topicPartition)))
      .thenReturn(ImmutableMap.of(topicPartition, -1L));
  when(mockKafkaConsumer.endOffsets(ImmutableList.of(topicPartition)))
      .thenReturn(ImmutableMap.of(topicPartition, 0L));
  Map<SystemStreamPartition, SystemStreamMetadata.SystemStreamPartitionMetadata> expected =
      ImmutableMap.of(ssp, new SystemStreamMetadata.SystemStreamPartitionMetadata("0", null, "0"));
  // Fixed argument order: JUnit expects (expected, actual) so failure messages read correctly.
  assertEquals(expected, kafkaSystemAdmin.getSSPMetadata(ImmutableSet.of(ssp)));
}
/**
 * Verifies getSSPMetadata when the consumer returns no end offset for the partition:
 * the expected metadata is oldest="0" with null newest and null upcoming offsets.
 */
@Test
public void testGetSSPMetadataEmptyUpcomingOffset() {
  SystemStreamPartition ssp = new SystemStreamPartition(TEST_SYSTEM, VALID_TOPIC, new Partition(0));
  TopicPartition topicPartition = new TopicPartition(VALID_TOPIC, 0);
  when(mockKafkaConsumer.beginningOffsets(ImmutableList.of(topicPartition)))
      .thenReturn(ImmutableMap.of(topicPartition, 0L));
  when(mockKafkaConsumer.endOffsets(ImmutableList.of(topicPartition))).thenReturn(ImmutableMap.of());
  Map<SystemStreamPartition, SystemStreamMetadata.SystemStreamPartitionMetadata> expected =
      ImmutableMap.of(ssp, new SystemStreamMetadata.SystemStreamPartitionMetadata("0", null, null));
  // Fixed argument order: JUnit expects (expected, actual) so failure messages read correctly.
  assertEquals(expected, kafkaSystemAdmin.getSSPMetadata(ImmutableSet.of(ssp)));
}
/**
 * Builds a TaskModel for the given partition id with a single input SSP
 * ("System", "Stream", partitionId) and a matching changelog partition.
 *
 * @param partitionId partition used for the task name, the input SSP, and the changelog partition
 * @return the constructed TaskModel
 */
public static TaskModel getTaskModel(int partitionId) {
  // Use the Arrays.asList varargs form instead of an explicit array literal; the
  // HashSet copy keeps the input-SSP set mutable as before.
  return new TaskModel(getTaskName(partitionId),
      new HashSet<>(Arrays.asList(new SystemStreamPartition("System", "Stream", new Partition(partitionId)))),
      new Partition(partitionId));
}
/**
 * Publishes an end-of-stream control message and verifies that raw consumption over
 * all partitions yields exactly one envelope, flagged as end-of-stream.
 */
@Test
public void testEndOfStreamMessage() {
  produceMessages(new EndOfStreamMessage("test-task"));
  // Poll every partition of the stream.
  Set<SystemStreamPartition> ssps = IntStream.range(0, PARTITION_COUNT)
      .mapToObj(p -> new SystemStreamPartition(SYSTEM_STREAM, new Partition(p)))
      .collect(Collectors.toSet());
  List<IncomingMessageEnvelope> envelopes = consumeRawMessages(ssps);
  assertEquals(1, envelopes.size());
  assertTrue(envelopes.get(0).isEndOfStream());
}
/** Registers partition 0 of the stream with the consumer, starting from the empty offset. */
public void register() {
  consumer.register(new SystemStreamPartition(stream, new Partition(0)), "");
  isRegistered = true;
}
/** Constructing a MultiFileHdfsReader with an empty descriptor list must throw SamzaException. */
@Test(expected = SamzaException.class)
public void testInvalidPartitionDescriptor() {
  SystemStreamPartition dummySsp = new SystemStreamPartition("hdfs", "testStream", new Partition(0));
  new MultiFileHdfsReader(HdfsReaderFactory.ReaderType.AVRO, dummySsp, new ArrayList<>(), "0:0");
  // Unreachable unless construction unexpectedly succeeds.
  Assert.fail();
}
/** A starting offset referencing file index 3 beyond the descriptor list must throw SamzaException. */
@Test(expected = SamzaException.class)
public void testOutOfRangeFileIndex() {
  SystemStreamPartition dummySsp = new SystemStreamPartition("hdfs", "testStream", new Partition(0));
  new MultiFileHdfsReader(HdfsReaderFactory.ReaderType.AVRO, dummySsp, Arrays.asList(descriptors), "3:0");
  // Unreachable unless construction unexpectedly succeeds.
  Assert.fail();
}
/**
 * Given an SSP and offset, setStartingOffset should delegate to the offset manager
 * with the task name, SSP, and offset unchanged.
 */
@Test
public void testSetStartingOffset() {
  SystemStreamPartition inputSsp = new SystemStreamPartition("mySystem", "myStream", new Partition(0));
  taskContext.setStartingOffset(inputSsp, "123");
  verify(offsetManager).setStartingOffset(TASK_NAME, inputSsp, "123");
}
/** Verifies StartpointUpcoming's creation timestamp and visitor dispatch. */
@Test
public void testStartpointLatest() {
  StartpointUpcoming startpoint = new StartpointUpcoming();
  // The creation timestamp is captured at construction, so it cannot be in the future.
  Assert.assertTrue(startpoint.getCreationTimestamp() <= Instant.now().toEpochMilli());
  // apply() should dispatch to the visitor with the concrete Startpoint subtype.
  MockStartpointVisitor visitor = new MockStartpointVisitor();
  startpoint.apply(new SystemStreamPartition("sys", "stream", new Partition(1)), visitor);
  Assert.assertEquals(StartpointUpcoming.class, visitor.visitedClass);
}
/** Verifies StartpointTimestamp's stored offset, creation timestamp, and visitor dispatch. */
@Test
public void testStartpointTimestamp() {
  StartpointTimestamp startpoint = new StartpointTimestamp(2222222L);
  Assert.assertEquals(2222222L, startpoint.getTimestampOffset().longValue());
  // The creation timestamp is captured at construction, so it cannot be in the future.
  Assert.assertTrue(startpoint.getCreationTimestamp() <= Instant.now().toEpochMilli());
  // apply() should dispatch to the visitor with the concrete Startpoint subtype.
  MockStartpointVisitor visitor = new MockStartpointVisitor();
  startpoint.apply(new SystemStreamPartition("sys", "stream", new Partition(1)), visitor);
  Assert.assertEquals(StartpointTimestamp.class, visitor.visitedClass);
}
/** Verifies StartpointOldest's creation timestamp and visitor dispatch. */
@Test
public void testStartpointEarliest() {
  StartpointOldest startpoint = new StartpointOldest();
  // The creation timestamp is captured at construction, so it cannot be in the future.
  Assert.assertTrue(startpoint.getCreationTimestamp() <= Instant.now().toEpochMilli());
  // apply() should dispatch to the visitor with the concrete Startpoint subtype.
  MockStartpointVisitor visitor = new MockStartpointVisitor();
  startpoint.apply(new SystemStreamPartition("sys", "stream", new Partition(1)), visitor);
  Assert.assertEquals(StartpointOldest.class, visitor.visitedClass);
}
/**
 * Creates a metadata store over the coordinator stream for the given namespace,
 * wiring the producer, consumer, and admin from the coordinator system's factory.
 */
public CoordinatorStreamStore(String namespace, Config config, MetricsRegistry metricsRegistry) {
  this.config = config;
  this.type = namespace;
  this.keySerde = new CoordinatorStreamKeySerde(type);
  this.coordinatorSystemStream = CoordinatorStreamUtil.getCoordinatorSystemStream(config);
  // Coordinator stream state is read from partition 0.
  this.coordinatorSystemStreamPartition = new SystemStreamPartition(coordinatorSystemStream, new Partition(0));
  SystemFactory systemFactory = CoordinatorStreamUtil.getCoordinatorSystemFactory(config);
  // All three clients target the same coordinator system.
  String coordinatorSystemName = this.coordinatorSystemStream.getSystem();
  this.systemProducer = systemFactory.getProducer(coordinatorSystemName, config, metricsRegistry);
  this.systemConsumer = systemFactory.getConsumer(coordinatorSystemName, config, metricsRegistry);
  this.systemAdmin = systemFactory.getAdmin(coordinatorSystemName, config);
}
/** Verifies StartpointSpecific's stored offset, creation timestamp, and visitor dispatch. */
@Test
public void testStartpointSpecific() {
  StartpointSpecific startpoint = new StartpointSpecific("123");
  Assert.assertEquals("123", startpoint.getSpecificOffset());
  // The creation timestamp is captured at construction, so it cannot be in the future.
  Assert.assertTrue(startpoint.getCreationTimestamp() <= Instant.now().toEpochMilli());
  // apply() should dispatch to the visitor with the concrete Startpoint subtype.
  MockStartpointVisitor visitor = new MockStartpointVisitor();
  startpoint.apply(new SystemStreamPartition("sys", "stream", new Partition(1)), visitor);
  Assert.assertEquals(StartpointSpecific.class, visitor.visitedClass);
}
/** Verifies MockStartpointCustom's payload accessors, creation timestamp, and visitor dispatch. */
@Test
public void testStartpointCustom() {
  MockStartpointCustom startpoint = new MockStartpointCustom("test12345", 12345);
  Assert.assertEquals("test12345", startpoint.getTestInfo1());
  Assert.assertEquals(12345, startpoint.getTestInfo2());
  // The creation timestamp is captured at construction, so it cannot be in the future.
  Assert.assertTrue(startpoint.getCreationTimestamp() <= Instant.now().toEpochMilli());
  // apply() should dispatch to the visitor with the concrete Startpoint subtype.
  MockStartpointVisitor visitor = new MockStartpointVisitor();
  startpoint.apply(new SystemStreamPartition("sys", "stream", new Partition(1)), visitor);
  Assert.assertEquals(MockStartpointCustom.class, visitor.visitedClass);
}