/** Creates a serde pairing {@link StringSerializer} with {@link StringDeserializer}. */
public StringSerde() {
    super(new StringSerializer(), new StringDeserializer());
}
} // closing brace of the enclosing class (present in the original source)
/**
 * Computes the partition index that Kafka's default partitioning scheme would assign to
 * {@code key} on {@code topic}: murmur2-hash the serialized key, mask to non-negative,
 * then take it modulo the topic's partition count.
 *
 * @param consumer used only to look up the partition count for {@code topic}
 * @param topic    topic whose partition count bounds the result
 * @param key      record key to hash
 * @return a partition index in {@code [0, partitionCount)}
 */
private int getPartitionIndex(Consumer<String, byte[]> consumer, String topic, String key) {
    int partitionNumber = consumer.partitionsFor(topic).size();
    // try-with-resources replaces the previous @SuppressWarnings("resource"):
    // Kafka's Serializer is Closeable, so close it instead of suppressing the warning.
    try (StringSerializer keySerializer = new StringSerializer()) {
        byte[] serializedKey = keySerializer.serialize(topic, key);
        // Mask the sign bit so the modulo result is non-negative (same trick as
        // Kafka's Utils.toPositive used by the default partitioner).
        int positive = Utils.murmur2(serializedKey) & 0x7fffffff;
        return positive % partitionNumber;
    }
}
} // closing brace of the enclosing class (present in the original source)
/**
 * Builds the shared test fixture: a populated {@code DummyEntity}, a single-element
 * entity array, and configured JSON/String (de)serializers.
 */
@Before
public void init() {
    // Sample entity with one value of each field kind, including a nested map-of-list.
    entity = new DummyEntity();
    entity.intValue = 19;
    entity.longValue = 7L;
    entity.stringValue = "dummy";
    List<String> list = Arrays.asList("dummy1", "dummy2");
    entity.complexStruct = new HashMap<>();
    entity.complexStruct.put((short) 4, list);
    entityArray = new DummyEntity[] { entity };
    topic = "topic-name";
    // Anonymous subclasses capture the generic type for the JSON deserializers.
    jsonReader = new JsonDeserializer<DummyEntity>() { };
    jsonReader.configure(new HashMap<>(), false);
    jsonReader.close(); // does nothing, so may be called any time, or not called at all
    jsonArrayReader = new JsonDeserializer<DummyEntity[]>() { };
    jsonArrayReader.configure(new HashMap<>(), false);
    jsonArrayReader.close(); // does nothing, so may be called any time, or not called at all
    jsonWriter = new JsonSerializer<>();
    jsonWriter.configure(new HashMap<>(), false);
    jsonWriter.close(); // does nothing, so may be called any time, or not called at all
    stringReader = new StringDeserializer();
    stringReader.configure(new HashMap<>(), false);
    stringWriter = new StringSerializer();
    stringWriter.configure(new HashMap<>(), false);
    dummyEntityJsonDeserializer = new DummyEntityJsonDeserializer();
    dummyEntityArrayJsonDeserializer = new DummyEntityArrayJsonDeserializer();
}
public long send (String topic, long targetRecords, boolean debugMode) { long sentRecords = 0L; long sentBytes = 0L; while (sentRecords < targetRecords) { String line = cachedData.getRecord(); String currentTime = Long.toString(System.currentTimeMillis()); // Key and Value will be serialized twice. // 1. in producer.send method // 2. explicitly serialize here to count byte size. byte[] keyByte = serializer.serialize(topic, currentTime); byte[] valueByte = fillArray(keyByte, serializer.serialize(topic, line)); ProducerRecord serializedRecord = new ProducerRecord(topic, keyByte, valueByte); kafkaProducer.send(serializedRecord, callback); //update counter sentRecords++; sentBytes = sentBytes + keyByte.length + valueByte.length; } return sentRecords; }
/** Forwards configuration to the wrapped {@link StringSerializer} delegate. */
@Override
public void configure(Map<String, ?> configs, boolean isKey) {
    stringSerializer.configure(configs,isKey);
}
deliveredMessageOffsetTracker, null, (tp) -> null); StringSerializer stringSerializer = new StringSerializer(); ConsumerRecord<byte[], byte[]> consumerRecord0 = new ConsumerRecord<>("topic", 0, 0, null, stringSerializer.serialize("topic", "value")); ConsumerRecord<byte[], byte[]> consumerRecord1 = new ConsumerRecord<>("topic", 0, 1, null, stringSerializer.serialize("topic", "ErrorBytes")); ConsumerRecord<byte[], byte[]> consumerRecord2 = new ConsumerRecord<>("topic", 0, 2, null, stringSerializer.serialize("topic", "value")); stringSerializer.serialize("topic", "ErrorBytes")); ConsumerRecord<byte[], byte[]> consumerRecord4 = new ConsumerRecord<>("topic", 1, 1, null, stringSerializer.serialize("topic", "value")); stringSerializer.serialize("topic", "value"));
/**
 * Builds a {@link StringSerializer} and applies the supplied configuration to it.
 *
 * @param configuration serializer settings to apply
 * @param isKey         whether the serializer is used for record keys
 * @return the configured serializer
 */
private StringSerializer getSerializer(Map<String, Object> configuration, boolean isKey) {
    final StringSerializer result = new StringSerializer();
    result.configure(configuration, isKey);
    return result;
}
/** Serializing a null payload must yield null bytes (Kafka tombstone semantics). */
@Test
public void testSerializedStringNullEqualsNull() {
    byte[] serialized = stringWriter.serialize(topic, null);
    assertThat(serialized).isNull();
}
/**
 * {@inheritDoc}
 *
 * <p>Forwards the configuration to both delegates. NOTE(review): the fields are named
 * {@code type} and {@code value} — presumably the type/key-side and value-side
 * components of this serde; confirm against the enclosing class declaration.
 */
@Override
public void configure(Map<String, ?> configs, boolean isKey) {
    type.configure(configs, isKey);
    value.configure(configs, isKey);
}
/**
 * Returns a producer that uses {@link StringSerializer} for
 * keys and {@link ByteArraySerializer} for values.
 *
 * <p>Delegates to {@code getProducer}, which may legitimately return an empty
 * {@link Optional} (e.g. when the producer cannot be created).
 *
 * @return An {@link Optional} of {@link KafkaProducer}.
 */
public Optional<KafkaProducer<String, byte[]>> getDefaultProducer() {
    return getProducer(new StringSerializer(), new ByteArraySerializer());
}
/**
 * Verifies two failure modes of JSON deserialization:
 * (1) non-JSON bytes raise a {@link SerializationException} whose cause is a
 *     {@link JsonParseException}; and
 * (2) a type header pointing at a class outside the trusted packages is rejected with
 *     an {@link IllegalArgumentException}.
 */
@Test
public void testDeserializeSerializedDummyException() {
    // Case 1: plain string bytes ("dummy") are not valid JSON for DummyEntity.
    try {
        jsonReader.deserialize(topic, stringWriter.serialize(topic, "dummy"));
        fail("Expected SerializationException");
    } catch (SerializationException e) {
        assertThat(e.getMessage()).startsWith("Can't deserialize data [");
        assertThat(e.getCause()).isInstanceOf(JsonParseException.class);
    } catch (Exception e) {
        // Any other exception type is a test failure, reported with its class.
        fail("Expected SerializationException, not " + e.getClass());
    }
    // Case 2: a forged class-id header must be rejected by the trusted-packages check.
    try {
        Headers headers = new RecordHeaders();
        headers.add(AbstractJavaTypeMapper.DEFAULT_CLASSID_FIELD_NAME,
                "com.malware.DummyEntity".getBytes());
        dummyEntityJsonDeserializer.deserialize(topic, headers, jsonWriter.serialize(topic, entity));
        fail("Expected IllegalArgumentException");
    } catch (IllegalArgumentException e) {
        assertThat(e.getMessage()).contains("not in the trusted packages");
    } catch (Exception e) {
        fail("Expected IllegalArgumentException, not " + e.getClass());
    }
}
/**
 * Configures the wrapped serializer and deserializer from converter settings, forwarding
 * the configured encoding to each under its respective property name.
 */
@Override
public void configure(Map<String, ?> configs) {
    StringConverterConfig conf = new StringConverterConfig(configs);
    boolean isKey = conf.type() == ConverterType.KEY;
    String encoding = conf.encoding();

    // Serializer side: copy the raw configs and inject the encoding it expects.
    Map<String, Object> serializerConfigs = new HashMap<>(configs);
    serializerConfigs.put("serializer.encoding", encoding);
    serializer.configure(serializerConfigs, isKey);

    // Deserializer side: same pattern under the deserializer's property name.
    Map<String, Object> deserializerConfigs = new HashMap<>(configs);
    deserializerConfigs.put("deserializer.encoding", encoding);
    deserializer.configure(deserializerConfigs, isKey);
}
/**
 * Verifies that an auto-generated client id is propagated to configured metrics
 * reporters.
 */
@Test
public void testMetricsReporterAutoGeneratedClientId() {
    Properties props = new Properties();
    props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9999");
    props.setProperty(ProducerConfig.METRIC_REPORTER_CLASSES_CONFIG, MockMetricsReporter.class.getName());
    // try-with-resources: the original called close() only after the assertion, so a
    // failed assertion leaked the producer and its background threads.
    try (KafkaProducer<String, String> producer = new KafkaProducer<>(
            props, new StringSerializer(), new StringSerializer())) {
        MockMetricsReporter mockMetricsReporter =
                (MockMetricsReporter) producer.metrics.reporters().get(0);
        Assert.assertEquals(producer.getClientId(), mockMetricsReporter.clientId);
    }
}
/**
 * Serializes {@code data} to its JSON text form, then to bytes via the wrapped
 * {@link StringSerializer}.
 *
 * @param topic topic the record is bound for (forwarded to the delegate)
 * @param data  record payload; null maps to null bytes
 * @return serialized bytes, or null on a null payload or a JSON mapping failure
 */
@Override
public byte[] serialize(String topic, Object data) {
    // Kafka's Serializer contract: a null record value must serialize to null
    // (tombstone). Previously a null payload produced the literal JSON text "null".
    if (data == null) {
        return null;
    }
    String str = null;
    try {
        str = om.writeValueAsString(data);
    } catch (IOException e) {
        // Best-effort by design: log and fall through with str == null, which the
        // delegate serializes to null rather than throwing (original behavior kept).
        logger.error("Kafka serialization for send error!", e);
    }
    return stringSerializer.serialize(topic, str);
}
/**
 * Verifies that the producer constructor rejects a Properties object containing a
 * non-String key with a ConfigException that names the offending key.
 */
@Test
public void testConstructorWithNotStringKey() {
    Properties props = new Properties();
    props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9999");
    // put (not setProperty) so a non-String key can be smuggled in.
    props.put(1, "not string key");
    try (KafkaProducer<?, ?> ff = new KafkaProducer<>(props, new StringSerializer(), new StringSerializer())) {
        fail("Constructor should throw exception");
    } catch (ConfigException e) {
        assertTrue("Unexpected exception message: " + e.getMessage(), e.getMessage().contains("not string key"));
    }
}
/**
 * Converts a Connect value to bytes by serializing its {@code toString()} form
 * (null stays null), wrapping any serialization failure in a {@link DataException}.
 */
@Override
public byte[] fromConnectData(String topic, Schema schema, Object value) {
    try {
        final String text = (value == null) ? null : value.toString();
        return serializer.serialize(topic, text);
    } catch (SerializationException e) {
        throw new DataException("Failed to serialize to a string: ", e);
    }
}
/**
 * Verifies that sending to a partition beyond the currently-known partition range
 * forces metadata refreshes until the cluster view includes that partition.
 *
 * <p>The mocked {@code metadata.fetch()} returns the stale one-partition cluster twice
 * and the three-partition cluster on the third call, so the exact verify counts below
 * (2 requestUpdate / 2 awaitUpdate / 3 fetch) encode that refresh sequence — be careful
 * when reordering anything here.
 */
@Test
public void testMetadataWithPartitionOutOfRange() throws Exception {
    Map<String, Object> configs = new HashMap<>();
    configs.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9999");
    configs.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, 60000);
    // Create a record with a partition higher than the initial (outdated) partition range
    ProducerRecord<String, String> record = new ProducerRecord<>(topic, 2, null, "value");
    Metadata metadata = mock(Metadata.class);
    MockTime mockTime = new MockTime();
    when(metadata.fetch()).thenReturn(onePartitionCluster, onePartitionCluster, threePartitionCluster);
    KafkaProducer<String, String> producer = new KafkaProducer<String, String>(configs, new StringSerializer(), new StringSerializer(), metadata, new MockClient(Time.SYSTEM, metadata), null, mockTime) {
        @Override
        Sender newSender(LogContext logContext, KafkaClient kafkaClient, Metadata metadata) {
            // give Sender its own Metadata instance so that we can isolate Metadata calls from KafkaProducer
            return super.newSender(logContext, kafkaClient, new Metadata(0, 100_000, true));
        }
    };
    // One request update if metadata is available but outdated for the given record
    producer.send(record);
    verify(metadata, times(2)).requestUpdate();
    verify(metadata, times(2)).awaitUpdate(anyInt(), anyLong());
    verify(metadata, times(3)).fetch();
    producer.close(Duration.ofMillis(0));
}
/**
 * Serializes the entity's inner payload ({@code data.getData()}) to JSON text, then to
 * bytes via the wrapped {@link StringSerializer}.
 *
 * @param topic topic the record is bound for (forwarded to the delegate)
 * @param data  aggregate wrapper; null maps to null bytes
 * @return serialized bytes, or null on a null payload or a JSON mapping failure
 */
@Override
public byte[] serialize(String topic, AggregateEntity data) {
    // Guard against a null record: the original dereferenced data.getData() and threw
    // NPE, but Kafka's Serializer contract requires null in -> null out (tombstone).
    // This also matches the null handling of the sibling Row serializer.
    if (data == null) {
        return null;
    }
    String str = null;
    try {
        str = om.writeValueAsString(data.getData());
    } catch (IOException e) {
        // Best-effort by design: log and fall through with str == null, which the
        // delegate serializes to null rather than throwing (original behavior kept).
        logger.error("Kafka serialization for send error!", e);
    }
    return stringSerializer.serialize(topic, str);
}
KafkaProducer<String, String> producer = new KafkaProducer<String, String>(configs, new StringSerializer(), new StringSerializer(), metadata, new MockClient(Time.SYSTEM, metadata), null, Time.SYSTEM) { @Override Sender newSender(LogContext logContext, KafkaClient kafkaClient, Metadata metadata) {
/**
 * Serializes a row by joining its values into a single delimited string, then encoding
 * it via the wrapped {@link StringSerializer}.
 *
 * @param topic topic the record is bound for
 * @param data  row to serialize; null maps to null bytes (tombstone)
 * @return serialized bytes, or null for a null row
 */
@Override
public byte[] serialize(String topic, Row data) {
    if (data == null) {
        return null;
    }
    String message = joiner.join(RowUtils.valuesFor(data));
    // Forward the real topic (was hard-coded null). StringSerializer ignores its topic
    // argument, so behavior is unchanged, but this keeps the call consistent with the
    // Serializer contract and the other serializers in this codebase.
    return stringSerializer.serialize(topic, message);
}