/**
 * One-time fixture setup: boots a single-node MiniDFSCluster on a fixed port,
 * clears any previous /logs output, creates the Avro source and KaaHdfsSink
 * under test, and prepares the log-schema directory on local disk.
 *
 * @throws IOException if the mini cluster or filesystem operations fail
 */
@BeforeClass
public static void setUp() throws IOException {
  int dataNodes = 1;
  int port = 29999;
  JobConf conf = new JobConf();
  channel = new MemoryChannel();
  conf.set("dfs.block.access.token.enable", "false");
  conf.set("dfs.permissions", "true");
  conf.set("hadoop.security.authentication", "simple");
  // Derive the URI from the port variable so the two can never drift apart.
  conf.set("fs.default.name", "hdfs://localhost:" + port);
  dfsCluster = new MiniDFSCluster(port, conf, dataNodes, true, true, null, null);
  fileSystem = dfsCluster.getFileSystem();
  fileSystem.delete(new Path("/logs"), true);
  source = new AvroSource();
  sink = new KaaHdfsSink();
  logSchemasRootDir = new File("schemas");
  if (logSchemasRootDir.exists()) {
    // File.delete() fails silently on a non-empty directory, leaving stale
    // schemas behind; delete the tree bottom-up instead.
    deleteRecursively(logSchemasRootDir);
  }
  prepareSchema(logSchemasRootDir);
}

/** Deletes a file, or a directory together with all of its contents. */
private static void deleteRecursively(File file) {
  File[] children = file.listFiles(); // null for plain files
  if (children != null) {
    for (File child : children) {
      deleteRecursively(child);
    }
  }
  file.delete();
}
// NOTE(review): fragment of a larger try/catch not fully visible here. It resizes
// the channel's queue to the configured capacity; on InterruptedException it
// restores the thread's interrupt flag (standard re-interrupt idiom) and then —
// presumably after the catch closes — recreates the channel counter. Confirm
// against the full method before editing.
resizeQueue(capacity); } catch (InterruptedException e) { Thread.currentThread().interrupt(); channelCounter = new ChannelCounter(getName());
@Override protected void initialize() { super.initialize(); try { Field f = MemoryChannel.class.getDeclaredField("channelCounter"); f.setAccessible(true); channelCounterRef = (ChannelCounter) f.get(this); } catch (NoSuchFieldException e) { LOGGER.error(e.getMessage()); } catch (SecurityException e) { LOGGER.error(e.getMessage()); } catch (IllegalArgumentException e) { LOGGER.error(e.getMessage()); } catch (IllegalAccessException e) { LOGGER.error(e.getMessage()); } // try catch setupTime = new Date().getTime(); accPutsOK = 0; accPutsFail = 0; accTakesOK = 0; accTakesFail = 0; } // initialize
// NOTE(review): fragment — wires the sink under test to a freshly configured
// in-memory channel, reusing the same Context that configured the sink.
Channel memoryChannel = new MemoryChannel(); Configurables.configure(memoryChannel, context); kafkaSink.setChannel(memoryChannel);
// NOTE(review): fragment of a larger try/catch not fully visible here (duplicate
// of an earlier chunk). Resizes the queue to the configured capacity; on
// InterruptedException re-interrupts the current thread, then recreates the
// channel counter. Confirm against the full method before editing.
resizeQueue(capacity); } catch (InterruptedException e) { Thread.currentThread().interrupt(); channelCounter = new ChannelCounter(getName());
// NOTE(review): fragment — enables Avro event serialization on the sink's
// context, applies it to the sink, then wires the sink to a memory channel
// configured with the same context.
context.put(AVRO_EVENT, "true"); Configurables.configure(kafkaSink, context); Channel memoryChannel = new MemoryChannel(); Configurables.configure(memoryChannel, context); kafkaSink.setChannel(memoryChannel);
// NOTE(review): fragment — standard test wiring: configure an in-memory channel
// with the shared context and attach it to the Kafka sink under test.
Channel memoryChannel = new MemoryChannel(); Configurables.configure(memoryChannel, context); kafkaSink.setChannel(memoryChannel);
@Test public void testTopicAndKeyFromHeader() { Sink kafkaSink = new KafkaSink(); Context context = prepareDefaultContext(); Configurables.configure(kafkaSink, context); Channel memoryChannel = new MemoryChannel(); Configurables.configure(memoryChannel, context); kafkaSink.setChannel(memoryChannel); kafkaSink.start(); String msg = "test-topic-and-key-from-header"; Map<String, String> headers = new HashMap<String, String>(); headers.put("topic", TestConstants.CUSTOM_TOPIC); headers.put("key", TestConstants.CUSTOM_KEY); Transaction tx = memoryChannel.getTransaction(); tx.begin(); Event event = EventBuilder.withBody(msg.getBytes(), headers); memoryChannel.put(event); tx.commit(); tx.close(); try { Sink.Status status = kafkaSink.process(); if (status == Sink.Status.BACKOFF) { fail("Error Occurred"); } } catch (EventDeliveryException ex) { // ignore } checkMessageArrived(msg, TestConstants.CUSTOM_TOPIC); }
/**
 * Builds a KafkaSink + MemoryChannel pair from the given context, pushes a
 * single event with the given body through the channel, and returns the
 * status of one sink.process() call.
 *
 * @param context configuration applied to both sink and channel
 * @param msg     event body to send
 * @return the sink's processing status
 * @throws EventDeliveryException if the sink fails to deliver
 */
private Sink.Status prepareAndSend(Context context, String msg) throws EventDeliveryException {
  Sink sinkUnderTest = new KafkaSink();
  Configurables.configure(sinkUnderTest, context);
  Channel sourceChannel = new MemoryChannel();
  Configurables.configure(sourceChannel, context);
  sinkUnderTest.setChannel(sourceChannel);
  sinkUnderTest.start();

  // Stage exactly one event for the sink to drain.
  Transaction transaction = sourceChannel.getTransaction();
  transaction.begin();
  sourceChannel.put(EventBuilder.withBody(msg.getBytes()));
  transaction.commit();
  transaction.close();

  return sinkUnderTest.process();
}
// NOTE(review): fragment — standard test wiring: configure an in-memory channel
// with the shared context and attach it to the Kafka sink under test.
Channel memoryChannel = new MemoryChannel(); Configurables.configure(memoryChannel, context); kafkaSink.setChannel(memoryChannel);
@Test public void testReplaceSubStringOfTopicWithHeaders() { String topic = TestConstants.HEADER_1_VALUE + "-topic"; Sink kafkaSink = new KafkaSink(); Context context = prepareDefaultContext(); context.put(TOPIC_CONFIG, TestConstants.HEADER_TOPIC); Configurables.configure(kafkaSink, context); Channel memoryChannel = new MemoryChannel(); Configurables.configure(memoryChannel, context); kafkaSink.setChannel(memoryChannel); kafkaSink.start(); String msg = "test-replace-substring-of-topic-with-headers"; Map<String, String> headers = new HashMap<>(); headers.put(TestConstants.HEADER_1_KEY, TestConstants.HEADER_1_VALUE); Transaction tx = memoryChannel.getTransaction(); tx.begin(); Event event = EventBuilder.withBody(msg.getBytes(), headers); memoryChannel.put(event); tx.commit(); tx.close(); try { Sink.Status status = kafkaSink.process(); if (status == Sink.Status.BACKOFF) { fail("Error Occurred"); } } catch (EventDeliveryException ex) { // ignore } checkMessageArrived(msg, topic); }
// NOTE(review): fragment — configures a custom topic-override header name on the
// sink, then wires the sink to a memory channel configured with the same context.
context.put(KafkaSinkConstants.TOPIC_OVERRIDE_HEADER, customTopicHeader); Configurables.configure(kafkaSink, context); Channel memoryChannel = new MemoryChannel(); Configurables.configure(memoryChannel, context); kafkaSink.setChannel(memoryChannel);
@Test public void testDefaultTopic() { Sink kafkaSink = new KafkaSink(); Context context = prepareDefaultContext(); Configurables.configure(kafkaSink, context); Channel memoryChannel = new MemoryChannel(); Configurables.configure(memoryChannel, context); kafkaSink.setChannel(memoryChannel); kafkaSink.start(); String msg = "default-topic-test"; Transaction tx = memoryChannel.getTransaction(); tx.begin(); Event event = EventBuilder.withBody(msg.getBytes()); memoryChannel.put(event); tx.commit(); tx.close(); try { Sink.Status status = kafkaSink.process(); if (status == Sink.Status.BACKOFF) { fail("Error Occurred"); } } catch (EventDeliveryException ex) { // ignore } checkMessageArrived(msg, DEFAULT_TOPIC); }
/**
 * Verifies that process() on an empty channel returns BACKOFF and that nothing
 * is published to the default topic.
 *
 * @throws EventDeliveryException if the sink fails unexpectedly
 */
@Test
public void testEmptyChannel() throws EventDeliveryException {
  Sink kafkaSink = new KafkaSink();
  Context context = prepareDefaultContext();
  Configurables.configure(kafkaSink, context);
  Channel memoryChannel = new MemoryChannel();
  Configurables.configure(memoryChannel, context);
  kafkaSink.setChannel(memoryChannel);
  kafkaSink.start();

  Sink.Status status = kafkaSink.process();
  if (status != Sink.Status.BACKOFF) {
    fail("Error Occurred");
  }

  // Wildcard generics instead of the raw ConsumerRecords type.
  ConsumerRecords<?, ?> recs = pollConsumerRecords(DEFAULT_TOPIC, 2);
  assertNotNull(recs);
  // JUnit's assertEquals takes (expected, actual); the original had them swapped,
  // which produces a misleading failure message.
  assertEquals(0, recs.count());
}
private Channel initChannel() { //Channel configuration Context channelContext = new Context(); channelContext.put("capacity", "10000"); channelContext.put("transactionCapacity", "200"); Channel channel = new MemoryChannel(); channel.setName("memorychannel"); Configurables.configure(channel, channelContext); return channel; }
private Channel initChannel() { //Channel configuration Context channelContext = new Context(); channelContext.put("capacity", "10000"); channelContext.put("transactionCapacity", "200"); Channel channel = new MemoryChannel(); channel.setName("memorychannel"); Configurables.configure(channel, channelContext); return channel; }
private Channel initChannel() { //Channel configuration Context channelContext = new Context(); channelContext.put("capacity", "10000"); channelContext.put("transactionCapacity", "200"); Channel channel = new MemoryChannel(); channel.setName("memorychannel"); Configurables.configure(channel, channelContext); return channel; }
private Channel initChannel() { //Channel configuration Context channelContext = new Context(); channelContext.put("capacity", "10000"); channelContext.put("transactionCapacity", "200"); Channel channel = new MemoryChannel(); channel.setName("memorychannel"); Configurables.configure(channel, channelContext); return channel; }
private Channel initChannel() { // Channel configuration Context channelContext = new Context(); channelContext.put("capacity", "10000"); channelContext.put("transactionCapacity", "200"); Channel channel = new MemoryChannel(); channel.setName("memorychannel"); Configurables.configure(channel, channelContext); return channel; }
private Channel initChannel() { // Channel configuration Context channelContext = new Context(); channelContext.put("capacity", "10000"); channelContext.put("transactionCapacity", "200"); Channel channel = new MemoryChannel(); channel.setName("memorychannel"); Configurables.configure(channel, channelContext); return channel; }