/**
 * Forwards the given batch of events to this source's channel processor.
 *
 * @param events the events to submit downstream
 * @throws ChannelException if the channel processor rejects the batch
 */
public void putAll(List<Event> events) throws ChannelException {
  getChannelProcessor().processEventBatch(events);
}
/**
 * Pushes the buffered events to the channel, records them as accepted,
 * then empties the buffer and stamps the time of this push.
 */
private void flushEventBatch(List<Event> eventList) {
  final int batchSize = eventList.size();
  channelProcessor.processEventBatch(eventList);
  sourceCounter.addToEventAcceptedCount(batchSize);
  eventList.clear();
  lastPushToChannel = systemClock.currentTimeMillis();
}
/**
 * Scribe Thrift endpoint: converts each incoming {@code LogEntry} into a
 * Flume event (the Scribe category, when present, is preserved in the
 * {@code SCRIBE_CATEGORY} header) and forwards the batch to the channel.
 *
 * @param list the log entries received from the Scribe client; may be null
 * @return {@code ResultCode.OK} when the batch was accepted;
 *         {@code ResultCode.TRY_LATER} when the list is null or the
 *         channel rejected the batch
 * @throws TException per the Thrift service contract (not thrown here)
 */
public ResultCode Log(List<LogEntry> list) throws TException {
  if (list != null) {
    sourceCounter.addToEventReceivedCount(list.size());
    try {
      List<Event> events = new ArrayList<Event>(list.size());
      for (LogEntry entry : list) {
        Map<String, String> headers = new HashMap<String, String>(1, 1);
        String category = entry.getCategory();
        if (category != null) {
          headers.put(SCRIBE_CATEGORY, category);
        }
        // FIX: encode with an explicit charset. The previous bare
        // getBytes() used the platform default, making event bodies
        // differ across hosts. UTF-8 assumed — confirm against the
        // Scribe client's encoding.
        Event event = EventBuilder.withBody(
            entry.getMessage().getBytes(java.nio.charset.StandardCharsets.UTF_8),
            headers);
        events.add(event);
      }
      if (!events.isEmpty()) {
        getChannelProcessor().processEventBatch(events);
      }
      sourceCounter.addToEventAcceptedCount(list.size());
      return ResultCode.OK;
    } catch (Exception e) {
      LOG.warn("Scribe source handling failure", e);
      sourceCounter.incrementEventReadOrChannelFail(e);
    }
  }
  // Null input or failed delivery: ask the client to retry later.
  return ResultCode.TRY_LATER;
}
}
// Fragment (continues beyond this view): count the events as received,
// then attempt the batch put; the warning below begins handling a
// channel rejection — its message string is cut off at the view edge.
sourceCounter.addToEventReceivedCount(events.size()); try { getChannelProcessor().processEventBatch(events); } catch (ChannelException ex) { LOG.warn("Error appending event to channel. "
// Fragment: count the events as received, then submit the batch.
// NOTE(review): no visible try/catch here — confirm the enclosing method
// handles ChannelException, and that an accepted-count update follows.
sourceCounter.addToEventReceivedCount(events.size()); getChannelProcessor().processEventBatch(events);
// Fragment: submit the batch, then record both the batch and its events
// as accepted. Counters are updated only after processEventBatch returns
// without throwing.
getChannelProcessor().processEventBatch(batchArrayList); sourceCounter.incrementAppendBatchAcceptedCount(); sourceCounter.addToEventAcceptedCount(batchArrayList.size());
/**
 * Thrift endpoint: converts each incoming {@code ThriftFlumeEvent} into a
 * Flume event and forwards the batch to the channel processor.
 *
 * @param events the Thrift events delivered by the client
 * @return {@code Status.OK} when the channel accepted the batch,
 *         {@code Status.FAILED} when it threw a ChannelException
 * @throws TException per the Thrift service contract (not thrown here)
 */
@Override
public Status appendBatch(List<ThriftFlumeEvent> events) throws TException {
  sourceCounter.incrementAppendBatchReceivedCount();
  sourceCounter.addToEventReceivedCount(events.size());
  List<Event> flumeEvents = Lists.newArrayList();
  for (ThriftFlumeEvent event : events) {
    flumeEvents.add(EventBuilder.withBody(event.getBody(), event.getHeaders()));
  }
  try {
    getChannelProcessor().processEventBatch(flumeEvents);
  } catch (ChannelException ex) {
    // FIX: SLF4J uses {} placeholders, not printf-style %s — the old call
    // logged a literal "%s" and never interpolated the source name. Also
    // pass the exception so its stack trace is recorded.
    logger.warn("Thrift source {} could not append events to the channel.", getName(), ex);
    sourceCounter.incrementChannelWriteFail();
    return Status.FAILED;
  }
  sourceCounter.incrementAppendBatchAcceptedCount();
  sourceCounter.addToEventAcceptedCount(events.size());
  return Status.OK;
}
}
// Fragment (continues beyond this view): count the batch as received,
// submit it, and commit the reader only after the channel accepts it;
// the catch block handling a rejection is cut off at the view edge.
sourceCounter.incrementAppendBatchReceivedCount(); try { getChannelProcessor().processEventBatch(events); reader.commit(); } catch (ChannelException ex) {
/**
 * Avro endpoint: converts each incoming {@code AvroFlumeEvent} into a
 * Flume event and forwards the batch to the channel processor.
 *
 * @param events the Avro events delivered by the client
 * @return {@code Status.OK} when the channel accepted the batch,
 *         {@code Status.FAILED} on any non-Error failure; Errors are
 *         rethrown after being counted
 */
@Override
public Status appendBatch(List<AvroFlumeEvent> events) {
  logger.debug("Avro source {}: Received avro event batch of {} events.",
      getName(), events.size());
  sourceCounter.incrementAppendBatchReceivedCount();
  sourceCounter.addToEventReceivedCount(events.size());

  List<Event> converted = new ArrayList<Event>();
  for (AvroFlumeEvent avro : events) {
    converted.add(
        EventBuilder.withBody(avro.getBody().array(), toStringMap(avro.getHeaders())));
  }

  try {
    getChannelProcessor().processEventBatch(converted);
  } catch (Throwable t) {
    logger.error("Avro source " + getName() + ": Unable to process event "
        + "batch. Exception follows.", t);
    sourceCounter.incrementChannelWriteFail();
    // JVM Errors must not be swallowed; count the failure, then rethrow.
    if (t instanceof Error) {
      throw (Error) t;
    }
    return Status.FAILED;
  }
  sourceCounter.incrementAppendBatchAcceptedCount();
  sourceCounter.addToEventAcceptedCount(events.size());
  return Status.OK;
}
// Fragment (continues beyond this view): record the Kafka fetch latency
// in milliseconds (nanoTime delta / 1e6), count the events as received,
// submit the batch, then count them as accepted. The debug branch that
// follows is cut off at the view edge.
counter.addToKafkaEventGetTimer((System.nanoTime() - nanoBatchStartTime) / (1000 * 1000)); counter.addToEventReceivedCount((long) eventList.size()); getChannelProcessor().processEventBatch(eventList); counter.addToEventAcceptedCount(eventList.size()); if (log.isDebugEnabled()) {
// Fragment (enclosing try starts before this view): submit the batch and
// count the events as accepted; the Throwable handler that follows is
// cut off at the view edge.
channelProcessor.processEventBatch(events); sourceCounter.addToEventAcceptedCount(numEvents); } catch (Throwable t) {
// Fragment (enclosing try starts before this view): submit the batch and
// commit the reader only after the channel accepts it; the
// ChannelFullException handler that follows is cut off at the view edge.
getChannelProcessor().processEventBatch(events); reader.commit(); } catch (ChannelFullException ex) {
// Fragment: submit the accumulated batch to the channel processor.
// NOTE(review): no visible error handling or counter update here —
// confirm the enclosing method provides both.
getChannelProcessor().processEventBatch(eventBatchListToProcess);
// Fragment: submit the batch, clear the error flag once the channel has
// accepted it, then count the events as accepted.
getChannelProcessor().processEventBatch(events); error = false; sourceCounter.addToEventAcceptedCount(size);
/**
 * Flushes the buffered events to the channel processor and empties the
 * buffer.
 *
 * @throws IOException declared by the Flushable contract; not thrown here
 */
@Override
public void flush() throws IOException {
  getChannelProcessor().processEventBatch(events);
  events.clear();
}
/**
 * Delivers the buffered events to the channel and updates the accepted
 * counter, then resets the buffer and the last-push timestamp.
 */
private void flushEventBatch(List<Event> eventList) {
  channelProcessor.processEventBatch(eventList);
  sourceCounter.addToEventAcceptedCount(eventList.size());
  // Size is read before clearing; the buffer is reused by the caller.
  eventList.clear();
  lastPushToChannel = systemClock.currentTimeMillis();
}
/**
 * Hands the buffered events to the channel, counts them as accepted,
 * empties the buffer, and records when this push happened.
 */
private void flushEventBatch(List<Event> eventList) {
  final int delivered = eventList.size();
  channelProcessor.processEventBatch(eventList);
  sourceCounter.addToEventAcceptedCount(delivered);
  eventList.clear();
  lastPushToChannel = systemClock.currentTimeMillis();
}
@Override protected Status doProcess() throws EventDeliveryException { Status status = Status.BACKOFF; if(TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis() - lastPoll) > refreshInterval) { final List<Event> events = new ArrayList<Event>(tickers.size()); Map<String, Float> prices = server.getQuote(tickers); lastPoll = System.currentTimeMillis(); // Convert each price into ticker = price form in UTF-8 as event body for(Map.Entry<String, Float> e: prices.entrySet()) { StringBuilder builder = new StringBuilder(e.getKey()); builder.append(" = ").append(e.getValue()); events.add(EventBuilder.withBody(builder.toString().getBytes(Charsets .UTF_8))); } getChannelProcessor().processEventBatch(events); status = Status.READY; } return status; }
@Override public void processRecords(List<Record> records, IRecordProcessorCheckpointer checkpointer) { final List<Event> events = new ArrayList<Event>(records.size()); for (Record record: records) { events.add(EventBuilder.withBody(record.getData().array())); } try { getChannelProcessor().processEventBatch(events); checkpoint(checkpointer); } catch (Exception ex) { // Shutdown this worker, so we can restart from the last // committed position. this.shutdown(checkpointer, null); } }
/**
 * Thrift endpoint: converts each incoming {@code ThriftFlumeEvent} into a
 * Flume event and forwards the batch to the channel processor.
 *
 * @param events the Thrift events delivered by the client
 * @return {@code Status.OK} when the channel accepted the batch,
 *         {@code Status.FAILED} when it threw a ChannelException
 * @throws TException per the Thrift service contract (not thrown here)
 */
@Override
public Status appendBatch(List<ThriftFlumeEvent> events) throws TException {
  sourceCounter.incrementAppendBatchReceivedCount();
  sourceCounter.addToEventReceivedCount(events.size());
  List<Event> flumeEvents = Lists.newArrayList();
  for (ThriftFlumeEvent event : events) {
    flumeEvents.add(EventBuilder.withBody(event.getBody(), event.getHeaders()));
  }
  try {
    getChannelProcessor().processEventBatch(flumeEvents);
  } catch (ChannelException ex) {
    // FIX: SLF4J uses {} placeholders, not printf-style %s — the old call
    // logged a literal "%s" and never interpolated the source name. Also
    // pass the exception so its stack trace is recorded.
    logger.warn("Thrift source {} could not append events to the channel.", getName(), ex);
    sourceCounter.incrementChannelWriteFail();
    return Status.FAILED;
  }
  sourceCounter.incrementAppendBatchAcceptedCount();
  sourceCounter.addToEventAcceptedCount(events.size());
  return Status.OK;
}
}