/**
 * Decides whether the caller should process {@code tuple}.
 *
 * <p>Tick tuples are not handled directly; instead they set {@code forceFlush}
 * so the pending batch is written out on the next opportunity.
 *
 * @param tuple the incoming tuple.
 * @return {@code false} for tick tuples (after flagging a flush), {@code true} otherwise.
 */
public boolean shouldHandle(Tuple tuple) {
    if (!TupleUtils.isTick(tuple)) {
        return true;
    }
    LOG.debug("TICK received! current batch status [{}/{}]", tupleBatch.size(), batchSize);
    forceFlush = true;
    return false;
}
/**
 * Registers the tick-tuple frequency in the component configuration so this
 * component receives a tick tuple every {@code flushIntervalInSeconds} seconds.
 *
 * @return the parent configuration augmented with the tick frequency.
 */
@Override
public Map<String, Object> getComponentConfiguration() {
    Map<String, Object> conf = super.getComponentConfiguration();
    return TupleUtils.putTickFrequencyIntoComponentConfig(conf, flushIntervalInSeconds);
}
}
/**
 * Routes a tuple to target task(s) based on its grouping-field values.
 *
 * @param taskId the id of the emitting task (not used by this grouping).
 * @param values the emitted tuple values.
 * @return the list of task ids the tuple is routed to.
 */
@Override
public List<Integer> chooseTasks(int taskId, List<Object> values) {
    List<Object> groupValues = outFields.select(groupFields, values);
    int index = TupleUtils.chooseTaskIndex(groupValues, numTasks);
    return targetTasks.get(index);
}
/**
 * Maps a list of grouping keys onto a task index.
 *
 * <p>{@link Math#floorMod(int, int)} is used so a negative hash code still
 * yields a non-negative index.
 *
 * @param keys the grouping key values.
 * @param numTasks the number of candidate tasks; must be positive.
 * @return a task index in the range {@code [0, numTasks)}.
 */
public static <T> int chooseTaskIndex(List<T> keys, int numTasks) {
    int hash = listHashCode(keys);
    return Math.floorMod(hash, numTasks);
}
@Override public void execute(Tuple tuple) { if (TupleUtils.isTick(tuple)) { LOG.debug("Received tick tuple, triggering emit of current window counts"); emitCurrentWindowCounts(); } else { countObjAndAck(tuple); } }
/**
 * Adds the tick-tuple frequency ({@code flushIntervalSecs}) to the component
 * configuration inherited from the superclass.
 *
 * @return the augmented component configuration.
 */
@Override
public Map<String, Object> getComponentConfiguration() {
    Map<String, Object> parentConf = super.getComponentConfiguration();
    return TupleUtils.putTickFrequencyIntoComponentConfig(parentConf, flushIntervalSecs);
}
// NOTE(review): fragment of a larger method — the enclosing definition is not
// visible in this chunk.
// Sort task ids into natural order (null comparator => Comparable ordering)
// so the index chosen below is stable regardless of input order.
tasks.sort(null);
// Hash the component id onto one of the sorted tasks; presumably this pins a
// component to a deterministic task — TODO confirm against the enclosing method.
int taskIndex = TupleUtils.chooseTaskIndex(Collections.singletonList(componentId), tasks.size());
int taskId = tasks.get(taskIndex);
String host = null;
/**
 * {@inheritDoc}
 *
 * <p>Tick tuples are dispatched to {@code onTickTuple}; all other tuples are
 * handed to {@code process}.
 *
 * @param tuple the tuple to process.
 */
@Override
public void execute(final Tuple tuple) {
    if (!TupleUtils.isTick(tuple)) {
        process(tuple);
        return;
    }
    onTickTuple(tuple);
}
/**
 * Builds a fresh component configuration containing only the tick-tuple
 * frequency, set to {@code flushIntervalSecs}.
 *
 * @return a new configuration holding the tick frequency entry.
 */
@Override
public Map<String, Object> getComponentConfiguration() {
    Config conf = new Config();
    return TupleUtils.putTickFrequencyIntoComponentConfig(conf, flushIntervalSecs);
}
/**
 * Template method (design pattern): a tick tuple triggers an emit of the
 * current rankings, while any other tuple only updates the rankings.
 */
@Override
public final void execute(Tuple tuple, BasicOutputCollector collector) {
    if (!TupleUtils.isTick(tuple)) {
        updateRankingsWithTuple(tuple);
        return;
    }
    getLogger().debug("Received tick tuple, triggering emit of current rankings");
    emitRankings(collector);
}
/**
 * Augments the inherited component configuration with the tick-tuple interval
 * ({@code tickTupleInterval}).
 *
 * @return the augmented component configuration.
 */
@Override
public Map<String, Object> getComponentConfiguration() {
    Map<String, Object> inherited = super.getComponentConfiguration();
    return TupleUtils.putTickFrequencyIntoComponentConfig(inherited, tickTupleInterval);
}
@Override public void execute(Tuple tuple) { if (TupleUtils.isTick(tuple)) { // if we have a tick tuple then lets see if enough time has passed since our last batch was processed if ((System.currentTimeMillis() / 1000 - lastBatchProcessTimeSeconds) >= batchIntervalInSec) { LOGGER.debug("Received tick tuple and reached batch interval, executing batch"); finishBatch(); } else { LOGGER.debug("Received tick tuple, but haven't reached batch interval, nothing to do"); } } else { // for a regular tuple we add it to the queue and then see if our queue size exceeds batch size this.queue.add(tuple); int queueSize = this.queue.size(); if (LOGGER.isDebugEnabled()) { LOGGER.debug("Current queue size is " + queueSize + ", and batch size is " + batchSize); } if (queueSize >= batchSize) { LOGGER.debug("Queue Size is greater than or equal to batch size, executing batch"); finishBatch(); } } }
/**
 * Puts the tick-tuple frequency ({@code flushIntervalSecs}) into the component
 * configuration returned by the superclass.
 *
 * @return the component configuration with the tick frequency set.
 */
@Override
public Map<String, Object> getComponentConfiguration() {
    Map<String, Object> baseConf = super.getComponentConfiguration();
    return TupleUtils.putTickFrequencyIntoComponentConfig(baseConf, flushIntervalSecs);
}
private void ack(Tuple tuple) throws SolrServerException, IOException { if (commitStgy == null) { collector.ack(tuple); } else { final boolean isTickTuple = TupleUtils.isTick(tuple); if (!isTickTuple) { // Don't ack tick tuples toCommitTuples.add(tuple); commitStgy.update(); } if (isTickTuple || commitStgy.commit()) { solrClient.commit(solrMapper.getCollection()); ackCommittedTuples(); } } }
/**
 * Registers the tick-tuple frequency, preferring a positive interval from the
 * Solr configuration over the current {@code tickTupleInterval} default.
 *
 * @return the inherited configuration with the tick frequency set.
 */
@Override
public Map<String, Object> getComponentConfiguration() {
    // A positive value in the Solr config overrides the current interval.
    if (solrConfig.getTickTupleInterval() > 0) {
        this.tickTupleInterval = solrConfig.getTickTupleInterval();
    }
    Map<String, Object> conf = super.getComponentConfiguration();
    return TupleUtils.putTickFrequencyIntoComponentConfig(conf, tickTupleInterval);
}
@Override public void execute(Tuple tuple) { LOG.info("GOT {} at time {}", tuple, Time.currentTimeMillis()); if (!receivedAnyTuple.get() && Time.currentTimeSecs() > TICK_INTERVAL_SECS) { throw new RuntimeException("Simulated time was higher than " + TICK_INTERVAL_SECS + " at start of test." + " Increase the interval until this no longer occurs, but keep an eye on Storm's timeouts for e.g. worker heartbeat."); } receivedAnyTuple.set(true); if (tickTupleCount.get() > 3) { throw new RuntimeException("Unexpectedly many tick tuples"); } if (TupleUtils.isTick(tuple)) { tickTupleCount.incrementAndGet(); collector.ack(tuple); } else { if (tuple.getValues().size() == 1 && "val".equals(tuple.getValue(0))) { collector.ack(tuple); } else { nonTickTuple.set(tuple); } } }
/**
 * Sets the tick-tuple frequency to {@code flushIntervalInSeconds} on top of the
 * configuration inherited from the superclass.
 *
 * @return the augmented component configuration.
 */
@Override
public Map<String, Object> getComponentConfiguration() {
    Map<String, Object> superConf = super.getComponentConfiguration();
    return TupleUtils.putTickFrequencyIntoComponentConfig(superConf, flushIntervalInSeconds);
}
}
// NOTE(review): fragment — the enclosing method and the remainder of this
// if-block are outside the visible range.
String writerKey = null;
// A tick tuple forces a file-system flush; it carries no data, so it is acked
// immediately.
if (TupleUtils.isTick(tuple)) {
    LOG.debug("TICK! forcing a file system flush");
    this.collector.ack(tuple);
/**
 * Stores the tick-tuple frequency ({@code flushIntervalSecs}) in the component
 * configuration obtained from the superclass.
 *
 * @return the configuration with the tick frequency entry added.
 */
@Override
public Map<String, Object> getComponentConfiguration() {
    Map<String, Object> componentConf = super.getComponentConfiguration();
    return TupleUtils.putTickFrequencyIntoComponentConfig(componentConf, flushIntervalSecs);
}
@Override public void execute(Tuple tuple) { if (TupleUtils.isTick(tuple)) { return; } try { //get document Document doc = mapper.toDocument(tuple); //get query filter Bson filter = queryCreator.createFilter(tuple); mongoClient.update(filter, doc, upsert, many); this.collector.ack(tuple); } catch (Exception e) { this.collector.reportError(e); this.collector.fail(tuple); } }