/**
 * Copies a Scala {@code Seq} into a new mutable Java {@link List}, preserving order.
 *
 * @param scalaList the Scala sequence to convert
 * @param <V> element type
 * @return a new Java list containing the elements of {@code scalaList} in iteration order
 */
public static <V> List<V> scalaToJavaList(scala.collection.Seq<V> scalaList) {
    // Pre-size the backing array: the element count is known, so avoid growth copies.
    List<V> javaList = Lists.newArrayListWithCapacity(scalaList.size());
    scala.collection.Iterator<V> iterator = scalaList.iterator();
    while (iterator.hasNext()) {
        javaList.add(iterator.next());
    }
    return javaList;
}
}
/**
 * Returns the set of all topics in the Kafka cluster.
 *
 * @return unmodifiable set of all topics in the Kafka cluster
 *
 * @throws AdminOperationException
 *             if there is an issue retrieving the set of all topics
 */
public Set<String> getTopics() {
    LOG.debug("Retrieving all topics");
    try {
        // Materialize the Scala iterator into a Java set, then guard it against mutation.
        Set<String> allTopics = convertToJavaSet(zkUtils.getAllTopics().iterator());
        return Collections.unmodifiableSet(allTopics);
    } catch (ZkException e) {
        throw new AdminOperationException("Unable to retrieve all topics", e);
    }
}
/**
 * Returns the replication factor for the given topic.
 *
 * <p>NOTE(review): the factor is read from the replica set of partition 0 only — this assumes
 * partition 0 exists and that all partitions of the topic share the same replication factor
 * (Kafka assigns them uniformly at creation); confirm if manually reassigned topics are possible.
 *
 * @param topic
 *            a Kafka topic
 * @return the replication factor for the given topic
 *
 * @throws IllegalArgumentException
 *             if topic is null, empty or blank
 * @throws AdminOperationException
 *             if there is an issue retrieving the replication factor
 */
public int getTopicReplicationFactor(String topic) {
    // Braced guard clause (the original used an unbraced single-line if).
    if (StringUtils.isBlank(topic)) {
        throw new IllegalArgumentException("topic cannot be null, empty or blank");
    }
    try {
        return convertToJavaSet(zkUtils.getReplicasForPartition(topic, 0).iterator()).size();
    } catch (ZkException | KafkaException e) {
        throw new AdminOperationException("Unable to read replication factor for topic: " + topic, e);
    }
}
/**
 * Converts tokenizer output into a list of token surface strings.
 *
 * @param tokens Korean tokens (output of tokenize(CharSequence text)).
 * @param keepSpace whether whitespace tokens (KoreanPos.Space) are included in the result.
 * @return List of token strings, in input order.
 */
public static List<String> tokensToJavaStringList(Seq<KoreanToken> tokens, boolean keepSpace) {
    Iterator<KoreanToken> tokenized = tokens.iterator();
    List<String> output = new LinkedList<>();
    while (tokenized.hasNext()) {
        final KoreanToken token = tokenized.next();
        // Space() is a singleton enumeration value, so reference comparison is intentional here.
        if (keepSpace || token.pos() != KoreanPos.Space()) {
            output.add(token.text());
        }
    }
    return output;
}
// Materialize the Scala partition collection as a Java set so we can size-check it.
Set<Object> partitionList = convertToJavaSet(partitions.iterator());
// No partitions means the topic is unusable (or missing) — surface that as an admin error.
if (partitionList.isEmpty()) {
    throw new AdminOperationException("Partition count is 0 for topic: " + topic);
@Override public void run(ResultIterator resultIterator) throws Exception { ScalaRootScope rootScope = ((ScalaParserResult) resultIterator.getParserResult()).rootScope(); if (rootScope == null) { return; } rootScope.visibleDfns(ElementKind.CLASS); scala.collection.Seq<AstDfn> tmpls = rootScope.visibleDfns(ElementKind.CLASS); if (!tmpls.isEmpty()) { scala.collection.Iterator itr = tmpls.iterator(); while (itr.hasNext()) { AstDfn tmpl = (AstDfn) itr.next(); if (classes[0].length() > 0) { classes[0] = classes[0] + " "; // NOI18N } classes[0] = classes[0] + tmpl.getName().toString().replace('.', '/') + "*.class"; // NOI18N } } } });
@Override
public void incrementMessagesDropped(Throwable cause) {
    // Unwrap the Finagle transport wrapper so the counters reflect the underlying failure.
    Throwable root = cause;
    if (root instanceof FinagleSender.WrappedException) {
        root = root.getCause();
    }
    // One counter increment per prefix ("init") of the rendered cause chain.
    Seq<Traversable<String>> paths = Throwables.mkString(root).inits().toSeq();
    Iterator<Traversable<String>> pathIterator = paths.iterator();
    while (pathIterator.hasNext()) {
        messagesDropped.counter(pathIterator.next().toSeq()).incr();
    }
}
@Override
public void incrementMessagesDropped(Throwable cause) {
    // Peel off the Finagle wrapper exception so metrics name the real cause.
    Throwable actual = cause;
    if (actual instanceof FinagleSender.WrappedException) {
        actual = actual.getCause();
    }
    // Each prefix of the stringified cause chain gets its own dropped-message counter.
    Seq<Traversable<String>> chainPrefixes = Throwables.mkString(actual).inits().toSeq();
    Iterator<Traversable<String>> it = chainPrefixes.iterator();
    while (it.hasNext()) {
        messagesDropped.counter(it.next().toSeq()).incr();
    }
}
@Override
protected Seq<KoreanToken> perform(Seq<KoreanToken> tokens) {
    // Replace each token that carries a stem with its stemmed form; keep the rest as-is,
    // preserving input order.
    final KoreanToken[] result = new KoreanToken[tokens.length()];
    final Iterator<KoreanToken> it = tokens.iterator();
    for (int idx = 0; it.hasNext(); idx++) {
        final KoreanToken current = it.next();
        result[idx] = current.stem().nonEmpty() ? stem(current) : current;
    }
    // Hand the array back to Scala as an immutable-style Seq.
    return JavaConverters.asScalaBuffer(Arrays.asList(result)).toSeq();
}
@Override
public void incrementMessagesDropped(Throwable cause) {
    // Increment one counter per prefix ("init") of the rendered cause chain.
    Seq<Traversable<String>> prefixes = Throwables.mkString(cause).inits().toSeq();
    Iterator<Traversable<String>> prefixIterator = prefixes.iterator();
    while (prefixIterator.hasNext()) {
        messagesDropped.counter(prefixIterator.next().toSeq()).incr();
    }
}
/**
 * Transforms the tokenization output to List&lt;KoreanTokenJava&gt;.
 *
 * @param tokens Korean tokens (output of tokenize(CharSequence text)).
 * @param keepSpace whether whitespace tokens (KoreanPos.Space) are included in the result.
 * @return List of KoreanTokenJava.
 */
public static List<KoreanTokenJava> tokensToJavaKoreanTokenList(Seq<KoreanToken> tokens, boolean keepSpace) {
    Iterator<KoreanToken> tokenized = tokens.iterator();
    List<KoreanTokenJava> output = new LinkedList<>();
    while (tokenized.hasNext()) {
        KoreanToken token = tokenized.next();
        // Flatten the Scala Option to a plain String: empty string when the token has no stem.
        String stem = "";
        if (token.stem().nonEmpty()) {
            stem += token.stem().get();
        }
        // Space() is a singleton enumeration value, so reference comparison is intentional here.
        if (keepSpace || token.pos() != KoreanPos.Space()) {
            output.add(new KoreanTokenJava(
                token.text(),
                KoreanPosJava.valueOf(token.pos().toString()),
                token.offset(),
                token.length(),
                token.unknown(),
                stem
            ));
        }
    }
    return output;
}
/**
 * Converts extracted phrases back into tokens, sorted by their offset in the source text.
 *
 * @param phrases phrases to convert
 * @return tokens ordered by ascending offset
 */
private Seq<KoreanToken> convertPhrasesToTokens(Seq<KoreanPhrase> phrases) {
    KoreanToken[] tokens = new KoreanToken[phrases.length()];
    Iterator<KoreanPhrase> iterator = phrases.iterator();
    int i = 0;
    while (iterator.hasNext()) {
        KoreanPhrase phrase = iterator.next();
        // scala.Option.apply(null) == None: a phrase-derived token never carries a stem.
        tokens[i++] = new KoreanToken(phrase.text(), phrase.pos(), phrase.offset(), phrase.length(),
                scala.Option.apply(null), false);
    }
    // Integer.compare replaces the hand-rolled three-way comparison; same ordering, no
    // subtraction-overflow risk, and clearer intent.
    Arrays.sort(tokens, (o1, o2) -> Integer.compare(o1.offset(), o2.offset()));
    return JavaConverters.asScalaBuffer(Arrays.asList(tokens)).toSeq();
}
}
@Override
public void onTaskEnd(SparkListenerTaskEnd taskEnd) {
    // Record per-task shuffle-read record counts, but only for stage 1 (the shuffle-read
    // stage this test/listener is interested in).
    Iterator<AccumulatorV2<?, ?>> iterator = taskEnd.taskMetrics().accumulators().iterator();
    while (iterator.hasNext()) {
        // Wildcard-parameterized loop variable — the original used a raw AccumulatorV2,
        // which defeats generics checking and triggers unchecked warnings.
        AccumulatorV2<?, ?> accumulator = iterator.next();
        if (taskEnd.stageId() == 1
                && accumulator.isRegistered()
                && accumulator.name().isDefined()
                && accumulator.name().get().equals("internal.metrics.shuffle.read.recordsRead")) {
            stageOneShuffleReadTaskRecordsCountMap.put(taskEnd.taskInfo().taskId(), (Long) accumulator.value());
        }
    }
}
});