private static Seq<String> gaugeName(String name) {
    return scala.collection.JavaConversions.asScalaBuffer(Arrays.asList(name)).toList();
}
@Override
public Object deserialize(JsonNode n, ObjectMapper mapper) {
    List<Object> o = new ArrayList<Object>();
    try {
        logger.debug("using custom array deserializer");
        for (int i = 0; i < n.size(); i++) {
            o.add(parent.deserialize(n.get(i), mapper));
        }
    } catch (Exception e) {
        logger.error("exception deserializing Collection {}", e.getMessage());
        o = null;
    }
    if (o != null) {
        return scala.collection.JavaConversions.asScalaBuffer(o).toList();
    }
    return null;
}
@SuppressWarnings("unchecked") @Override public Object deserialize(JsonNode n, ObjectMapper mapper) { org.apache.commons.lang3.tuple.Pair<String, Object> deserializeObject = TableDisplayDeSerializer.getDeserializeObject(parent, n, mapper); String subtype = deserializeObject.getLeft(); if (subtype != null && subtype.equals(TableDisplay.DICTIONARY_SUBTYPE)) { return JavaConverters.mapAsScalaMapConverter((Map<String, Object>) deserializeObject.getRight()).asScala().toMap(Predef.<Tuple2<String, Object>>conforms()); } else if (subtype != null && subtype.equals(TableDisplay.LIST_OF_MAPS_SUBTYPE)) { List<Map<String, Object>> rows = (List<Map<String, Object>>) deserializeObject.getRight(); List<Object> oo = new ArrayList<Object>(); for (Map<String, Object> row : rows) { oo.add(JavaConverters.mapAsScalaMapConverter(row).asScala().toMap(Predef.<Tuple2<String, Object>>conforms())); } return scala.collection.JavaConversions.collectionAsScalaIterable(oo); } else if (subtype != null && subtype.equals(TableDisplay.MATRIX_SUBTYPE)) { List<List<?>> matrix = (List<List<?>>) deserializeObject.getRight(); ArrayList<Object> ll = new ArrayList<Object>(); for (List<?> ob : matrix) { ll.add(scala.collection.JavaConversions.asScalaBuffer(ob).toList()); } return scala.collection.JavaConversions.asScalaBuffer(ll).toList(); } return deserializeObject.getRight(); }
public void start() {
    Duration[] defaultLatchIntervals = { Duration.apply(1, TimeUnit.MINUTES) };
    @SuppressWarnings("deprecation")
    AdminServiceFactory adminServiceFactory = new AdminServiceFactory(
            this.mPort,
            20,
            List$.MODULE$.<StatsFactory>empty(),
            Option.<String>empty(),
            List$.MODULE$.<Regex>empty(),
            Map$.MODULE$.<String, CustomHttpHandler>empty(),
            JavaConversions.asScalaBuffer(Arrays.asList(defaultLatchIntervals)).toList());
    RuntimeEnvironment runtimeEnvironment = new RuntimeEnvironment(this);
    adminServiceFactory.apply(runtimeEnvironment);
    try {
        Properties properties = new Properties();
        properties.load(this.getClass().getResource("build.properties").openStream());
        String buildRevision = properties.getProperty("build_revision", "unknown");
        LOG.info("build.properties build_revision: {}", buildRevision);
        StatsUtil.setLabel("secor.build_revision", buildRevision);
    } catch (Throwable t) {
        LOG.error("Failed to load properties from build.properties", t);
    }
}
scala.collection.JavaConversions.asScalaBuffer(Arrays.asList("num_streams")).toList(); cacheStatReceiver.provideGauge(numCachedStreamsGaugeName, new Function0<Object>() { @Override scala.collection.JavaConversions.asScalaBuffer(Arrays.asList("num_hosts")).toList(); cacheStatReceiver.provideGauge(numCachedHostsGaugeName, new Function0<Object>() { @Override
@Override
public TopicMetadataResponse send(TopicMetadataRequest request) {
    java.util.List<String> topics = request.topics();
    TopicMetadata[] topicMetadataArray = new TopicMetadata[topics.size()];
    for (int i = 0; i < topicMetadataArray.length; i++) {
        String topic = topics.get(i);
        if (!topic.equals(topicName)) {
            topicMetadataArray[i] = new TopicMetadata(topic, null, Errors.UNKNOWN_TOPIC_OR_PARTITION.code());
        } else {
            PartitionMetadata[] partitionMetadataArray = new PartitionMetadata[partitionCount];
            for (int j = 0; j < partitionCount; j++) {
                java.util.List<BrokerEndPoint> emptyJavaList = Collections.emptyList();
                List<BrokerEndPoint> emptyScalaList = JavaConversions.asScalaBuffer(emptyJavaList).toList();
                partitionMetadataArray[j] = new PartitionMetadata(j,
                        Some.apply(brokerArray[partitionLeaderIndices[j]]),
                        emptyScalaList, emptyScalaList, Errors.NONE.code());
            }
            Seq<PartitionMetadata> partitionsMetadata = List.fromArray(partitionMetadataArray);
            topicMetadataArray[i] = new TopicMetadata(topic, partitionsMetadata, Errors.NONE.code());
        }
    }
    Seq<BrokerEndPoint> brokers = List.fromArray(brokerArray);
    Seq<TopicMetadata> topicsMetadata = List.fromArray(topicMetadataArray);
    return new TopicMetadataResponse(new kafka.api.TopicMetadataResponse(brokers, topicsMetadata, -1));
}
/**
 * Converts a Java List to a Scala Seq.
 */
public static <T> scala.collection.Seq<T> toSeq(java.util.List<T> list) {
    return scala.collection.JavaConverters.asScalaBufferConverter(list).asScala().toList();
}
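// Usage sketch for toSeq above (assumed caller code, not from the source):
// a literal java.util.List is converted and read back through the Scala Seq API.
scala.collection.Seq<String> names = toSeq(java.util.Arrays.asList("alice", "bob"));
assert names.length() == 2;           // Scala Seq exposes length()
assert names.apply(0).equals("alice"); // and positional access via apply(int)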
/**
 * Converts the varargs to a Scala buffer,
 * taking care of wrapping the varargs into an intermediate list if necessary.
 *
 * @param args the message arguments
 * @return Scala type for message processing
 */
private static Seq<Object> convertArgsToScalaBuffer(final Object... args) {
    return scala.collection.JavaConverters.asScalaBufferConverter(wrapArgsToListIfNeeded(args)).asScala().toList();
}
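// Hypothetical sketch of the wrapArgsToListIfNeeded helper referenced above
// (its body is not shown in the source): it guards against a null varargs
// array and wraps the arguments in an intermediate java.util.List.
private static java.util.List<Object> wrapArgsToListIfNeeded(final Object... args) {
    return args == null
            ? java.util.Collections.emptyList()
            : java.util.Arrays.asList(args);
}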
@Override
public boolean persist(List listEntity, EntityMetadata m, SparkClient sparkClient) {
    Seq s = scala.collection.JavaConversions.asScalaBuffer(listEntity).toList();
    ClassTag tag = scala.reflect.ClassTag$.MODULE$.apply(m.getEntityClazz());
    JavaRDD personRDD = sparkClient.sparkContext.parallelize(s, 1, tag).toJavaRDD();
    DataFrame df = sparkClient.sqlContext.createDataFrame(personRDD, m.getEntityClazz());
    String outputFilePath = getOutputFilePath(sparkClient.properties);
    String ext = (String) sparkClient.properties.get("format");
    FileType fileType = FileFormatConstants.extension.get(ext);
    switch (fileType) {
        case CSV:
            return writeDataInCsvFile(df, outputFilePath);
        case JSON:
            return writeDataInJsonFile(df, outputFilePath);
        default:
            throw new UnsupportedOperationException("Files of type " + ext + " are not yet supported.");
    }
}
Seq s = scala.collection.JavaConversions.asScalaBuffer(listEntity).toList();
ClassTag tag = scala.reflect.ClassTag$.MODULE$.apply(m.getEntityClazz());
JavaRDD javaRDD = sparkClient.sparkContext.parallelize(s, 1, tag).toJavaRDD();
@Override
public boolean persist(List listEntity, EntityMetadata m, SparkClient sparkClient) {
    try {
        Seq s = scala.collection.JavaConversions.asScalaBuffer(listEntity).toList();
        ClassTag tag = scala.reflect.ClassTag$.MODULE$.apply(m.getEntityClazz());
        JavaRDD personRDD = sparkClient.sparkContext.parallelize(s, 1, tag).toJavaRDD();
        DataFrame df = sparkClient.sqlContext.createDataFrame(personRDD, m.getEntityClazz());
        sparkClient.sqlContext.sql("use " + m.getSchema());
        if (logger.isDebugEnabled()) {
            logger.debug("Below are the registered tables with the hive context: ");
            sparkClient.sqlContext.sql("show tables").show();
        }
        df.write().insertInto(m.getTableName());
        return true;
    } catch (Exception e) {
        throw new KunderaException("Cannot persist object(s)", e);
    }
}
@Override
public boolean persist(List listEntity, EntityMetadata m, SparkClient sparkClient) {
    try {
        Seq s = scala.collection.JavaConversions.asScalaBuffer(listEntity).toList();
        ClassTag tag = scala.reflect.ClassTag$.MODULE$.apply(m.getEntityClazz());
        JavaRDD personRDD = sparkClient.sparkContext.parallelize(s, 1, tag).toJavaRDD();
        CassandraJavaUtil.javaFunctions(personRDD)
                .writerBuilder(m.getSchema(), m.getTableName(), CassandraJavaUtil.mapToRow(m.getEntityClazz()))
                .saveToCassandra();
        return true;
    } catch (Exception e) {
        throw new KunderaException("Cannot persist object(s)", e);
    }
}
@Override
public scala.collection.immutable.List<DStream<?>> dependencies() {
    return scala.collection.JavaConversions.asScalaBuffer(
            Collections.<DStream<?>>singletonList(parent)).toList();
}
@Override
public scala.collection.immutable.List<Class<?>> classesFromContext(Application app, ServletConfig sc) {
    List<Class<?>> classes = Lists.newArrayList();
    addJaxrsClasses(injector, classes);
    return JavaConverters.asScalaBufferConverter(classes).asScala().toList();
}
private List<Class<?>> filterClasses(final List<Class<?>> classes) {
    final Iterator<Class<?>> eachClass = classes.iterator();
    final ArrayList<Class<?>> filteredClasses = new ArrayList<Class<?>>();
    while (eachClass.hasNext()) {
        Class<?> clazz = eachClass.next();
        // if (clazz.getName().indexOf("fluxtream") != -1)
        //     continue;
        filteredClasses.add(clazz);
    }
    return JavaConversions.asScalaBuffer(filteredClasses).toList();
}
public SwaggerInitializer(ServerData serverData) {
    this.resourceClasses = JavaConversions.asScalaBuffer(
            serverData.getResources()
                    .stream()
                    .map(resource -> resource.getClass())
                    .collect(Collectors.<Class<?>>toList()))
            .toList();
    this.baseUrlPattern = serverData.getBaseUrlPattern();
}
private void withLock(Interval... lockedRange) {
    Buffer<Interval> scalaBuffer = JavaConversions.asScalaBuffer(Lists.newArrayList(lockedRange));
    when(timesheetLockService.findLockedDatesInRange(any(Date.class), any(Date.class), any(User.class)))
            .thenReturn(scalaBuffer.toList());
}
public static final <A, B> Operation<java.util.List<String>, B> useThys(MarkupProcessor<A, B> processor) {
    Operation<scala.collection.immutable.List<String>, B> operation = Operation.UseThys(
            processor.init(), func(processor::markup), func(processor::finish));
    return operation.<java.util.List<String>, B>map(
            func(x -> scala.collection.JavaConversions.asScalaBuffer(x).toList()),
            func(x -> x));
}
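// Note: scala.collection.JavaConversions, used in several snippets above, is
// deprecated since Scala 2.12 and removed in 2.13. A minimal sketch of the same
// Java-List-to-Scala-List conversion via the explicit converter API (the class
// name ScalaSeqs is illustrative; Scala 2.13+ moves this to scala.jdk.CollectionConverters):
import scala.collection.JavaConverters;

public final class ScalaSeqs {
    private ScalaSeqs() {}

    // Converts a java.util.List to an immutable scala.collection.immutable.List.
    public static <T> scala.collection.immutable.List<T> toScalaList(java.util.List<T> list) {
        return JavaConverters.asScalaBufferConverter(list).asScala().toList();
    }
}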