private static JavaPairRDD<String,float[]> readFeaturesRDD(JavaSparkContext sparkContext, Path path) {
  log.info("Loading features RDD from {}", path);
  JavaRDD<String> featureLines = sparkContext.textFile(path.toString());
  return featureLines.mapToPair(line -> {
    // Each line is a JSON array of [ID, featureVector]
    List<?> update = TextUtils.readJSON(line, List.class);
    String key = update.get(0).toString();
    float[] vector = TextUtils.convertViaJSON(update.get(1), float[].class);
    return new Tuple2<>(key, vector);
  });
}
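// A minimal usage sketch, not from the source: assumes a local Spark context
// and a directory of JSON lines shaped like ["user-1",[0.5,1.25,-0.75]].
// The path "hdfs:///model/X" is hypothetical.
JavaSparkContext sparkContext = new JavaSparkContext("local[*]", "FeatureLoad");
JavaPairRDD<String,float[]> features =
    readFeaturesRDD(sparkContext, new Path("hdfs:///model/X"));
log.info("Loaded {} feature vectors", features.count());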
@Override
public void consumeKeyMessage(String key, String message, Configuration hadoopConf) throws IOException {
  switch (key) {
    case "UP":
      if (model == null) {
        return; // No model to interpret with yet, so skip it
      }
      // Update is a JSON array of [id, center, count]
      List<?> update = TextUtils.readJSON(message, List.class);
      int id = Integer.parseInt(update.get(0).toString());
      double[] center = TextUtils.convertViaJSON(update.get(1), double[].class);
      long count = Long.parseLong(update.get(2).toString());
      model.update(id, center, count);
      break;
    case "MODEL":
    case "MODEL-REF":
      log.info("Loading new model");
      PMML pmml = AppPMMLUtils.readPMMLFromUpdateKeyMessage(key, message, hadoopConf);
      if (pmml == null) {
        return;
      }
      KMeansPMMLUtils.validatePMMLVsSchema(pmml, inputSchema);
      List<ClusterInfo> clusters = KMeansPMMLUtils.read(pmml);
      model = new KMeansServingModel(clusters, inputSchema);
      log.info("New model: {}", model);
      break;
    default:
      throw new IllegalArgumentException("Bad key: " + key);
  }
}
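// Illustrative call, assuming the [id, center, count] "UP" format parsed
// above; the literal values are hypothetical. This would invoke
// model.update(3, new double[]{1.0, 2.0}, 5L).
Configuration hadoopConf = new Configuration();
consumeKeyMessage("UP", "[3,[1.0,2.0],5]", hadoopConf);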
List<?> update = TextUtils.readJSON(message, List.class);
List<?> update = TextUtils.readJSON(message, List.class);
int treeID = Integer.parseInt(update.get(0).toString());
String nodeID = update.get(1).toString();
private static Collection<String> checkFeatures(Path path, Collection<String> previousIDs) throws IOException {
  Collection<String> seenIDs = new HashSet<>();
  for (Path file : IOUtils.listFiles(path, "part-*")) {
    Path uncompressedFile = copyAndUncompress(file);
    Files.lines(uncompressedFile).forEach(line -> {
      // Each line is a JSON array of [ID, featureVector]
      List<?> update = TextUtils.readJSON(line, List.class);
      seenIDs.add(update.get(0).toString());
      assertEquals(FEATURES, TextUtils.convertViaJSON(update.get(1), float[].class).length);
    });
    Files.delete(uncompressedFile);
  }
  assertNotEquals(0, seenIDs.size());
  assertTrue(seenIDs.containsAll(previousIDs));
  return seenIDs;
}
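// Hypothetical example of a part-* line that checkFeatures() accepts, assuming
// FEATURES == 2 (the ID and values are made up):
// ["user-42",[0.5,-1.25]]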
KeyMessage<String, String> update = updates.get(i);
assertEquals("UP", update.getKey());
List<?> fields = TextUtils.readJSON(update.getMessage(), List.class);
int treeID = (Integer) fields.get(0);
String nodeID = fields.get(1).toString();

KeyMessage<String, String> update1 = updates.get(i);
KeyMessage<String, String> update2 = updates.get(i + 1);
List<?> fields1 = TextUtils.readJSON(update1.getMessage(), List.class);
List<?> fields2 = TextUtils.readJSON(update2.getMessage(), List.class);
int count1 = (Integer) fields1.get(3);
int count2 = (Integer) fields2.get(3);
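// Sketch of the "UP" payload these assertions assume (inferred from the reads
// above, not stated in the excerpts): a JSON array with a tree ID at index 0,
// a node ID at index 1, and a count at index 3; index 2 is never read here.
// A hypothetical message consistent with those reads: [0,"r-",null,25]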
List<?> update = TextUtils.readJSON(value, List.class);
if ("X".equals(update.get(0).toString())) {
  String userID = update.get(1).toString();
  // ...
}
List<?> update = TextUtils.readJSON(updates.get(i).getMessage(), List.class);
boolean isX = "X".equals(update.get(0).toString());
String id = update.get(1).toString();
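// Assumed ALS update shape, inferred from these excerpts rather than stated:
// a JSON array of [matrix, id, featureVector], where "X" marks a user-feature
// update and "Y" an item-feature update. A hypothetical example:
// ["X","user-1",[0.5,1.25,-0.75]]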
assertNotNull(seenProducts);
List<?> update = TextUtils.readJSON(value, List.class);
KeyMessage<String,String> update = updates.get(i);
assertEquals("UP", update.getKey());
List<?> fields = TextUtils.readJSON(update.getMessage(), List.class);
int clusterID = (Integer) fields.get(0);
double[] updatedCenter = TextUtils.convertViaJSON(fields.get(1), double[].class);
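// Why the cast works (a general JSON-parsing fact, not specific to this code
// base): small JSON integers deserialize as java.lang.Integer, so fields.get(0)
// can be cast directly, while the center round-trips to double[] through
// convertViaJSON. A hypothetical payload matching the [id, center, count]
// format above: [1,[0.5,0.5],10]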