  @Override
  public void apply() {
    // the table should not have initialized=true
    kvTable.write("initialized", "false");
  }
});
  @Override
  public void apply() throws Exception {
    // write a value
    kvTable.write(KEY2, VAL1);
  }
});
@Override
protected void reduce(Text key, Iterable<LongWritable> values, Context context)
    throws IOException, InterruptedException {
  long sum = 0;
  for (LongWritable value : values) {
    sum += value.get();
  }
  context.write(key, new LongWritable(sum));
  clusterNameTable.write(prefix + "reducer.cluster.name", clusterName);
}
  @Override
  public void apply() throws Exception {
    // write keys 0..999 to the table, each value equal to its key
    for (int i = 0; i < 1000; i++) {
      byte[] key = Bytes.toBytes(i);
      t.write(key, key);
    }
  }
});
  @Override
  public void apply() throws Exception {
    // write 1000 random values to the table and remember them in a set
    Random rand = new Random(451);
    for (int i = 0; i < 1000; i++) {
      long keyLong = rand.nextLong();
      byte[] key = Bytes.toBytes(keyLong);
      t.write(key, key);
      keysWritten.add(keyLong);
    }
  }
});
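A read-back pass pairs naturally with the snippet above. This is a minimal sketch, assuming the same table t and the keysWritten set are in scope and that JUnit's Assert is available:

  @Override
  public void apply() throws Exception {
    // read every remembered key back and check that the value equals the key
    for (Long keyLong : keysWritten) {
      byte[] key = Bytes.toBytes(keyLong);
      Assert.assertArrayEquals(key, t.read(key));
    }
  }
});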
@Override
public void store(final ProgramId serviceId, final RouteConfig routeConfig) {
  Transactionals.execute(transactional, context -> {
    getRouteTable(context).write(ServiceDiscoverable.getName(serviceId),
                                 GSON.toJson(routeConfig.getRoutes()));
  });
}
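A retrieval counterpart might look like the sketch below. The value-returning form of Transactionals.execute exists in CDAP, but ROUTE_MAP_TYPE (a Gson type token for the routes map) and the RouteConfig constructor taking that map are assumptions for illustration:

@Override
public RouteConfig fetch(final ProgramId serviceId) {
  return Transactionals.execute(transactional, context -> {
    // a null read means no route config has been stored for this service
    byte[] value = getRouteTable(context).read(ServiceDiscoverable.getName(serviceId));
    if (value == null) {
      return null;
    }
    // assumed: ROUTE_MAP_TYPE describes the map written by store() above
    Map<String, Integer> routes = GSON.fromJson(Bytes.toString(value), ROUTE_MAP_TYPE);
    return new RouteConfig(routes);  // assumed constructor
  });
}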
  @Override
  public void run(DatasetContext context) throws Exception {
    KeyValueTable table = context.getDataset(tableName);
    table.write(key, value);
  }
});
  @Override
  public void run(DatasetContext context) throws Exception {
    KeyValueTable table = context.getDataset(DATASET);
    table.write(key, value);
  }
});
@WriteOnly
@Override
public void write(String key, T object) {
  kvTable.write(Bytes.toBytes(key), encode(object));
}
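The read path is the mirror image; a minimal sketch, assuming a decode counterpart to encode and CDAP's @ReadOnly annotation:

@ReadOnly
@Override
public T read(String key) {
  // a missing key comes back as null, so guard before decoding
  byte[] bytes = kvTable.read(Bytes.toBytes(key));
  return bytes == null ? null : decode(bytes);
}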
  @Override
  public void run(DatasetContext context) throws Exception {
    KeyValueTable table = context.getDataset(DATASET_NAME);
    // Write only if the dataset instance is the same as the one obtained in initialize().
    if (datasetHashCode == System.identityHashCode(table)) {
      table.write(DATASET_TEST_KEY, valueToWriteOnRun);
    }
  }
});
  @Override
  public void run(DatasetContext context) throws Exception {
    KeyValueTable kvTable = context.getDataset(outputTable);
    for (Map.Entry<String, String> entry : aggregated.collectAsMap().entrySet()) {
      kvTable.write(entry.getKey(), entry.getValue());
    }
  }
});
  @Path(ENDPOINT)
  @GET
  public void handler(HttpServiceRequest request, HttpServiceResponder responder) {
    LOG.info("Endpoint {} called in service {}", ENDPOINT, NAME);
    table = getContext().getDataset(DATASET_NAME);
    table.write("no-op-service", "no-op-service");
    responder.sendStatus(200);
  }
}
@PUT @Path("write/{data}") public void write(HttpServiceRequest request, HttpServiceResponder responder, @PathParam("data") String data) { String appName = getContext().getApplicationSpecification().getName(); KeyValueTable dataset = getContext().getDataset(tableName); dataset.write(appName + "." + data, data); responder.sendStatus(200); } }
  @Override
  public void destroy() {
    super.destroy();
    KeyValueTable table = getContext().getDataset(TRANSACTIONS_DATASET_NAME);
    table.write(DESTROY_KEY, VALUE);
  }
}
  @Override
  public void run(DatasetContext context) throws Exception {
    KeyValueTable clusterNameTable = context.getDataset(CLUSTER_NAME_TABLE);
    WorkflowInfo workflowInfo = getContext().getWorkflowInfo();
    String prefix = workflowInfo == null ? "" : workflowInfo.getName() + ".";
    clusterNameTable.write(prefix + "action.cluster.name", getContext().getClusterName());
  }
});
private void addDummyData(NamespaceId namespaceId, String datasetName) throws Exception {
  DataSetManager<KeyValueTable> tableManager = getDataset(namespaceId.dataset(datasetName));
  KeyValueTable inputTable = tableManager.get();
  inputTable.write("hello", "world");
  tableManager.flush();
}
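A read-back helper along the same lines can confirm the write landed; verifyDummyData is a hypothetical name, and JUnit's Assert is assumed to be available:

private void verifyDummyData(NamespaceId namespaceId, String datasetName) throws Exception {
  DataSetManager<KeyValueTable> tableManager = getDataset(namespaceId.dataset(datasetName));
  KeyValueTable inputTable = tableManager.get();
  // read(String) returns the raw bytes; convert back to a String before comparing
  Assert.assertEquals("world", Bytes.toString(inputTable.read("hello")));
}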
  @Override
  public void run(DatasetContext context) throws Exception {
    KeyValueTable clusterNameTable = context.getDataset(CLUSTER_NAME_TABLE);
    WorkflowInfo workflowInfo = sec.getWorkflowInfo();
    String prefix = workflowInfo == null ? "" : workflowInfo.getName() + ".";
    clusterNameTable.write(prefix + "spark.cluster.name", sec.getClusterName());
  }
});
  @Override
  public void destroy() {
    KeyValueTable kv = getContext().getDataset("recorder");
    kv.write("status", getContext().getState().getStatus().name());
  }
}
@Override
protected void initialize() throws Exception {
  Job job = getContext().getHadoopJob();
  job.setMapperClass(ClusterNameMapper.class);
  job.setReducerClass(ClusterNameReducer.class);
  getContext().addInput(Input.ofDataset(INPUT_FILE_SET));
  getContext().addOutput(Output.ofDataset(OUTPUT_FILE_SET));
  WorkflowInfo workflowInfo = getContext().getWorkflowInfo();
  String prefix = workflowInfo == null ? "" : workflowInfo.getName() + ".";
  clusterNameTable.write(prefix + "mr.client.cluster.name", getContext().getClusterName());
}