/**
 * Checks if the output table exists and is enabled.
 *
 * @param context The current context.
 * @throws IOException When the check fails.
 * @throws InterruptedException When the job is aborted.
 * @see OutputFormat#checkOutputSpecs(JobContext)
 */
@Override
public void checkOutputSpecs(JobContext context) throws IOException, InterruptedException {
  Configuration hConf = getConf();
  if (hConf == null) {
    hConf = context.getConfiguration();
  }
  // Manage the Connection as a resource too: the original code closed only the
  // Admin, leaking the underlying Connection (and its ZooKeeper/RPC resources)
  // on every call. Fully-qualified type to avoid requiring a new import.
  try (org.apache.hadoop.hbase.client.Connection connection =
         ConnectionFactory.createConnection(hConf);
       Admin admin = connection.getAdmin()) {
    TableName tableName = TableName.valueOf(hConf.get(OUTPUT_TABLE));
    if (!admin.tableExists(tableName)) {
      throw new TableNotFoundException("Can't write, table does not exist:" +
        tableName.getNameAsString());
    }
    if (!admin.isTableEnabled(tableName)) {
      throw new TableNotEnabledException("Can't write, table is not enabled: " +
        tableName.getNameAsString());
    }
  }
}
/**
 * Lazily creates and returns the HBase {@code TableOutputFormat} backing this
 * store, configured with the configuration captured by {@code setStoreLocation}.
 *
 * @return the (possibly cached) output format instance
 * @throws IOException declared for interface compatibility
 * @throws IllegalStateException if setStoreLocation has not been called yet
 */
@Override
public OutputFormat getOutputFormat() throws IOException {
  // Return the cached instance when it already exists.
  if (outputFormat != null) {
    return outputFormat;
  }
  // The configuration is populated by setStoreLocation; without it we cannot
  // build a usable output format.
  if (m_conf == null) {
    throw new IllegalStateException("setStoreLocation has not been called");
  }
  TableOutputFormat format = new TableOutputFormat();
  format.setConf(m_conf);
  this.outputFormat = format;
  return outputFormat;
}
/** * Converts runtime vertex data to persistent vertex data (includes * incoming and outgoing edge data) and writes it to HBase. * * @param collection Graph collection * @throws IOException if fetching mapreduce instance failed */ private void writeVertices(final GraphCollection collection) throws IOException { // write (vertex-data) to HBase table Job job = Job.getInstance(); job.getConfiguration() .set(TableOutputFormat.OUTPUT_TABLE, getHBaseConfig().getVertexTableName().getNameAsString()); collection.getVertices() .map(new BuildVertexMutation(getHBaseConfig().getVertexHandler())) .output(new HadoopOutputFormat<>(new TableOutputFormat<>(), job)); }
/** * Converts runtime graph data to persistent graph data (including vertex * and edge identifiers) and writes it to HBase. * * @param collection Graph collection * @throws IOException if fetching mapreduce instance failed */ private void writeGraphHeads(final GraphCollection collection) throws IOException { // write (graph-data) to HBase table Job job = Job.getInstance(); job.getConfiguration() .set(TableOutputFormat.OUTPUT_TABLE, getHBaseConfig().getGraphTableName().getNameAsString()); collection.getGraphHeads() .map(new BuildGraphHeadMutation(getHBaseConfig().getGraphHeadHandler())) .output(new HadoopOutputFormat<>(new TableOutputFormat<>(), job)); }
/** * Converts runtime edge data to persistent edge data (includes * source/target vertex data) and writes it to HBase. * * @param collection Graph collection * @throws IOException if fetching mapreduce instance failed */ private void writeEdges(final GraphCollection collection) throws IOException { // write (edge-data) to HBase table Job job = Job.getInstance(); job.getConfiguration() .set(TableOutputFormat.OUTPUT_TABLE, getHBaseConfig().getEdgeTableName().getNameAsString()); collection.getEdges() .map(new BuildEdgeMutation(getHBaseConfig().getEdgeHandler())) .output(new HadoopOutputFormat<>(new TableOutputFormat<>(), job)); } }
/**
 * Checks if the output table exists and is enabled.
 *
 * @param context The current context.
 * @throws IOException When the check fails.
 * @throws InterruptedException When the job is aborted.
 * @see OutputFormat#checkOutputSpecs(JobContext)
 */
@Override
public void checkOutputSpecs(JobContext context) throws IOException, InterruptedException {
  Configuration hConf = getConf();
  if (hConf == null) {
    hConf = context.getConfiguration();
  }
  // Manage the Connection as a resource too: the original code closed only the
  // Admin, leaking the underlying Connection (and its ZooKeeper/RPC resources)
  // on every call. Fully-qualified type to avoid requiring a new import.
  try (org.apache.hadoop.hbase.client.Connection connection =
         ConnectionFactory.createConnection(hConf);
       Admin admin = connection.getAdmin()) {
    TableName tableName = TableName.valueOf(hConf.get(OUTPUT_TABLE));
    if (!admin.tableExists(tableName)) {
      throw new TableNotFoundException("Can't write, table does not exist:" +
        tableName.getNameAsString());
    }
    if (!admin.isTableEnabled(tableName)) {
      throw new TableNotEnabledException("Can't write, table is not enabled: " +
        tableName.getNameAsString());
    }
  }
}