/** {@inheritDoc} */
@Override
public FijiTableLayout getLayout() {
  // Pure delegation: the wrapped table owns the layout.
  return mTable.getLayout();
}
// NOTE(review): fragment of an anonymous class — the "new <Type>() {" opener lies
// outside this excerpt (the trailing "};" closes it). The callback itself only
// opens and returns a reader for the supplied table; the caller owns closing it.
@Override public FijiTableReader run(final FijiTable fijiTable) throws Exception { return fijiTable.openTableReader(); } };
/** {@inheritDoc} */
@Override
public FijiTableWriter openTableWriter() {
  // Writer creation is delegated to the wrapped table; the caller owns the writer.
  return mTable.openTableWriter();
}
// NOTE(review): truncated code fragment — the argument list
// "mapTypeFamilies, groupTypeColumns, ..." is cut off from its enclosing call and
// neither try block is closed in this excerpt; do not treat as compilable.
// NOTE(review): table.release() is invoked immediately after opening the reader —
// presumably safe only if release() just drops a ref-counted handle while the open
// reader keeps the table alive; verify against FijiTable reference-counting docs.
final FijiTable table = fiji.openTable(argURI.getTable()); try { final FijiTableLayout tableLayout = table.getLayout(); mapTypeFamilies, groupTypeColumns, mMaxVersions, mMinTimestamp, mMaxTimestamp); final FijiTableReader reader = table.openTableReader(); try { table.release();
// NOTE(review): truncated code fragment — several braces opened here are never
// closed in this excerpt, and "restCell" is referenced without a visible definition.
// Visible behavior: validates each qualified column exists in the table layout
// (400 BAD_REQUEST otherwise), then special-cases COUNTER columns by parsing the
// REST cell value as JSON and checking it is an integral number.
// NOTE(review): the writer opened on this line is not closed anywhere in the
// excerpt — confirm the surrounding (unseen) code closes it in a finally block.
final FijiTableWriter writer = fijiTable.openTableWriter(); for (Entry<String, List<FijiRestCell>> qualifiedCell : qualifiedCells.entrySet()) { final FijiColumnName column = new FijiColumnName(columnFamily, qualifiedCell.getKey()); if (!fijiTable.getLayout().exists(column)) { throw new WebApplicationException(new IllegalArgumentException( "Specified column does not exist: " + column), Response.Status.BAD_REQUEST); if (SchemaType.COUNTER == fijiTable.getLayout().getCellSchema(column).getType()) { JsonNode parsedCounterValue = BASIC_MAPPER.valueToTree(restCell.getValue()); if (parsedCounterValue.isIntegralNumber()) {
/**
 * Constructs a new context that can write cells to an HFile that can be loaded into an HBase
 * table.
 *
 * <p>Opens the Fiji instance, the output table and a reader on it; on any failure after a
 * resource has been acquired, the already-acquired resources are released before the
 * exception propagates, so a failed construction does not leak handles.
 *
 * @param hadoopContext is the Hadoop {@link TaskInputOutputContext} that will be used to perform
 *     the writes.
 * @throws IOException on I/O error.
 */
public HFileWriterContext(TaskInputOutputContext<?, ?, ?, ?> hadoopContext) throws IOException {
  super(hadoopContext);
  // Copy the configuration so later mutations by this context cannot affect the task's copy.
  final Configuration conf = new Configuration(hadoopContext.getConfiguration());
  final FijiURI outputURI =
      FijiURI.newBuilder(conf.get(FijiConfKeys.FIJI_OUTPUT_TABLE_URI)).build();
  mFiji = Fiji.Factory.open(outputURI, conf);
  try {
    mTable = mFiji.openTable(outputURI.getTable());
    try {
      mReader = mTable.openTableReader();
      mColumnNameTranslator = HBaseColumnNameTranslator.from(mTable.getLayout());
      mEntityIdFactory = EntityIdFactory.getFactory(mTable.getLayout());
    } catch (IOException | RuntimeException e) {
      // Constructor is failing: undo the table acquisition before rethrowing.
      mTable.release();
      throw e;
    }
  } catch (IOException | RuntimeException e) {
    // Constructor is failing: undo the Fiji instance acquisition before rethrowing.
    mFiji.release();
    throw e;
  }
}
// NOTE(review): truncated code fragment — the switch cases and the two leading
// "+" string-concatenation pieces are severed from their enclosing statements;
// do not treat as compilable.
// Visible behavior: when auto-splitting is requested, one HFile start key is
// collected per HRegion of the target table, then behavior branches on the
// table's row-key encoding (RAW vs. formatted) via FijiTableLayout.getEncoding.
if (NUM_SPLITS_AUTO == nsplits) { final List<HFileKeyValue> startKeys = Lists.newArrayList(); for (FijiRegion region : table.getRegions()) { startKeys.add(HFileKeyValue.createFromRowKey(region.getStartKey())); switch (FijiTableLayout.getEncoding(table.getLayout().getDesc().getKeysFormat())) { case RAW: { + "determined by the number of HRegions in the HTable. " + "Use an HFileMapReduceJobOutput constructor that enables auto splitting.", table.getName())); + FijiTableLayout.getEncoding(table.getLayout().getDesc().getKeysFormat()));
/**
 * Constructs a new context that can write cells directly to a Fiji table.
 *
 * <p>Opens the Fiji instance, the output table and a buffered writer on it; on any failure
 * after a resource has been acquired, the already-acquired resources are released before the
 * exception propagates, so a failed construction does not leak handles.
 *
 * @param hadoopContext is the Hadoop {@link TaskInputOutputContext} that will be used to perform
 *     the writes.
 * @throws IOException on I/O error.
 */
public DirectFijiTableWriterContext(TaskInputOutputContext<?, ?, ?, ?> hadoopContext)
    throws IOException {
  super(hadoopContext);
  // Copy the configuration so later mutations by this context cannot affect the task's copy.
  final Configuration conf = new Configuration(hadoopContext.getConfiguration());
  final FijiURI outputURI =
      FijiURI.newBuilder(conf.get(FijiConfKeys.FIJI_OUTPUT_TABLE_URI)).build();
  mFiji = Fiji.Factory.open(outputURI, conf);
  try {
    mTable = mFiji.openTable(outputURI.getTable());
    try {
      mWriter = mTable.getWriterFactory().openBufferedWriter();
      mEntityIdFactory = EntityIdFactory.getFactory(mTable.getLayout());
    } catch (IOException | RuntimeException e) {
      // Constructor is failing: undo the table acquisition before rethrowing.
      mTable.release();
      throw e;
    }
  } catch (IOException | RuntimeException e) {
    // Constructor is failing: undo the Fiji instance acquisition before rethrowing.
    mFiji.release();
    throw e;
  }
}
// NOTE(review): truncated code fragment — the for loop and try block are never
// closed in this excerpt, and "fullName" / "emailSynth" are used without visible
// definitions; presumably they are bound earlier in the unseen loop body.
// Visible behavior: for each synthetic user, derives an email from the full name
// ("First Last" -> "First.Last@<synthesized-domain>"), uses the email as the row
// key, and writes info:name and info:email cells.
// NOTE(review): tableWriter's close is not visible here — confirm the unseen
// remainder closes it in a finally block.
final FijiTableWriter tableWriter = mTable.openTableWriter(); try { for (int iuser = 0; iuser < mNumUsers; iuser++) { final String email = EmailSynthesizer.formatEmail(fullName.replace(" ", "."), emailSynth.synthesizeDomain()); final EntityId entityId = mTable.getEntityId(email); tableWriter.put(entityId, "info", "name", fullName); tableWriter.put(entityId, "info", "email", email);
// NOTE(review): truncated code fragment — the finally block is not closed in
// this excerpt, and "builder" / "jobId" come from unseen surrounding code.
// Visible behavior: performs a single-row get for the row keyed by jobId and
// correctly closes the reader in a finally block.
final FijiDataRequest request = builder.build(); final FijiRowData data; final FijiTableReader reader = mFijiTable.openTableReader(); try { data = reader.get(mFijiTable.getEntityId(jobId), request); } finally { reader.close();
// NOTE(review): truncated code fragment — the loops opened here are not closed
// in this excerpt.
// Visible behavior: prints one fully-qualified column URI per line; map-type
// families are printed as a single family-level URI, group-type families are
// expanded to one URI per declared column.
final FijiTableLayout tableLayout = table.getLayout(); for (FamilyLayout family : tableLayout.getFamilies()) { if (family.isMapType()) { getPrintStream().println(FijiURI.newBuilder(table.getURI()) .addColumnName(FijiColumnName.create(family.getName())) .build()); } else { for (ColumnLayout column : family.getColumns()) { getPrintStream().println(FijiURI.newBuilder(table.getURI()) .addColumnName(FijiColumnName.create(family.getName(), column.getName())) .build());
// NOTE(review): truncated code fragment — several if/try blocks are cut mid-body
// in this excerpt; do not treat as compilable.
// Visible behavior: interactive confirmation prompts guarding row- and
// column-level deletes, dispatched on whether specific columns were requested
// and whether a timestamp bound was given.
// BUG(review): the "most recent cells of columns %s" prompt passes a stray
// leading "timestamp" argument, so the format substitutions shift — %s receives
// the timestamp, row '%s' receives the column list, table '%s' receives the
// entity id, and the table URI is silently dropped (java.util.Formatter ignores
// extra trailing arguments). The fix belongs with the full statement, which is
// cut off here: drop "timestamp," from that mayProceed call.
final FijiTableWriter writer = table.openTableWriter(); try { if (columns.isEmpty()) { if (mayProceed("Are you sure you want to delete all cells with timestamp <= %d" + " from row '%s' in table '%s'?", timestamp, entityId, table.getURI())) { writer.deleteRow(entityId, timestamp); entityId, table.getURI())) { writer.deleteRow(entityId); if (!mayProceed("Are you sure you want to delete cell with timestamp %d of columns %s " + "from row '%s' in table '%s'?", timestamp, Joiner.on(",").join(columns), entityId, table.getURI())) { return SUCCESS; if (!mayProceed("Are you sure you want to delete the most recent cells of columns %s " + "from row '%s' in table '%s'?", timestamp, Joiner.on(",").join(columns), entityId, table.getURI())) { return SUCCESS; if (!mayProceed("Are you sure you want to delete all cells of columns %s " + "with timestamp <= %d from row '%s' in table '%s'?", Joiner.on(",").join(columns), timestamp, entityId, table.getURI())) { return SUCCESS; Joiner.on(",").join(columns), entityId, table.getURI())) { return SUCCESS;
// NOTE(review): truncated code fragment — the method signature is cut before
// ") throws IOException" and the try block is not closed in this excerpt.
// Visible behavior: opens an atomic putter and begins a transaction on the row
// keyed by jobId; the commit/rollback/close logic is outside this excerpt —
// confirm the unseen remainder closes the putter in a finally block.
) throws IOException { final EntityId eid = mFijiTable.getEntityId(jobId); final AtomicFijiPutter putter = mFijiTable.getWriterFactory().openAtomicPutter(); try { putter.begin(eid);
/** {@inheritDoc} */
@Override
public EntityId getEntityId(Object... fijiRowKey) {
  // Row-key to EntityId translation is delegated to the wrapped table.
  return mTable.getEntityId(fijiRowKey);
}
/** {@inheritDoc} */
@Override
public List<FijiRegion> getRegions() throws IOException {
  // Region lookup is delegated to the wrapped table; it may perform I/O,
  // hence the checked IOException.
  return mTable.getRegions();
}
/**
 * Releases this object's reference to the underlying Fiji table.
 *
 * @throws IOException if releasing the table fails.
 */
@Override
public void close() throws IOException {
  mFijiTable.release();
}
}
/** {@inheritDoc} */
@Override
public FijiURI getURI() {
  // The URI is owned by the wrapped table; this wrapper adds nothing.
  return mTable.getURI();
}
/** {@inheritDoc} */
@Override
public FijiReaderFactory getReaderFactory() throws IOException {
  // Reader-factory access is delegated to the wrapped table.
  return mTable.getReaderFactory();
}