/**
 * Reads back the word counts written by the topology: for each word in
 * {@code WordSpout.words}, fetches the row from the "WordCount" table and prints
 * the stored word and count from family "cf".
 *
 * Fixes vs. original: the table handle is now closed in a finally block (it was
 * leaked), and a null cell no longer causes an NPE in Bytes.toLong.
 */
public static void main(String[] args) throws Exception {
    Configuration config = HBaseConfiguration.create();
    if (args.length > 0) {
        config.set("hbase.rootdir", args[0]);
    }
    HTable table = new HTable(config, "WordCount");
    try {
        for (String word : WordSpout.words) {
            Get get = new Get(Bytes.toBytes(word));
            Result result = table.get(get);
            byte[] countBytes = result.getValue(Bytes.toBytes("cf"), Bytes.toBytes("count"));
            byte[] wordBytes = result.getValue(Bytes.toBytes("cf"), Bytes.toBytes("word"));
            String wordStr = Bytes.toString(wordBytes);
            System.out.println(wordStr);
            // A missing row/cell yields null; Bytes.toLong(null) would throw, so default to 0.
            long count = countBytes == null ? 0L : Bytes.toLong(countBytes);
            System.out.println("Word: '" + wordStr + "', Count: " + count);
        }
    } finally {
        table.close(); // was never closed in the original — connection leak
    }
} }
/**
 * Demo entry point: exercises foo(), then writes one row ("row1") with two
 * qualifiers into family "colfam1" of table "test1" on the configured cluster.
 *
 * Fix vs. original: the table is now closed in a finally block so the handle is
 * released even when put() throws (it was leaked on any failure).
 *
 * @throws IOException if the HBase connection or write fails
 */
public static void main(String[] args) throws IOException {
    foo(6, 5);
    foo(5, 2);
    foo(3, 0);
    Configuration conf = HBaseConfiguration.create();
    conf.set("hbase.zookeeper.quorum", "hbase_host");
    conf.set("zookeeper.znode.parent", "/hbase-unsecure");
    HTable table = new HTable(conf, "test1");
    try {
        Put put = new Put(Bytes.toBytes("row1"));
        put.add(Bytes.toBytes("colfam1"), Bytes.toBytes("qual1"), Bytes.toBytes("val1"));
        put.add(Bytes.toBytes("colfam1"), Bytes.toBytes("qual2"), Bytes.toBytes("val2"));
        table.put(put);
    } finally {
        table.close(); // close even on failure — original leaked the handle if put() threw
    }
} }
// NOTE(review): truncated fragment — the finally block and the method/class braces are
// not closed in this chunk; the remainder presumably follows elsewhere. `keys` is also
// unused in the visible code — TODO confirm whether assertions on it were cut off.
@Test
public void testCompositeRowIndexPredication() throws Exception {
    // Return value ignored — presumably just probing that the backing table exists.
    hAdmin.tableExists("hbase_table");
    HTable htable = new HTable(testingCluster.getHBaseUtil().getConf(), "hbase_table");
    try {
        org.apache.hadoop.hbase.util.Pair<byte[][], byte[][]> keys = htable.getStartEndKeys();
        // Scan keys prefixed "021": the stop row is "021_" followed by the maximal char
        // value, acting as an upper bound for that key prefix.
        Scan scan = new Scan();
        scan.setStartRow("021".getBytes());
        scan.setStopRow(("021_" + new String(new char[]{Character.MAX_VALUE})).getBytes());
    } finally {
        // Cleanup: drop the mapped table and release the HBase handles.
        executeString("DROP TABLE hbase_mapped_table PURGE").close();
        htable.close();
        hAdmin.close();
@Test public void testSimpleDeletes() throws Exception { HTable primary = createSetupTables(fam1); // do a simple Put long ts = 10; Put p = new Put(row1); p.add(FAM, indexed_qualifer, ts, value1); p.add(FAM, regular_qualifer, ts, value2); primary.put(p); primary.flushCommits(); Delete d = new Delete(row1); primary.delete(d); HTable index = new HTable(UTIL.getConfiguration(), fam1.getTable()); List<KeyValue> expected = Collections.<KeyValue> emptyList(); // scan over all time should cause the delete to be covered IndexTestingUtils.verifyIndexTableAtTimestamp(index, expected, 0, Long.MAX_VALUE, value1, HConstants.EMPTY_END_ROW); // scan at the older timestamp should still show the older value List<Pair<byte[], CoveredColumn>> pairs = new ArrayList<Pair<byte[], CoveredColumn>>(); pairs.add(new Pair<byte[], CoveredColumn>(value1, col1)); pairs.add(new Pair<byte[], CoveredColumn>(EMPTY_BYTES, col2)); expected = CoveredColumnIndexCodec.getIndexKeyValueForTesting(row1, ts, pairs); IndexTestingUtils.verifyIndexTableAtTimestamp(index, expected, ts, value1); // cleanup closeAndCleanupTables(index, primary); }
// NOTE(review): truncated fragment — the try block has no visible catch/finally and the
// method is not closed in this chunk; presumably the original expects the buffered put
// to fail on flush and handles that below.
@Test(timeout = 300000)
public void testQuickFailure() throws Exception {
    byte[] family = Bytes.toBytes("family");
    ColumnGroup fam1 = new ColumnGroup(getIndexTableName());
    Configuration conf = new Configuration(UTIL.getConfiguration());
    HTable primary = new HTable(conf, primaryTable);
    // autoFlush=false buffers writes client-side; second arg clears the buffer on failure.
    primary.setAutoFlush(false, true);
    Put p = new Put(Bytes.toBytes("row"));
    p.add(family, null, Bytes.toBytes("value"));
    primary.put(p);
    try {
        // NOTE(review): "Correclty" is a typo, but it is a runtime log string — left as-is.
        LOG.info("Correclty got a failure of the put!");
        primary.close();
// NOTE(review): truncated fragment — the CREATE TABLE string concatenation is cut off
// mid-expression and the try-with-resources/method braces are not closed in this chunk.
@Test
public void testNonForwardQuery() throws Exception {
    executeString("CREATE TABLE hbase_mapped_table1 (rk text, col1 text, col2 text, col3 int) " +
    try (HBaseAdmin hAdmin = new HBaseAdmin(testingCluster.getHBaseUtil().getConf())) {
        // Return value ignored — presumably just probing that the backing table exists.
        hAdmin.tableExists("hbase_table1");
        htable = new HTable(testingCluster.getHBaseUtil().getConf(), "hbase_table1");
        org.apache.hadoop.hbase.util.Pair<byte[][], byte[][]> keys = htable.getStartEndKeys();
        // Five start keys => the table was pre-split into 5 regions.
        assertEquals(5, keys.getFirst().length);
        htable.close();
// NOTE(review): truncated/garbled fragment — `i`, `tableName` and `htd` are not defined
// in the visible code (likely loop variables/fields from the surrounding context), `row`
// is declared a second time inside the loop (would shadow/conflict), and the method is
// not closed in this chunk.
@Test
public void testSimplePutGet() throws Exception {
    Configuration conf = HBaseConfiguration.create();
    HTable table = new HTable(conf, tableName);
    byte[] row = Bytes.toBytes("row_" + i);
    Put p = new Put(row);
    for (HColumnDescriptor hcd : htd.getFamilies()) {
        byte[] row = Bytes.toBytes("row_" + i);
        Get g = new Get(row);
        Result result = table.get(g);
/**
 * Demo entry point (duplicate of the earlier main): exercises foo(), then writes one
 * row ("row1") with two qualifiers into family "colfam1" of table "test1".
 *
 * Fix vs. original: the table is now closed in a finally block so the handle is
 * released even when put() throws (it was leaked on any failure).
 *
 * @throws IOException if the HBase connection or write fails
 */
public static void main(String[] args) throws IOException {
    foo(6, 5);
    foo(5, 2);
    foo(3, 0);
    Configuration conf = HBaseConfiguration.create();
    conf.set("hbase.zookeeper.quorum", "hbase_host");
    conf.set("zookeeper.znode.parent", "/hbase-unsecure");
    HTable table = new HTable(conf, "test1");
    try {
        Put put = new Put(Bytes.toBytes("row1"));
        put.add(Bytes.toBytes("colfam1"), Bytes.toBytes("qual1"), Bytes.toBytes("val1"));
        put.add(Bytes.toBytes("colfam1"), Bytes.toBytes("qual2"), Bytes.toBytes("val2"));
        table.put(put);
    } finally {
        table.close(); // close even on failure — original leaked the handle if put() threw
    }
} }
/** * Returns all rows from the .META. table. * * @throws IOException When reading the rows fails. */ public List<byte[]> getMetaTableRows() throws IOException { // TODO: Redo using MetaReader class HTable t = new HTable(new Configuration(this.conf), HConstants.META_TABLE_NAME); List<byte[]> rows = new ArrayList<byte[]>(); ResultScanner s = t.getScanner(new Scan()); for (Result result : s) { LOG.info("getMetaTableRows: row -> " + Bytes.toStringBinary(result.getRow())); rows.add(result.getRow()); } s.close(); t.close(); return rows; }
// NOTE(review): truncated fragment with no enclosing method visible. The second
// put.add(...) call is cut off mid-argument-list, `delete` is used but never declared
// in the visible code, and `result2` names the loop variable of both sibling loops
// (would not compile if the loops share a scope) — lines are likely missing.
HTable table = new HTable(conf, "testtable");
Put put = new Put(Bytes.toBytes("row1"));
put.add(Bytes.toBytes("colfam1"), Bytes.toBytes("qual1"), Bytes.toBytes("val1"));
put.add(Bytes.toBytes("colfam2"), Bytes.toBytes("qual2"),
table.put(put);
// Full-table scan before the delete...
Scan scan = new Scan();
ResultScanner scanner = table.getScanner(scan);
for (Result result2 : scanner) {
table.delete(delete);
// ...and a second full-table scan, presumably to verify the delete took effect.
Scan scan2 = new Scan();
ResultScanner scanner2 = table.getScanner(scan2);
for (Result result2 : scanner2) {
// NOTE(review): fragment of a larger method — `tableName`, `FAM1` and `UTIL` come from
// the surrounding (unseen) context. Builds a one-family table descriptor and opens a
// client handle to the table of the same name; `desc` is presumably passed to
// createTable elsewhere — TODO confirm.
byte[] tableNameBytes = Bytes.toBytes(tableName);
HTableDescriptor desc = new HTableDescriptor(tableNameBytes);
desc.addFamily(FAM1);
HTable primary = new HTable(UTIL.getConfiguration(), tableNameBytes);
/**
 * Remove all rows between tests in the same class.
 *
 * Iterates every table known to the admin, scans all rows, and deletes each one.
 * Rewritten with try-with-resources (already used elsewhere in this file) instead of
 * the manual null-checked finally block; resources still close in the same order
 * (scanner first, then table).
 *
 * @throws Exception if listing tables, scanning, or deleting fails
 */
@After
public void deleteAllRows() throws Exception {
  HTableDescriptor[] tableDescs = getTestUtil().getHBaseAdmin().listTables();
  for (HTableDescriptor tableDesc : tableDescs) {
    try (HTable hTable = new HTable(getTestUtil().getConfiguration(), tableDesc.getName());
         ResultScanner rs = hTable.getScanner(new Scan())) {
      for (Result result : rs) {
        hTable.delete(new Delete(result.getRow()));
      }
    }
  }
}
/** * Create the primary table (to which you should write), setup properly for indexing the given * {@link ColumnGroup}s. Also creates the necessary index tables to match the passes groups. * @param groups {@link ColumnGroup}s to index, creating one index table per column group. * @return reference to the primary table * @throws IOException if there is an issue communicating with HBase */ private HTable createSetupTables(ColumnGroup... groups) throws IOException { HBaseAdmin admin = UTIL.getHBaseAdmin(); // setup the index CoveredColumnIndexSpecifierBuilder builder = new CoveredColumnIndexSpecifierBuilder(); for (ColumnGroup group : groups) { builder.addIndexGroup(group); // create the index tables CoveredColumnIndexer.createIndexTable(admin, group.getTable()); } // setup the primary table String indexedTableName = Bytes.toString(TestTable.getTableName()); HTableDescriptor pTable = new HTableDescriptor(indexedTableName); pTable.addFamily(new HColumnDescriptor(FAM)); pTable.addFamily(new HColumnDescriptor(FAM2)); builder.build(pTable); // create the primary table admin.createTable(pTable); HTable primary = new HTable(UTIL.getConfiguration(), indexedTableName); primary.setAutoFlush(false); return primary; }
// NOTE(review): truncated fragment — the CREATE TABLE string concatenation is cut off
// mid-expression and the try block / method are not closed in this chunk.
@Test
public void testInsertIntoMultiRegion() throws Exception {
    executeString("CREATE TABLE hbase_mapped_table (rk text, col1 text) TABLESPACE cluster1 " +
    ResultScanner scanner = null;
    try {
        htable = new HTable(testingCluster.getHBaseUtil().getConf(), "hbase_table");
        // Scan only the "col1" family of the mapped HBase table.
        Scan scan = new Scan();
        scan.addFamily(Bytes.toBytes("col1"));
        scanner = htable.getScanner(scan);
        htable.close();
// NOTE(review): truncated fragment — no closing brace is visible, so the body continues
// beyond this chunk. Opens the indexed primary table plus one handle per index group.
@Test
public void testMultipleConcurrentGroupsUpdated() throws Exception {
    HTable primary = createSetupTables(fam1, fam2);
    HTable index1 = new HTable(UTIL.getConfiguration(), fam1.getTable());
    HTable index2 = new HTable(UTIL.getConfiguration(), fam2.getTable());
// NOTE(review): garbled/truncated fragment — `p`, `primary`, `ts1`, `value1`,
// `indexed_qualifer` etc. are not declared in the visible code, and `s`/`scanner` are
// declared twice in what appears to be the same scope (would not compile as-is);
// lines are very likely missing between the two scan stanzas.
@Test
public void testExceedVersionsOutOfOrderPut() throws Exception {
    byte[] value4 = Bytes.toBytes("val4");
    byte[] value5 = Bytes.toBytes("val5");
    p.add(FAM2, indexed_qualifer, ts1, value1);
    primary.put(p);
    HTable index = new HTable(UTIL.getConfiguration(), fam2.getTable());
    // Raw scan: returns delete markers and all cell versions, not just the latest.
    Scan s = new Scan();
    s.setRaw(true);
    ResultScanner scanner = index.getScanner(s);
    Scan s = new Scan();
    s.setRaw(true);
    ResultScanner scanner = index.getScanner(s);
// NOTE(review): truncated fragment — the CREATE TABLE string concatenation is cut off
// mid-expression and the try has no visible catch/finally; braces do not balance here.
@Test
public void testJoin() throws Exception {
    executeString("CREATE TABLE hbase_mapped_table (rk text, col1 text, col2 text, col3 int8) " +
    try {
        // Return value ignored — presumably just probing that the backing table exists.
        hAdmin.tableExists("hbase_table");
        htable = new HTable(testingCluster.getHBaseUtil().getConf(), "hbase_table");
        org.apache.hadoop.hbase.util.Pair<byte[][], byte[][]> keys = htable.getStartEndKeys();
        // Five start keys => the table was pre-split into 5 regions.
        assertEquals(5, keys.getFirst().length);
        hAdmin.close();
        if (htable != null) {
            htable.close();
// NOTE(review): truncated/garbled fragment (duplicate of an earlier snippet) — `i`,
// `tableName` and `htd` are not defined in the visible code, `row` is declared a second
// time inside the loop, and the method is not closed in this chunk.
@Test
public void testSimplePutGet() throws Exception {
    Configuration conf = HBaseConfiguration.create();
    HTable table = new HTable(conf, tableName);
    byte[] row = Bytes.toBytes("row_" + i);
    Put p = new Put(row);
    for (HColumnDescriptor hcd : htd.getFamilies()) {
        byte[] row = Bytes.toBytes("row_" + i);
        Get g = new Get(row);
        Result result = table.get(g);