/**
 * Returns a human-readable summary of this record reader for logging:
 * the table path plus, when a document reader is active, the current
 * document id and the reader itself.
 */
@Override
public String toString() {
  final StringBuilder buf = new StringBuilder("MaprDBJsonRecordReader[Table=");
  buf.append(table == null ? null : table.getPath());
  if (reader != null) {
    buf.append(", Document ID=").append(IdCodec.asString(reader.getId()));
  }
  return buf.append(", reader=").append(reader).append(']').toString();
}
}
private void createIndex(Table table, String[] indexDef) throws Exception { if (indexDef == null) { // don't create index here. indexes may have been created return; } for (int i = 0; i < indexDef.length / 3; ++i) { String indexCmd = String.format("maprcli table index add" + " -path " + table.getPath() + " -index %s" + " -indexedfields '%s'" + ((indexDef[3 * i + 2].length()==0)?"":" -includedfields '%s'") + ((indexDef[3 * i].startsWith("hash"))? " -hashed true" : ""), indexDefInCommand(indexDef[3 * i]), // index name indexDefInCommand(indexDef[3 * i + 1]), // indexedfields indexDefInCommand(indexDef[3 * i + 2])); // includedfields System.out.println(indexCmd); TestCluster.runCommand(indexCmd); DBTests.admin().getTableIndexes(table.getPath(), true); } }
/**
 * One-time setup for TestEncodedFieldPaths: (re)creates the test table with
 * a column-family mapping, builds an index on "age" covering
 * "name.last"/"data.salary", loads the JSON fixture from the classpath, and
 * blocks until the index has caught up. Finally disables full table scans
 * for the session so queries are forced onto the index.
 */
@BeforeClass
public static void setup_TestEncodedFieldPaths() throws Exception {
  try (Table table = DBTests.createOrReplaceTable(TABLE_NAME, ImmutableMap.of("codes", "codes"))) {
    tableCreated = true;
    tablePath = table.getPath().toUri().getPath();

    DBTests.createIndex(TABLE_NAME, INDEX_NAME, new String[] {"age"}, new String[] {"name.last", "data.salary"});
    DBTests.admin().getTableIndexes(table.getPath(), true);

    // Load the fixture documents and flush so they are visible to the index.
    try (final InputStream in = TestEncodedFieldPaths.class.getResourceAsStream(JSON_FILE_URL);
        final DocumentStream stream = Json.newDocumentStream(in);) {
      table.insertOrReplace(stream);
      table.flush();
    }

    // wait for the indexes to sync
    DBTests.waitForRowCount(table.getPath(), 5, INDEX_FLUSH_TIMEOUT);
    DBTests.waitForIndexFlush(table.getPath(), INDEX_FLUSH_TIMEOUT);
  } finally {
    // NOTE(review): runs even when setup fails above — presumably intentional
    // so the session option is always set before any test runs; confirm.
    test("ALTER SESSION SET `planner.disable_full_table_scan` = true");
  }
}
/**
 * One-time setup: (re)creates the test table and inserts every document
 * from the JSON fixture, using each document's "business_id" field as its
 * row key.
 */
@BeforeClass
public static void setup_TestSimpleJson() throws Exception {
  try (Table table = DBTests.createOrReplaceTable(TABLE_NAME);
      InputStream in = MaprDBTestsSuite.getJsonStream(JSON_FILE_URL);
      DocumentStream stream = Json.newDocumentStream(in)) {
    tableCreated = true;
    tablePath = table.getPath().toUri().getPath();

    // Key each inserted row by the document's "business_id" field.
    for (final Document doc : stream) {
      table.insert(doc, "business_id");
    }
    table.flush();
  }
}
/**
 * Probes the first document of the table to initialize the writer schema.
 * The vector writer position is reset before and after, so this read emits
 * no rows into the result.
 *
 * <p>Fix: an empty table is now a no-op instead of escaping with an
 * uncaught {@code NoSuchElementException} from {@code iterator().next()}.
 * Schema changes are dropped or rethrown according to
 * {@code getIgnoreSchemaChange()}.
 */
public void readToInitSchema() {
  DBDocumentReaderBase reader = null;
  vectorWriter.setPosition(0);
  try (DocumentStream dstream = table.find()) {
    // Only the first document is needed to seed the schema; an empty
    // result simply leaves the schema untouched.
    for (Document document : dstream) {
      reader = (DBDocumentReaderBase) document.asReader();
      documentWriter.writeDBDocument(vectorWriter, reader);
      break;
    }
  } catch (UserException e) {
    // Re-wrap with table/document context so the failing row is identifiable.
    throw UserException.unsupportedError(e)
        .addContext(String.format("Table: %s, document id: '%s'",
            getTable().getPath(),
            reader == null ? null : IdCodec.asString(reader.getId())))
        .build(logger);
  } catch (SchemaChangeException e) {
    if (getIgnoreSchemaChange()) {
      logger.warn("{}. Dropping the row from result.", e.getMessage());
      logger.debug("Stack trace:", e);
    } else {
      throw dataReadError(logger, e);
    }
  } finally {
    vectorWriter.setPosition(0);
  }
}
@BeforeClass public static void setup_TestSimpleJson() throws Exception { // We create a large table with auto-split set to disabled. // Without intra-tablet partitioning, this test should run with only one minor fragment try (Table table = DBTests.createOrReplaceTable(TABLE_NAME, false /*autoSplit*/); InputStream in = MaprDBTestsSuite.getJsonStream(JSON_FILE_URL); DocumentStream stream = Json.newDocumentStream(in)) { tableCreated = true; tablePath = table.getPath().toUri().getPath(); List<Document> docs = Lists.newArrayList(stream); for (char ch = 'A'; ch <= 'T'; ch++) { for (int rowIndex = 0; rowIndex < 5000; rowIndex++) { for (int i = 0; i < docs.size(); i++) { final Document document = docs.get(i); final String id = String.format("%c%010d%03d", ch, rowIndex, i); document.set("documentId", rowIndex); table.insertOrReplace(id, document); } } } table.flush(); DBTests.waitForRowCount(table.getPath(), TOTAL_ROW_COUNT); setSessionOption("planner.width.max_per_node", 5); } }
// NOTE(review): fragment — the builder chain is completed (presumably with
// .build(logger)) outside the visible span. Wraps the cause and attaches the
// table path and, when a reader exists, the current document id.
// NOTE(review): document.asReader() is called twice; assumed to return the
// same reader each time — confirm it is not a fresh-cursor factory.
throw UserException.unsupportedError(e)
    .addContext(String.format("Table: %s, document id: '%s'",
        table.getPath(),
        document.asReader() == null ? null
            : IdCodec.asString(((DBDocumentReaderBase) document.asReader()).getId())))
// NOTE(review): fragment cut at both ends — the try body above and the rest of
// the SchemaChangeException handler below are outside the visible span.
} catch (UserException e) {
  // Re-wrap with table path and document id (null when no document was read)
  // so the failing row is identifiable in the error output.
  throw UserException.unsupportedError(e)
      .addContext(String.format("Table: %s, document id: '%s'",
          getTable().getPath(),
          reader == null ? null : IdCodec.asString(reader.getId())))
      .build(logger);
} catch (SchemaChangeException e) {
  if (getIgnoreSchemaChange()) {
DBTests.waitForIndexFlush(table.getPath(), INDEX_FLUSH_TIMEOUT); Thread.sleep(200000);
/**
 * Describes this record reader for diagnostics: the table path, the id of
 * the document currently being read (if any), and the reader itself.
 */
@Override
public String toString() {
  StringBuilder description = new StringBuilder("MaprDBJsonRecordReader[Table=");
  description.append(table != null ? table.getPath() : null);
  if (reader != null) {
    description.append(", Document ID=");
    description.append(IdCodec.asString(reader.getId()));
  }
  description.append(", reader=");
  description.append(reader);
  description.append(']');
  return description.toString();
}
}
/**
 * Probes the first document of the table to initialize the writer schema.
 * The vector writer position is reset before and after, so this read emits
 * no rows into the result.
 *
 * <p>Fix: an empty table is now a no-op instead of escaping with an
 * uncaught {@code NoSuchElementException} from {@code iterator().next()}.
 * Schema changes are dropped or rethrown according to
 * {@code getIgnoreSchemaChange()}.
 */
public void readToInitSchema() {
  DBDocumentReaderBase reader = null;
  vectorWriter.setPosition(0);
  try (DocumentStream dstream = table.find()) {
    // Only the first document is needed to seed the schema; an empty
    // result simply leaves the schema untouched.
    for (Document document : dstream) {
      reader = (DBDocumentReaderBase) document.asReader();
      documentWriter.writeDBDocument(vectorWriter, reader);
      break;
    }
  } catch (UserException e) {
    // Re-wrap with table/document context so the failing row is identifiable.
    throw UserException.unsupportedError(e)
        .addContext(String.format("Table: %s, document id: '%s'",
            getTable().getPath(),
            reader == null ? null : IdCodec.asString(reader.getId())))
        .build(logger);
  } catch (SchemaChangeException e) {
    if (getIgnoreSchemaChange()) {
      logger.warn("{}. Dropping the row from result.", e.getMessage());
      logger.debug("Stack trace:", e);
    } else {
      throw dataReadError(logger, e);
    }
  } finally {
    vectorWriter.setPosition(0);
  }
}
// NOTE(review): fragment — the builder chain is completed (presumably with
// .build(logger)) outside the visible span. Wraps the cause and attaches the
// table path and, when a reader exists, the current document id.
// NOTE(review): document.asReader() is called twice; assumed to return the
// same reader each time — confirm it is not a fresh-cursor factory.
throw UserException.unsupportedError(e)
    .addContext(String.format("Table: %s, document id: '%s'",
        table.getPath(),
        document.asReader() == null ? null
            : IdCodec.asString(((DBDocumentReaderBase) document.asReader()).getId())))
// NOTE(review): fragment cut at both ends — the try body above and the rest of
// the SchemaChangeException handler below are outside the visible span.
} catch (UserException e) {
  // Re-wrap with table path and document id (null when no document was read)
  // so the failing row is identifiable in the error output.
  throw UserException.unsupportedError(e)
      .addContext(String.format("Table: %s, document id: '%s'",
          getTable().getPath(),
          reader == null ? null : IdCodec.asString(reader.getId())))
      .build(logger);
} catch (SchemaChangeException e) {
  if (getIgnoreSchemaChange()) {