Tabnine Logo
HCatCreateTableDesc$Builder.build
Code Index — Add Tabnine to your IDE (free)

How to use
build
method
in
org.apache.hive.hcatalog.api.HCatCreateTableDesc$Builder

Best Java code snippets using org.apache.hive.hcatalog.api.HCatCreateTableDesc$Builder.build (Showing top 20 results out of 315)

origin: apache/hive

try {
 HCatCreateTableDesc tableDesc = HCatCreateTableDesc
  .create(null, tableName, cols).fileFormat("rcfile").build();
 client.createTable(tableDesc);
} catch (Exception exp) {
 client.dropTable(null, newName, true);
 HCatCreateTableDesc tableDesc2 = HCatCreateTableDesc
  .create(null, newName, cols).fileFormat("rcfile").build();
 client.createTable(tableDesc2);
 HCatTable newTable = client.getTable(null, newName);
origin: apache/hive

@Test
public void testGetMessageBusTopicName() throws Exception {
 try {
  final HCatClient hcatClient = HCatClient.create(new Configuration(hcatConf));
  final String db = "testGetMessageBusTopicName_DBName";
  final String table = "testGetMessageBusTopicName_TableName";
  // Start from a clean slate: drop any leftovers from a previous run.
  hcatClient.dropDatabase(db, true, HCatClient.DropDBMode.CASCADE);
  hcatClient.createDatabase(HCatCreateDBDesc.create(db).build());
  final String topicName = "MY.topic.name";
  // Attach the message-bus topic as a table property at creation time.
  Map<String, String> props = new HashMap<String, String>(1);
  props.put(HCatConstants.HCAT_MSGBUS_TOPIC_NAME, topicName);
  hcatClient.createTable(HCatCreateTableDesc
    .create(db, table, Arrays.asList(new HCatFieldSchema("foo", Type.STRING, "")))
    .tblProps(props)
    .build());
  // The client must surface the same topic name we stored in the table props.
  assertEquals("MessageBus topic-name doesn't match!", topicName, hcatClient.getMessageBusTopicName(db, table));
  hcatClient.dropDatabase(db, true, HCatClient.DropDBMode.CASCADE);
  hcatClient.close();
 }
 catch (Exception exception) {
  LOG.error("Unexpected exception.", exception);
  assertTrue("Unexpected exception:" + exception.getMessage(), false);
 }
}
origin: apache/hive

@Test
public void testCreateTableLike() throws Exception {
 HCatClient client = HCatClient.create(new Configuration(hcatConf));
 String tableName = "tableone";
 String cloneTable = "tabletwo";
 // Drop leftovers from prior runs so the test starts clean.
 client.dropTable(null, tableName, true);
 client.dropTable(null, cloneTable, true);
 ArrayList<HCatFieldSchema> cols = new ArrayList<HCatFieldSchema>();
 cols.add(new HCatFieldSchema("id", Type.INT, "id columns"));
 cols.add(new HCatFieldSchema("value", Type.STRING, "id columns"));
 HCatCreateTableDesc tableDesc = HCatCreateTableDesc
  .create(null, tableName, cols).fileFormat("rcfile").build();
 client.createTable(tableDesc);
 // create a new table similar to previous one.
 client.createTableLike(null, tableName, cloneTable, true, false, null);
 List<String> tables = client.listTableNamesByPattern(null, "table*");
 // FIX: assertEquals reports the actual count on failure; the previous
 // assertTrue(tables.size() == 2) only said "expected true but was false".
 assertEquals(2, tables.size());
 client.close();
}
origin: apache/hive

  .comment("Source table.");
sourceMetaStore().createTable(HCatCreateTableDesc.create(sourceTable).build());
targetMetaStore().createTable(HCatCreateTableDesc.create(targetTable).build());
targetTable = targetMetaStore().getTable(dbName, tableName);
origin: apache/hive

sourceMetaStore().createTable(HCatCreateTableDesc.create(sourceTable).build());
targetMetaStore().createTable(HCatCreateTableDesc.create(targetTable).build());
origin: apache/hive

cols.add(new HCatFieldSchema("value", Type.STRING, "value comment"));
HCatCreateTableDesc tableDesc = HCatCreateTableDesc
 .create(db, tableOne, cols).fileFormat("rcfile").build();
client.createTable(tableDesc);
HCatTable table1 = client.getTable(db, tableOne);
HCatCreateTableDesc tableDesc2 = HCatCreateTableDesc.create(db,
 tableTwo, cols).fieldsTerminatedBy('\001').escapeChar('\002').linesTerminatedBy('\003').
 mapKeysTerminatedBy('\004').collectionItemsTerminatedBy('\005').nullDefinedAs('\006').build();
client.createTable(tableDesc2);
HCatTable table2 = client.getTable(db, tableTwo);
 tableThree, cols).fileFormat("orcfile").build();
client.createTable(tableDesc3);
HCatTable table3 = client.getTable(db, tableThree);
origin: apache/hive

                         .partCols(ptnCols)
                         .fileFormat("sequenceFile");
HCatCreateTableDesc tableDesc = HCatCreateTableDesc.create(table, false).build();
client.createTable(tableDesc);
origin: apache/hive

HCatTable table = (new HCatTable(dbName, tableName)).tblProps(props).cols(cols).partCols(pcols);
client.createTable(HCatCreateTableDesc.create(table).build());
HCatTable tableCreated = client.getTable(dbName, tableName);
assertNotNull(tableCreated);
origin: apache/hive

HCatTable tableToCreate = (new HCatTable(dbName, tableName)).tblProps(tprops).cols(cols);
client.createTable(HCatCreateTableDesc.create(tableToCreate).build());
HCatTable t1 = client.getTable(dbName, tableName);
assertNotNull(t1);
HCatTable tableToCreate2 = (new HCatTable(dbName, tableName)).tblProps(tprops2).cols(cols);
client.createTable(HCatCreateTableDesc.create(tableToCreate2).build());
HCatTable t3 = client.getTable(dbName, tableName);
assertNotNull(t3);
origin: apache/hive

client.createTable(HCatCreateTableDesc.create(table).build());
HCatTable t = client.getTable(dbName, tableName);
assertNotNull(t);
origin: apache/hive

HCatTable table = (new HCatTable(dbName, tableName)).tblProps(tprops).cols(cols).partCols(pcols);
client.createTable(HCatCreateTableDesc.create(table).build());
HCatTable tableCreated = client.getTable(dbName, tableName);
assertNotNull(tableCreated);
origin: apache/hive

  Arrays.asList(new HCatFieldSchema(partitionColumn, Type.STRING, "")));
HCatTable table = new HCatTable(dbName, tableName).cols(columns).partCols(partitionColumns);
client.createTable(HCatCreateTableDesc.create(table, false).build());
origin: apache/hive

client.createTable(HCatCreateTableDesc.create(table, false).build());
origin: apache/hive

client.createTable(HCatCreateTableDesc.create(table).build());
HCatTable t = client.getTable(dbName, tableName);
assertNotNull(t);
origin: apache/hive

client.createTable(HCatCreateTableDesc.create(table, false).build());
origin: apache/hive

try {
 HCatCreateTableDesc tableDesc = HCatCreateTableDesc
  .create(null, tableName, cols).fileFormat("rcfile").build();
 client.createTable(tableDesc);
 client.dropTable(null, newName, true);
 HCatCreateTableDesc tableDesc2 = HCatCreateTableDesc
  .create(null, newName, cols).fileFormat("rcfile").build();
 client.createTable(tableDesc2);
 HCatTable newTable = client.getTable(null, newName);
origin: apache/hive

@Test
public void testRenameTable() throws Exception {
 HCatClient client = HCatClient.create(new Configuration(hcatConf));
 String tableName = "temptable";
 String newName = "mytable";
 // Clean up any tables left over from earlier runs.
 client.dropTable(null, tableName, true);
 client.dropTable(null, newName, true);
 ArrayList<HCatFieldSchema> cols = new ArrayList<HCatFieldSchema>();
 cols.add(new HCatFieldSchema("id", Type.INT, "id columns"));
 cols.add(new HCatFieldSchema("value", Type.STRING, "id columns"));
 HCatCreateTableDesc tableDesc = HCatCreateTableDesc
  .create(null, tableName, cols).fileFormat("rcfile").build();
 client.createTable(tableDesc);
 client.renameTable(null, tableName, newName);
 // After a successful rename, looking the table up by its OLD name must fail.
 boolean oldNameStillResolves = true;
 try {
  client.getTable(null, tableName);
 } catch (HCatException exp) {
  oldNameStillResolves = false;
  assertTrue("Unexpected exception message: " + exp.getMessage(),
    exp.getMessage().contains("NoSuchObjectException while fetching table"));
 }
 // FIX: the original test silently passed this step when getTable() returned
 // normally (i.e. when the rename never actually happened).
 assertTrue("Table was still accessible under its old name after rename.",
   !oldNameStillResolves);
 HCatTable newTable = client.getTable(null, newName);
 assertTrue(newTable != null);
 assertTrue(newTable.getTableName().equals(newName));
 client.close();
}
origin: apache/hive

@Test
public void testPartitionSchema() throws Exception {
 try {
  final HCatClient hcatClient = HCatClient.create(new Configuration(hcatConf));
  final String db = "myDb";
  final String table = "myTable";
  // Recreate the database from scratch so prior runs can't interfere.
  hcatClient.dropDatabase(db, true, HCatClient.DropDBMode.CASCADE);
  hcatClient.createDatabase(HCatCreateDBDesc.create(db).build());
  final List<HCatFieldSchema> columnSchema = Arrays.asList(
    new HCatFieldSchema("foo", Type.INT, ""),
    new HCatFieldSchema("bar", Type.STRING, ""));
  final List<HCatFieldSchema> partitionSchema = Arrays.asList(
    new HCatFieldSchema("dt", Type.STRING, ""),
    new HCatFieldSchema("grid", Type.STRING, ""));
  hcatClient.createTable(
    HCatCreateTableDesc.create(db, table, columnSchema).partCols(partitionSchema).build());
  // Read the table back and confirm the partition columns round-tripped intact.
  final List<HCatFieldSchema> partitionColumns = hcatClient.getTable(db, table).getPartCols();
  assertArrayEquals("Didn't get expected partition-schema back from the HCatTable.",
    partitionSchema.toArray(), partitionColumns.toArray());
  hcatClient.dropDatabase(db, false, HCatClient.DropDBMode.CASCADE);
 }
 catch (Exception unexpected) {
  LOG.error("Unexpected exception!", unexpected);
  assertTrue("Unexpected exception! " + unexpected.getMessage(), false);
 }
}
origin: apache/hive

@Test
public void testUpdateTableSchema() throws Exception {
 try {
  final HCatClient hcatClient = HCatClient.create(new Configuration(hcatConf));
  final String db = "testUpdateTableSchema_DBName";
  final String table = "testUpdateTableSchema_TableName";
  // Recreate the database so earlier runs can't pollute this one.
  hcatClient.dropDatabase(db, true, HCatClient.DropDBMode.CASCADE);
  hcatClient.createDatabase(HCatCreateDBDesc.create(db).build());
  final List<HCatFieldSchema> originalColumns = Arrays.asList(
    new HCatFieldSchema("foo", Type.INT, ""),
    new HCatFieldSchema("bar", Type.STRING, ""));
  hcatClient.createTable(HCatCreateTableDesc.create(db, table, originalColumns).build());
  // Replace the schema wholesale with an entirely different column list.
  final List<HCatFieldSchema> replacementColumns = Arrays.asList(
    new HCatFieldSchema("completely", Type.DOUBLE, ""),
    new HCatFieldSchema("new", Type.STRING, ""),
    new HCatFieldSchema("fields", Type.STRING, ""));
  hcatClient.updateTableSchema(db, table, replacementColumns);
  // The metastore must now report exactly the replacement columns.
  assertArrayEquals(replacementColumns.toArray(),
    hcatClient.getTable(db, table).getCols().toArray());
  hcatClient.dropDatabase(db, false, HCatClient.DropDBMode.CASCADE);
 }
 catch (Exception exception) {
  LOG.error("Unexpected exception.", exception);
  assertTrue("Unexpected exception: " + exception.getMessage(), false);
 }
}
origin: apache/hive

/**
 * Verifies that a plain {@link HCatTable} instantiation matches what Hive
 * says an empty table create should look like.
 * @throws Exception
 */
@Test
public void testEmptyTableInstantiation() throws Exception {
 HCatClient client = HCatClient.create(new Configuration(hcatConf));
 String dbName = "default";
 String tblName = "testEmptyCreate";
 ArrayList<HCatFieldSchema> columns = new ArrayList<HCatFieldSchema>();
 columns.add(new HCatFieldSchema("id", Type.INT, "id comment"));
 columns.add(new HCatFieldSchema("value", Type.STRING, "value comment"));
 client.dropTable(dbName, tblName, true);
 // Create a minimalistic table: only db, name, and columns are specified.
 HCatTable minimalTable = new HCatTable(dbName, tblName).cols(columns);
 client.createTable(HCatCreateTableDesc.create(minimalTable, false).build());
 HCatTable created = client.getTable(dbName, tblName);
 org.apache.hadoop.hive.metastore.api.Table emptyTable = Table.getEmptyTable(dbName, tblName);
 // Hive's notion of an empty table is the baseline: every table property it
 // sets must be present, with the same value, in what we created.
 mapEqualsContainedIn(emptyTable.getParameters(), created.getTblProps());
 // Same containment check for the storage-descriptor's SerDe parameters.
 // We may add parameters beyond Hive's defaults, but must not change the
 // values of any parameter Hive defines for an empty table.
 mapEqualsContainedIn(emptyTable.getSd().getSerdeInfo().getParameters(), created.getSerdeParams());
}
org.apache.hive.hcatalog.api.HCatCreateTableDesc$Builder.build

Javadoc

Builds the HCatCreateTableDesc.

Popular methods of HCatCreateTableDesc$Builder

  • <init>
  • serdeParam
    used for setting arbitrary SerDe parameter
  • fileFormat
    File format.
  • partCols
    Partition cols.
  • tblProps
    Tbl props.
  • collectionItemsTerminatedBy
    See row_format element of CREATE_TABLE DDL for Hive.
  • escapeChar
    See row_format element of CREATE_TABLE DDL for Hive.
  • fieldsTerminatedBy
    See row_format element of CREATE_TABLE DDL for Hive.
  • linesTerminatedBy
    See row_format element of CREATE_TABLE DDL for Hive.
  • mapKeysTerminatedBy
    See row_format element of CREATE_TABLE DDL for Hive.
  • nullDefinedAs
    See row_format element of CREATE_TABLE DDL for Hive.

Popular in Java

  • Parsing JSON documents to java classes using gson
  • getResourceAsStream (ClassLoader)
  • getContentResolver (Context)
  • getExternalFilesDir (Context)
  • SocketException (java.net)
    This SocketException may be thrown during socket creation or setting options, and is the superclass
  • LinkedHashMap (java.util)
    LinkedHashMap is an implementation of Map that guarantees iteration order. All optional operations a
  • Scanner (java.util)
    A parser that parses a text string of primitive types and strings with the help of regular expressio
  • Executors (java.util.concurrent)
    Factory and utility methods for Executor, ExecutorService, ScheduledExecutorService, ThreadFactory,
  • Semaphore (java.util.concurrent)
    A counting semaphore. Conceptually, a semaphore maintains a set of permits. Each #acquire blocks if
  • Options (org.apache.commons.cli)
    Main entry-point into the library. Options represents a collection of Option objects, which describ
  • Best IntelliJ plugins
Tabnine Logo
  • Products

    Search for Java code · Search for JavaScript code
  • IDE Plugins

    IntelliJ IDEAWebStormVisual StudioAndroid StudioEclipseVisual Studio CodePyCharmSublime TextPhpStormVimGoLandRubyMineEmacsJupyter NotebookJupyter LabRiderDataGripAppCode
  • Company

    About UsContact UsCareers
  • Resources

    FAQBlogTabnine AcademyTerms of usePrivacy policyJava Code IndexJavascript Code Index
Get Tabnine for your IDE now