/**
 * Sets up the dynamic-partition test fixture for one storage format.
 * Delegates format/SerDe/IO-format wiring to the superclass, then derives a
 * format-specific table name and pre-generates the write records and data
 * columns shared by the test cases.
 *
 * @param formatName        storage format under test; also suffixes the table name
 *                          so runs for different formats don't collide
 * @param serdeClass        SerDe implementation class name
 * @param inputFormatClass  input format class name
 * @param outputFormatClass output format class name
 * @throws Exception if superclass setup or record generation fails
 */
public TestHCatDynamicPartitioned(String formatName, String serdeClass, String inputFormatClass,
    String outputFormatClass) throws Exception {
  super(formatName, serdeClass, inputFormatClass, outputFormatClass);
  tableName = "testHCatDynamicPartitionedTable_" + formatName;
  generateWriteRecords(NUM_RECORDS, NUM_PARTITIONS, 0);
  generateDataColumns();
}
/**
 * Verifies enforcement of the dynamic-partition limit: writes more dynamic
 * partitions than {@code hive.exec.max.dynamic.partitions} allows. When the
 * HCatalog-side limit check is enabled, the write must fail with
 * {@code ERROR_TOO_MANY_DYNAMIC_PTNS}; otherwise it must succeed and the
 * written records must be readable back.
 *
 * <p>NOTE(review): the leading underscore keeps this method from being picked
 * up as a test by name-based runners — presumably a deliberate disable;
 * confirm before renaming. Fix applied here: removed an unused local
 * {@code HiveConf hc = new HiveConf(this.getClass());} — the method reads the
 * configured limit from the {@code hiveConf} field instead.
 *
 * @throws Exception on unexpected setup or MapReduce job failure
 */
public void _testHCatDynamicPartitionMaxPartitions() throws Exception {
  int maxParts = hiveConf.getIntVar(HiveConf.ConfVars.DYNAMICPARTITIONMAXPARTS);
  LOG.info("Max partitions allowed = {}", maxParts);

  IOException exc = null;
  try {
    // Request maxParts + 2 partitions — deliberately over the configured limit.
    generateWriteRecords(maxParts + 5, maxParts + 2, 10);
    runMRCreate(null, dataColumns, writeRecords, maxParts + 5, false);
  } catch (IOException e) {
    exc = e;
  }

  if (HCatConstants.HCAT_IS_DYNAMIC_MAX_PTN_CHECK_ENABLED) {
    // The write must have been rejected with the specific partition-limit error.
    assertTrue(exc != null);
    assertTrue(exc instanceof HCatException);
    assertEquals(ErrorType.ERROR_TOO_MANY_DYNAMIC_PTNS, ((HCatException) exc).getErrorType());
  } else {
    // No HCat-side limit check: the write succeeds and every record reads back.
    assertTrue(exc == null);
    runMRRead(maxParts + 5);
  }
}
} // NOTE(review): closing brace of the enclosing class — preserved from original chunk
protected void runHCatDynamicPartitionedTable(boolean asSingleMapTask, String customDynamicPathPattern) throws Exception { generateWriteRecords(NUM_RECORDS, NUM_PARTITIONS, 0); runMRCreate(null, dataColumns, writeRecords, NUM_RECORDS, true, asSingleMapTask, customDynamicPathPattern); generateWriteRecords(NUM_RECORDS, NUM_PARTITIONS, 0); Job job = runMRCreate(null, dataColumns, writeRecords, NUM_RECORDS, false, true, customDynamicPathPattern);