/** Returns the InputFormat class of the underlying table (pure delegation). */
@Override
public Class<? extends InputFormat> getInputFormatClass() {
  return table.getInputFormatClass();
}
/**
 * Builds a {@link TableDesc} (input format, output format, properties) for the
 * given table.
 *
 * NOTE(review): this writes SERIALIZATION_LIB into the Properties object
 * returned by {@code tbl.getMetadata()} — if that is the table's live metadata,
 * the mutation is visible to other users of the table; confirm this side effect
 * is intended.
 */
public static TableDesc getTableDesc(Table tbl) {
  final Properties props = tbl.getMetadata();
  props.put(serdeConstants.SERIALIZATION_LIB, tbl.getDeserializer().getClass().getName());
  return new TableDesc(tbl.getInputFormatClass(), tbl.getOutputFormatClass(), props);
}
// Builds a TableDesc (input/output format + properties) for the given table.
// NOTE(review): puts SERIALIZATION_LIB into the Properties returned by
// tbl.getMetadata() — a side effect on the table's metadata object; confirm intended.
public static TableDesc getTableDesc(Table tbl) { Properties props = tbl.getMetadata(); props.put(serdeConstants.SERIALIZATION_LIB, tbl.getDeserializer().getClass().getName()); return (new TableDesc(tbl.getInputFormatClass(), tbl .getOutputFormatClass(), props)); }
final public Class<? extends InputFormat> getInputFormatClass() throws HiveException { if (inputFormatClass == null) { // sd can be null for views String clsName = tPartition.getSd() == null ? null : tPartition.getSd().getInputFormat(); if (clsName == null) { return inputFormatClass = table.getInputFormatClass(); } try { inputFormatClass = ((Class<? extends InputFormat>) Class.forName(clsName, true, Utilities.getSessionSpecifiedClassLoader())); } catch (ClassNotFoundException e) { throw new HiveException("Class not found: " + clsName, e); } } return inputFormatClass; }
// Lazily resolves and caches the partition's InputFormat class. The storage
// descriptor can be null (views); in that case, or when it records no explicit
// input format, the owning table's InputFormat is cached and returned instead.
// Throws HiveException (wrapping ClassNotFoundException) if the class can't load.
final public Class<? extends InputFormat> getInputFormatClass() throws HiveException { if (inputFormatClass == null) { // sd can be null for views String clsName = tPartition.getSd() == null ? null : tPartition.getSd().getInputFormat(); if (clsName == null) { return inputFormatClass = table.getInputFormatClass(); } try { inputFormatClass = ((Class<? extends InputFormat>) Class.forName(clsName, true, Utilities.getSessionSpecifiedClassLoader())); } catch (ClassNotFoundException e) { throw new HiveException("Class not found: " + clsName, e); } } return inputFormatClass; }
// Fragment: capture the table's data location plus null-safe input/output
// format class names (format classes can be null, e.g. non-native tables).
tblLoc = tbl.getDataLocation().toString(); inputFormattCls = tbl.getInputFormatClass() == null ? null : tbl.getInputFormatClass().getName(); outputFormattCls = tbl.getOutputFormatClass() == null ? null : tbl.getOutputFormatClass().getName();
/**
 * Verifies that the files being loaded match the destination's file format.
 *
 * The destination InputFormat is taken from the partition handle when a
 * partition spec is present, otherwise from the table handle.
 *
 * @throws SemanticException if the format cannot be resolved, the files do not
 *         match the destination format, or the check itself fails
 */
private void ensureFileFormatsMatch(TableSpec ts, List<FileStatus> fileStatuses,
    final URI fromURI) throws SemanticException {
  final Class<? extends InputFormat> destInputFormat;
  try {
    if (ts.getPartSpec() == null || ts.getPartSpec().isEmpty()) {
      destInputFormat = ts.tableHandle.getInputFormatClass();
    } else {
      destInputFormat = ts.partHandle.getInputFormatClass();
    }
  } catch (HiveException e) {
    throw new SemanticException(e);
  }
  try {
    FileSystem fs = FileSystem.get(fromURI, conf);
    boolean validFormat =
        HiveFileFormatUtils.checkInputFormat(fs, conf, destInputFormat, fileStatuses);
    if (!validFormat) {
      throw new SemanticException(ErrorMsg.INVALID_FILE_FORMAT_IN_LOAD.getMsg());
    }
  } catch (SemanticException e) {
    // Do not re-wrap our own format-mismatch error with a generic message.
    throw e;
  } catch (Exception e) {
    // Chain the cause so the original stack trace is not lost.
    throw new SemanticException("Unable to load data to destination table."
        + " Error: " + e.getMessage(), e);
  }
}
// Fragment: measure input size under the table path, with 'threshold' as an
// early-out bound (see getPathLength). NOTE(review): the debug message labels
// 'len' as "Threshold" — 'len' appears to be the measured length, not the
// limit; confirm the wording is intentional.
long len = getPathLength(jobConf, table.getPath(), table.getInputFormatClass(), threshold); if (LOG.isDebugEnabled()) { LOG.debug("Threshold " + len + " exceeded for pseudoMR mode");
/**
 * Verifies that the files being loaded match the destination's file format.
 *
 * The destination InputFormat is taken from the partition handle when a
 * partition spec is present, otherwise from the table handle.
 *
 * @throws SemanticException if the format cannot be resolved, the files do not
 *         match the destination format, or the check itself fails
 */
private void ensureFileFormatsMatch(TableSpec ts, List<FileStatus> fileStatuses,
    final URI fromURI) throws SemanticException {
  final Class<? extends InputFormat> destInputFormat;
  try {
    if (ts.getPartSpec() == null || ts.getPartSpec().isEmpty()) {
      destInputFormat = ts.tableHandle.getInputFormatClass();
    } else {
      destInputFormat = ts.partHandle.getInputFormatClass();
    }
  } catch (HiveException e) {
    throw new SemanticException(e);
  }
  try {
    FileSystem fs = FileSystem.get(fromURI, conf);
    boolean validFormat =
        HiveFileFormatUtils.checkInputFormat(fs, conf, destInputFormat, fileStatuses);
    if (!validFormat) {
      throw new SemanticException(ErrorMsg.INVALID_FILE_FORMAT_IN_LOAD.getMsg());
    }
  } catch (SemanticException e) {
    // Do not re-wrap our own format-mismatch error with a generic message.
    throw e;
  } catch (Exception e) {
    // Chain the cause so the original stack trace is not lost.
    throw new SemanticException("Unable to load data to destination table."
        + " Error: " + e.getMessage(), e);
  }
}
}
// Fragment: capture the table's data location plus null-safe input/output
// format class names (format classes can be null, e.g. non-native tables).
tblLoc = tbl.getDataLocation().toString(); inputFormattCls = tbl.getInputFormatClass() == null ? null : tbl.getInputFormatClass().getName(); outputFormattCls = tbl.getOutputFormatClass() == null ? null : tbl.getOutputFormatClass().getName();
// Fragment: start of a stats-gathering task builder for a table scan (body
// continues beyond this view). Resolves the scan target's InputFormat up
// front — presumably because stats collection differs by format; confirm
// against the rest of the method.
private Task<?> genTableStats(ParseContext parseContext, TableScanOperator tableScan, Task currentTask, final HashSet<WriteEntity> outputs) throws HiveException { Class<? extends InputFormat> inputFormat = tableScan.getConf().getTableMetadata() .getInputFormatClass(); Table table = tableScan.getConf().getTableMetadata(); List<Partition> partitions = new ArrayList<>();
// Fragment: resolve the table's InputFormat class; how it is consumed lies
// outside this view.
Class<? extends InputFormat> inputFormat = table.getInputFormatClass();
// Capture the table's data location plus input/output format class names.
// Null-guard the format classes for consistency with the other call sites in
// this codebase (they can be null, e.g. for non-native/storage-handler
// tables); calling getName() unguarded would NPE there.
tblLoc = tbl.getDataLocation().toString(); inputFormattCls = tbl.getInputFormatClass() == null ? null : tbl.getInputFormatClass().getName(); outputFormattCls = tbl.getOutputFormatClass() == null ? null : tbl.getOutputFormatClass().getName();
// Fragment: resolve the table's InputFormat class; how it is consumed lies
// outside this view.
Class<? extends InputFormat> inputFormat = table.getInputFormatClass();
// Validation fragment (enclosing method not visible): the table must declare
// both a serDe and an InputFormat class, else creation is rejected.
throw new HiveException("must specify a non-null serDe"); if (null == getInputFormatClass()) { throw new HiveException("must specify an InputFormat class");
// Fragment: start of the Tez variant of the stats-gathering task builder (body
// continues beyond this view). Resolves the scan target's InputFormat up
// front — presumably because stats collection differs by format; confirm
// against the rest of the method.
private Task<?> genTableStats(GenTezProcContext context, TableScanOperator tableScan) throws HiveException { Class<? extends InputFormat> inputFormat = tableScan.getConf().getTableMetadata() .getInputFormatClass(); ParseContext parseContext = context.parseContext; Table table = tableScan.getConf().getTableMetadata();
// Fragment: resolve the table's InputFormat class and grab the current
// map-phase operator context; usage lies outside this view.
Class<? extends InputFormat> inputFormat = table.getInputFormatClass(); Map<Operator<? extends OperatorDesc>, GenMapRedCtx> mapCurrCtx = ctx.getMapCurrCtx();
// Validation fragment (enclosing method not visible): the table must declare
// both a serDe and an InputFormat class, else creation is rejected.
throw new HiveException("must specify a non-null serDe"); if (null == getInputFormatClass()) { throw new HiveException("must specify an InputFormat class");
// Fragment: apply the requested output format, then — only when an input
// format string was explicitly supplied — propagate the resolved InputFormat
// class name into the table's storage descriptor.
tbl.setOutputFormatClass(getOutputFormat()); if (getInputFormat() != null && !getInputFormat().isEmpty()) { tbl.getSd().setInputFormat(tbl.getInputFormatClass().getName());