// Fragment of DDLTask describe-table handling: resolves the optional partition
// (failing with INVALID_PARTITION when the requested spec does not exist),
// opens the result-file output stream, and — for EXTENDED/FORMATTED output —
// fetches primary-key and foreign-key metadata before formatting the columns.
// NOTE(review): this is a truncated excerpt — braces are unbalanced and several
// expressions (e.g. "tbl.getCols() : part.getCols();") are cut mid-statement;
// do not treat it as compilable code.
String colPath = descTbl.getColumnPath(); String tableName = descTbl.getTableName(); if (descTbl.getPartSpec() != null) { part = db.getPartition(tbl, descTbl.getPartSpec(), false); if (part == null) { throw new HiveException(ErrorMsg.INVALID_PARTITION, StringUtils.join(descTbl.getPartSpec().keySet(), ','), tableName); DataOutputStream outStream = getOutputStream(descTbl.getResFile()); try { LOG.debug("DDLTask: got data for " + tbl.getTableName()); tbl.getCols() : part.getCols(); if (!descTbl.isFormatted()) { cols.addAll(tbl.getPartCols()); if (descTbl.isFormatted()) { if (descTbl.isExt() || descTbl.isFormatted()) { pkInfo = db.getPrimaryKeys(tbl.getDbName(), tbl.getTableName()); fkInfo = db.getForeignKeys(tbl.getDbName(), tbl.getTableName()); cols, descTbl.isFormatted(), descTbl.isExt(), descTbl.isPretty(), isOutputPadded, colStats, pkInfo, fkInfo);
/**
 * Builds the column headers for DESCRIBE TABLE output.
 *
 * <p>The schema string returned by {@code DescTableDesc.getSchema} has the
 * form {@code "name1,name2,...#type1:type2:..."}: everything before the
 * {@code '#'} is the comma-separated header list, which this method splits
 * out and returns.
 *
 * @param colStats column statistics, or {@code null} when statistics are not
 *                 being displayed; only its null-ness is consulted
 * @return the header names parsed from the schema's name section
 */
public static String[] getColumnsHeader(List<ColumnStatisticsObj> colStats) {
  // Direct boolean assignment replaces the verbose if-then-set pattern.
  boolean showColStats = colStats != null;
  return DescTableDesc.getSchema(showColStats).split("#")[0].split(",");
}
// Fragment of DESCRIBE-TABLE analysis: builds a DescTableDesc (setting the
// FORMATTED / EXTENDED flags from the optional second AST child), enables
// column statistics when a column path other than the table name is described
// with FORMATTED, queues the DDL task, and installs a fetch task whose schema
// depends on whether column stats will be shown.
// NOTE(review): truncated excerpt — several blocks are left unclosed and the
// task-creation call is cut off ("descTblDesc));"); not compilable as-is.
DescTableDesc descTblDesc = new DescTableDesc( ctx.getResFile(), tableName, partSpec, colPath); if (ast.getChildCount() == 2) { int descOptions = ast.getChild(1).getType(); descTblDesc.setFormatted(descOptions == HiveParser.KW_FORMATTED); descTblDesc.setExt(descOptions == HiveParser.KW_EXTENDED); if (!colPath.equalsIgnoreCase(tableName) && descTblDesc.isFormatted()) { showColStats = true; descTblDesc)); rootTasks.add(ddlTask); String schema = DescTableDesc.getSchema(showColStats); setFetchTask(createFetchTask(schema)); LOG.info("analyzeDescribeTable done");
// Resolves the table name from the describe descriptor (extractTableName
// presumably strips any column path — TODO confirm against its definition)
// and requires SELECT privilege on that table before the describe proceeds.
// NOTE(review): fragment — the enclosing method is outside this view.
String tableName = extractTableName(descTable.getTableName()); authorizeTable(cntxt.getHive(), tableName, Privilege.SELECT);
// Fragment of DESCRIBE-TABLE analysis (variant supporting KW_PRETTY): builds
// a DescTableDesc with FORMATTED / EXTENDED / PRETTY flags from the optional
// second AST child, enables column statistics for FORMATTED column-path
// describes, queues the DDL task, and installs the fetch task with a schema
// that depends on whether column stats will be shown.
// NOTE(review): truncated excerpt — unclosed blocks and a cut-off task call
// ("descTblDesc), conf);"); not compilable as-is.
DescTableDesc descTblDesc = new DescTableDesc( ctx.getResFile(), tableName, partSpec, colPath); if (ast.getChildCount() == 2) { int descOptions = ast.getChild(1).getType(); descTblDesc.setFormatted(descOptions == HiveParser.KW_FORMATTED); descTblDesc.setExt(descOptions == HiveParser.KW_EXTENDED); descTblDesc.setPretty(descOptions == HiveParser.KW_PRETTY); if (!colPath.equalsIgnoreCase(tableName) && descTblDesc.isFormatted()) { showColStats = true; descTblDesc), conf); rootTasks.add(ddlTask); String schema = DescTableDesc.getSchema(showColStats); setFetchTask(createFetchTask(schema)); LOG.info("analyzeDescribeTable done");
// Resolves the table name from the describe descriptor and requires SELECT
// privilege on it before allowing the describe.
// NOTE(review): fragment — the enclosing method is outside this view.
String tableName = extractTableName(descTable.getTableName()); authorizeTable(cntxt.getHive(), tableName, Privilege.SELECT);
// Fragment of a later DDLTask describe-table revision: same partition
// resolution and output-stream setup as older versions, but uses
// parameterized SLF4J-style logging and additionally gathers check-constraint
// and storage-handler info for EXTENDED/FORMATTED output.
// NOTE(review): truncated excerpt — unbalanced braces and a trailing argument
// list without its call ("cols, descTbl.isFormatted(), ..."); not compilable.
String colPath = descTbl.getColumnPath(); String tableName = descTbl.getTableName(); if (descTbl.getPartSpec() != null) { part = db.getPartition(tbl, descTbl.getPartSpec(), false); if (part == null) { throw new HiveException(ErrorMsg.INVALID_PARTITION, StringUtils.join(descTbl.getPartSpec().keySet(), ','), tableName); DataOutputStream outStream = getOutputStream(descTbl.getResFile()); try { LOG.debug("DDLTask: got data for {}", tableName); if (!descTbl.isFormatted()) { cols.addAll(tbl.getPartCols()); if (descTbl.isFormatted()) { CheckConstraint cInfo = null; StorageHandlerInfo storageHandlerInfo = null; if (descTbl.isExt() || descTbl.isFormatted()) { pkInfo = db.getPrimaryKeys(tbl.getDbName(), tbl.getTableName()); fkInfo = db.getForeignKeys(tbl.getDbName(), tbl.getTableName()); cols, descTbl.isFormatted(), descTbl.isExt(), isOutputPadded, colStats, pkInfo, fkInfo, ukInfo, nnInfo, dInfo, cInfo, storageHandlerInfo);
private void analyzeDescribeTable(ASTNode ast) throws SemanticException { ASTNode tableTypeExpr = (ASTNode) ast.getChild(0); String tableName = getFullyQualifiedName((ASTNode) tableTypeExpr .getChild(0)); HashMap<String, String> partSpec = null; // get partition metadata if partition specified if (tableTypeExpr.getChildCount() == 2) { ASTNode partspec = (ASTNode) tableTypeExpr.getChild(1); partSpec = getPartSpec(partspec); } DescTableDesc descTblDesc = new DescTableDesc(ctx.getResFile(), tableName, partSpec); if (ast.getChildCount() == 2) { int descOptions = ast.getChild(1).getType(); descTblDesc.setFormatted(descOptions == HiveParser.KW_FORMATTED); descTblDesc.setExt(descOptions == HiveParser.KW_EXTENDED); } rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), descTblDesc), conf)); setFetchTask(createFetchTask(DescTableDesc.getSchema())); LOG.info("analyzeDescribeTable done"); }
// Resolves the table name from the describe descriptor and requires SELECT
// privilege on it before allowing the describe.
// NOTE(review): fragment — the enclosing method is outside this view.
String tableName = extractTableName(descTable.getTableName()); authorizeTable(cntxt.getHive(), tableName, Privilege.SELECT);
/**
 * Returns the header column names for DESCRIBE TABLE output.
 *
 * <p>The schema string is of the form {@code "names#types"}; the names
 * section before {@code '#'} is comma-separated and becomes the result.
 *
 * @param colStats column statistics, or {@code null} when stats are omitted;
 *                 only its null-ness is consulted
 * @return the header names split from the schema's name section
 */
public static String[] getColumnsHeader(List<ColumnStatisticsObj> colStats) {
  // Statistics columns are included only when stats were actually supplied.
  final boolean includeStats = (colStats != null);
  final String schema = DescTableDesc.getSchema(includeStats);
  final String namesSection = schema.split("#")[0];
  return namesSection.split(",");
}
// Fragment of an earlier DDLTask describe-table revision that writes through
// a FileSystem-created stream: resolves the optional partition (throwing
// INVALID_PARTITION when absent), creates the result file, then formats the
// columns; FORMATTED/EXTENDED/PRETTY flags select the output style.
// NOTE(review): truncated excerpt — unbalanced braces, repeated
// "FileSystem fs = ..." setup lines, and a trailing argument list without its
// call; not compilable as-is.
String colPath = descTbl.getColumnPath(); String tableName = descTbl.getTableName(); DataOutputStream outStream = null; try { Path resFile = new Path(descTbl.getResFile()); if (tbl == null) { FileSystem fs = resFile.getFileSystem(conf); if (descTbl.getPartSpec() != null) { part = db.getPartition(tbl, descTbl.getPartSpec(), false); if (part == null) { FileSystem fs = resFile.getFileSystem(conf); outStream = null; throw new HiveException(ErrorMsg.INVALID_PARTITION, StringUtils.join(descTbl.getPartSpec().keySet(), ','), tableName); Path resFile = new Path(descTbl.getResFile()); FileSystem fs = resFile.getFileSystem(conf); outStream = fs.create(resFile); if (!descTbl.isFormatted()) { cols.addAll(tbl.getPartCols()); if (descTbl.isFormatted()) { cols, descTbl.isFormatted(), descTbl.isExt(), descTbl.isPretty(), isOutputPadded, colStats);
// Fragment of DESCRIBE-TABLE analysis (PRETTY-capable variant): builds the
// DescTableDesc with FORMATTED / EXTENDED / PRETTY flags, enables column
// statistics for FORMATTED column-path describes, queues the DDL task, and
// installs the fetch task whose schema reflects whether stats are shown.
// NOTE(review): truncated excerpt — unclosed blocks and a cut-off task call;
// not compilable as-is.
DescTableDesc descTblDesc = new DescTableDesc( ctx.getResFile(), tableName, partSpec, colPath); if (ast.getChildCount() == 2) { int descOptions = ast.getChild(1).getType(); descTblDesc.setFormatted(descOptions == HiveParser.KW_FORMATTED); descTblDesc.setExt(descOptions == HiveParser.KW_EXTENDED); descTblDesc.setPretty(descOptions == HiveParser.KW_PRETTY); if (!colPath.equalsIgnoreCase(tableName) && descTblDesc.isFormatted()) { showColStats = true; descTblDesc), conf); rootTasks.add(ddlTask); String schema = DescTableDesc.getSchema(showColStats); setFetchTask(createFetchTask(schema)); LOG.info("analyzeDescribeTable done");
// Resolves the table name from the describe descriptor and requires SELECT
// privilege on it before allowing the describe.
// NOTE(review): fragment — the enclosing method is outside this view.
String tableName = extractTableName(descTable.getTableName()); authorizeTable(cntxt.getHive(), tableName, Privilege.SELECT);
/**
 * Computes the header column names for DESCRIBE TABLE output.
 *
 * <p>Delegates to {@code DescTableDesc.getSchema}, whose result has the
 * shape {@code "names#types"}; this method keeps only the comma-separated
 * names section.
 *
 * @param colStats column statistics; a non-null value switches the schema to
 *                 its stats-bearing form
 * @return the parsed header names
 */
public static String[] getColumnsHeader(List<ColumnStatisticsObj> colStats) {
  boolean withStats = !(colStats == null);
  return DescTableDesc.getSchema(withStats).split("#")[0].split(",");
}
// Fragment of the oldest describe-table revision: derives the table name by
// chopping the column path at the first '.', reports a missing partition by
// writing an error message into the result file, then writes all-columns
// information via MetaDataFormatUtils.
// NOTE(review): "(DataOutput) fs.open(resFile)" on the missing-partition path
// looks wrong — FileSystem.open returns an input stream for reading, which
// cannot serve as a DataOutput sink for the error message; later revisions
// use fs.create(resFile) here. Confirm against the enclosing file.
// NOTE(review): truncated excerpt — unbalanced braces; not compilable as-is.
String colPath = descTbl.getTableName(); String tableName = colPath.substring(0, colPath.indexOf('.') == -1 ? colPath.length() : colPath.indexOf('.')); Partition part = null; try { Path resFile = new Path(descTbl.getResFile()); if (tbl == null) { FileSystem fs = resFile.getFileSystem(conf); if (descTbl.getPartSpec() != null) { part = db.getPartition(tbl, descTbl.getPartSpec(), false); if (part == null) { FileSystem fs = resFile.getFileSystem(conf); DataOutput outStream = (DataOutput) fs.open(resFile); String errMsg = "Partition " + descTbl.getPartSpec() + " for table " + tableName + " does not exist"; outStream.write(errMsg.getBytes("UTF-8")); Path resFile = new Path(descTbl.getResFile()); FileSystem fs = resFile.getFileSystem(conf); DataOutput outStream = fs.create(resFile); if (!descTbl.isFormatted()) { List<FieldSchema> cols = tbl.getCols(); if (tableName.equals(colPath)) { if (descTbl.isFormatted()) { outStream.writeBytes(MetaDataFormatUtils.getAllColumnsInformation(cols));
// Resolves the table name from the describe descriptor and requires SELECT
// privilege on it before allowing the describe.
// NOTE(review): fragment — the enclosing method is outside this view.
String tableName = extractTableName(descTable.getTableName()); authorizeTable(cntxt.getHive(), tableName, Privilege.SELECT);
/**
 * Returns the header column names for DESCRIBE TABLE output.
 *
 * <p>The schema string from {@code DescTableDesc.getSchema} has the form
 * {@code "names#types"}; only the comma-separated names section is returned.
 *
 * @return the header names parsed from the schema
 */
public static String[] getColumnsHeader() {
  final String namesSection = DescTableDesc.getSchema().split("#")[0];
  return namesSection.split(",");
}