Refine search
/**
 * Converts a Thrift metastore {@link org.apache.hadoop.hive.metastore.api.Schema}
 * into the internal {@link Schema} representation, column by column.
 *
 * @param obj the metastore schema; must be an instance of
 *            org.apache.hadoop.hive.metastore.api.Schema (cast will throw otherwise)
 * @return the converted schema with one FieldDesc per metastore column
 * @throws HiveException if a column's type string cannot be converted
 */
private Schema convertSchema(Object obj) throws HiveException {
  org.apache.hadoop.hive.metastore.api.Schema schema =
      (org.apache.hadoop.hive.metastore.api.Schema) obj;
  List<FieldDesc> colDescs = new ArrayList<FieldDesc>();
  for (FieldSchema fs : schema.getFieldSchemas()) {
    // Each metastore column is a (name, type-string) pair; convert the type
    // string into a structured TypeDesc.
    TypeDesc typeDesc = convertTypeString(fs.getType());
    colDescs.add(new FieldDesc(fs.getName(), typeDesc));
  }
  // Fixed: the original bound the result to a local variable named "Schema",
  // shadowing the class name; return the new instance directly instead.
  return new Schema(colDescs);
}
/**
 * Builds the single-column result schema for this operation: one string
 * column named {@code SET_COLUMN_NAME}, with the serialization null-format
 * property set to the configured default null string.
 *
 * @return a freshly constructed one-column Schema
 */
private Schema getSchema() {
  // Describe the lone output column (name + string type).
  FieldSchema column = new FieldSchema();
  column.setName(SET_COLUMN_NAME);
  column.setType(STRING_TYPE_NAME);

  Schema result = new Schema();
  result.putToProperties(SERIALIZATION_NULL_FORMAT, defaultNullString);
  result.addToFieldSchemas(column);
  return result;
}
/**
 * Performs a deep copy on <i>other</i>.
 */
public Schema(Schema other) {
  if (other.isSetFieldSchemas()) {
    // FieldSchema instances are mutable, so each element must be copied
    // individually rather than sharing references.
    List<FieldSchema> copiedFields = new ArrayList<FieldSchema>(other.fieldSchemas.size());
    for (FieldSchema field : other.fieldSchemas) {
      copiedFields.add(new FieldSchema(field));
    }
    this.fieldSchemas = copiedFields;
  }
  if (other.isSetProperties()) {
    // Keys and values are immutable Strings; a shallow map copy suffices.
    this.properties = new HashMap<String,String>(other.properties);
  }
}
/**
 * Returns the value of the given Thrift field.
 *
 * @param field which field to fetch
 * @return the field's current value
 * @throws IllegalStateException if the field is not a known member of _Fields
 */
public Object getFieldValue(_Fields field) {
  switch (field) {
    case FIELD_SCHEMAS:
      return getFieldSchemas();
    case PROPERTIES:
      return getProperties();
    default:
      // Unreachable for a well-formed _Fields value.
      throw new IllegalStateException();
  }
}
/**
 * Builds the two-column result schema for this command: the host name and
 * the number of purged memory bytes, both rendered as strings, with the
 * serialization null-format property set to the configured default.
 *
 * @return a freshly constructed two-column Schema
 */
private Schema getSchema() {
  Schema result = new Schema();
  // Both output columns are string-typed with no comment.
  for (String columnName : new String[] {"hostName", "purgedMemoryBytes"}) {
    result.addToFieldSchemas(new FieldSchema(columnName, "string", ""));
  }
  result.putToProperties(SERIALIZATION_NULL_FORMAT, defaultNullString);
  return result;
}
/** * If enabled and applicable to this command, print the field headers * for the output. * * @param qp Driver that executed the command * @param out PrintStream which to send output to */ private void printHeader(IDriver qp, PrintStream out) { List<FieldSchema> fieldSchemas = qp.getSchema().getFieldSchemas(); if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CLI_PRINT_HEADER) && fieldSchemas != null) { // Print the column names boolean first_col = true; for (FieldSchema fs : fieldSchemas) { if (!first_col) { out.print('\t'); } out.print(fs.getName()); first_col = false; } out.println(); } }
// NOTE(review): fragment of Thrift-generated deserialization code; the braces
// are unbalanced in this excerpt, so the enclosing loop/switch is outside the
// visible context. It appears to read _list300.size FieldSchema elements from
// the input protocol into struct.fieldSchemas, skip unknown fields, and
// finally validate the struct — confirm against the full generated file.
for (int _i302 = 0; _i302 < _list300.size; ++_i302) _elem301 = new FieldSchema(); _elem301.read(iprot); struct.fieldSchemas.add(_elem301); struct.setFieldSchemasIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); struct.setPropertiesIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); struct.validate();
/** * Get a Schema with fields represented with Thrift DDL types */ public Schema getThriftSchema() throws Exception { Schema schema; try { schema = getSchema(); if (schema != null) { List<FieldSchema> lst = schema.getFieldSchemas(); // Go over the schema and convert type to thrift type if (lst != null) { for (FieldSchema f : lst) { f.setType(ColumnType.typeToThriftType(f.getType())); } } } } catch (Exception e) { e.printStackTrace(); throw e; } LOG.info("Returning Thrift schema: " + schema); return schema; }
// Creates a DFS command processor backed by an FsShell, with a one-column
// result schema (DFS_RESULT_HEADER, string type).
// NOTE(review): the addSchema parameter is never read in this excerpt —
// confirm whether it was meant to make the schema column optional.
public DfsProcessor(Configuration conf, boolean addSchema) { dfs = new FsShell(conf); dfsSchema = new Schema(); dfsSchema.addToFieldSchemas(new FieldSchema(DFS_RESULT_HEADER, "string", "")); }
// NOTE(review): fragment of Thrift deserialization — reads one FieldSchema
// element from the input protocol and appends it to this.fieldSchemas, then
// validates. The enclosing loop and braces are outside this excerpt.
_elem91 = new FieldSchema(); _elem91.read(iprot); this.fieldSchemas.add(_elem91); validate();
// Thrift TupleScheme deserializer for getSchema_result: a 2-entry bitset
// signals which optional fields follow on the wire — bit 0 is the success
// Schema, bit 1 is the HiveServerException. Each present field is read into a
// freshly constructed struct and its "is set" flag raised.
// (Generated code; kept byte-identical. Trailing brace closes the scheme class.)
@Override public void read(org.apache.thrift.protocol.TProtocol prot, getSchema_result struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; BitSet incoming = iprot.readBitSet(2); if (incoming.get(0)) { struct.success = new org.apache.hadoop.hive.metastore.api.Schema(); struct.success.read(iprot); struct.setSuccessIsSet(true); } if (incoming.get(1)) { struct.ex = new HiveServerException(); struct.ex.read(iprot); struct.setExIsSet(true); } } }
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
  if (field == null) {
    throw new IllegalArgumentException();
  }
  switch (field) {
    case FIELD_SCHEMAS:
      return isSetFieldSchemas();
    case PROPERTIES:
      return isSetProperties();
    default:
      // Unreachable for a well-formed _Fields value.
      throw new IllegalStateException();
  }
}
// Mongoose schema for a post. `postedBy` and each comment's `by` reference
// documents in the User collection via ObjectId.
var postSchema = new Schema({
  name: String,
  postedBy: {type: mongoose.Schema.Types.ObjectId, ref: 'User'},
  dateCreated: Date,
  // Fixed: use the String constructor for `body`, consistent with the other
  // fields (Mongoose treats "string" and String as the same SchemaType).
  comments: [{body: String, by: mongoose.Schema.Types.ObjectId}],
});
/**
 * Builds a TableSchema from a Thrift Schema by delegating to the
 * field-schema-list constructor with the schema's field schemas.
 */
public TableSchema(Schema schema) { this(schema.getFieldSchemas()); }
// NOTE(review): fragment of Thrift-generated TupleScheme serialization; the
// if-blocks are unbalanced in this excerpt, so enclosing context is missing.
// It appears to build a bitset recording which optional fields (fieldSchemas,
// properties) are set, write it, then write each present field — confirm
// against the full generated file.
TTupleProtocol oprot = (TTupleProtocol) prot; BitSet optionals = new BitSet(); if (struct.isSetFieldSchemas()) { optionals.set(0); if (struct.isSetProperties()) { optionals.set(1); if (struct.isSetFieldSchemas()) { _iter309.write(oprot); if (struct.isSetProperties()) {
/**
 * Equality against an arbitrary object: delegates to the type-specific
 * equals(Schema) when the argument is a Schema, false otherwise.
 */
@Override
public boolean equals(Object that) {
  // instanceof is false for null, so no separate null check is needed.
  return (that instanceof Schema) && this.equals((Schema) that);
}
/**
 * Java-serialization hook: writes this object through a Thrift compact
 * protocol layered on the serialization stream.
 *
 * @throws java.io.IOException wrapping any Thrift failure, as required by
 *         the writeObject contract
 */
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
  org.apache.thrift.transport.TIOStreamTransport transport =
      new org.apache.thrift.transport.TIOStreamTransport(out);
  try {
    write(new org.apache.thrift.protocol.TCompactProtocol(transport));
  } catch (org.apache.thrift.TException te) {
    // Java serialization only permits IOException, so wrap and preserve the cause.
    throw new java.io.IOException(te);
  }
}
@Override public boolean hasResultSet() { // TODO explain should use a FetchTask for reading for (Task<? extends Serializable> task : plan.getRootTasks()) { if (task.getClass() == ExplainTask.class) { return true; } } if (plan.getFetchTask() != null && schema != null && schema.isSetFieldSchemas()) { return true; } else { return false; } }
public void validate() throws org.apache.thrift.TException { // check for required fields // check for sub-struct validity if (success != null) { success.validate(); } }