/**
 * Deserializes this column description from the stream: two UTF strings,
 * the field name followed by the Hive type string. The read order must
 * match the corresponding write's layout.
 */
@Override
public void readFields(DataInput in) throws IOException {
  // Name first, then type string — the serialized order is fixed.
  final String fieldName = in.readUTF();
  final String typeString = in.readUTF();
  name = fieldName;
  typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(typeString);
}
}
/**
 * Jackson factory: materializes a {@code HiveType} from its Hive type-name
 * string (e.g. {@code "map<string,int>"}).
 *
 * @param hiveTypeName Hive type string; must not be null
 * @return the corresponding HiveType
 */
@JsonCreator
public static HiveType valueOf(String hiveTypeName) {
  requireNonNull(hiveTypeName, "hiveTypeName is null");
  TypeInfo parsed = getTypeInfoFromTypeString(hiveTypeName);
  return toHiveType(parsed);
}
/**
 * Parses each Hive type-name string into its {@link TypeInfo}.
 *
 * @param typeNames Hive type strings, e.g. "int", "struct&lt;a:string&gt;"
 * @return parsed TypeInfos in the same order as {@code typeNames}
 */
public static ArrayList<TypeInfo> typeInfosFromTypeNames(List<String> typeNames) {
  // Presized result; enhanced-for replaces the manual index loop.
  ArrayList<TypeInfo> result = new ArrayList<>(typeNames.size());
  for (String typeName : typeNames) {
    result.add(TypeInfoUtils.getTypeInfoFromTypeString(typeName));
  }
  return result;
}
/**
 * Whether the Hive type described by {@code typeName} is supported;
 * parses the type string and delegates to the TypeInfo-based overload.
 */
public static boolean isSupportedField(String typeName) {
  return isSupportedField(TypeInfoUtils.getTypeInfoFromTypeString(typeName));
}
/**
 * Test hook: sizes the internal arrays and initializes one entry per type
 * name, using each name's list position as both input and output index.
 */
@VisibleForTesting
void init(List<String> typeNames) throws HiveException {
  allocateArrays(typeNames.size());
  int index = 0;
  for (String typeName : typeNames) {
    initEntry(index, index, TypeInfoUtils.getTypeInfoFromTypeString(typeName));
    index++;
  }
}
public static ColumnVector createColumnVector(String typeName, DataTypePhysicalVariation dataTypePhysicalVariation) { typeName = typeName.toLowerCase(); // Allow undecorated CHAR and VARCHAR to support scratch column type names. if (typeName.equals("char") || typeName.equals("varchar")) { return new BytesColumnVector(VectorizedRowBatch.DEFAULT_SIZE); } TypeInfo typeInfo = (TypeInfo) TypeInfoUtils.getTypeInfoFromTypeString(typeName); return createColumnVector(typeInfo, dataTypePhysicalVariation); }
/**
 * Sizes the internal arrays and registers one target entry per type name,
 * keyed by the name's position in the list.
 */
public void init(List<String> typeNames) throws HiveException {
  final int n = typeNames.size();
  allocateArrays(n);
  for (int idx = 0; idx < n; idx++) {
    initTargetEntry(idx, idx,
        TypeInfoUtils.getTypeInfoFromTypeString(typeNames.get(idx)));
  }
}
/**
 * Lazily allocates the overflow batch's column vector for
 * {@code outputColumn} if not already present, resolving type-name
 * synonyms before parsing the type.
 */
private void allocateOverflowBatchColumnVector(VectorizedRowBatch overflowBatch,
    int outputColumn, String typeName) throws HiveException {
  if (overflowBatch.cols[outputColumn] == null) {
    typeName = VectorizationContext.mapTypeNameSynonyms(typeName);
    TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(typeName);
    overflowBatch.cols[outputColumn] = VectorizedBatchUtil.createColumnVector(typeInfo);
    if (LOG.isDebugEnabled()) {
      // Parameterized logging instead of string concatenation; the guard is
      // kept so getLoggingPrefix() is not invoked on non-debug runs.
      LOG.debug("{} VectorMapJoinCommonOperator initializeOp overflowBatch outputColumn {} class {}",
          getLoggingPrefix(), outputColumn,
          overflowBatch.cols[outputColumn].getClass().getSimpleName());
    }
  }
}
/**
 * Initializes this SerDe directly from an HCatSchema: the schema's type
 * string is parsed into the row's StructTypeInfo and a matching HCatRecord
 * object inspector is cached.
 */
public void initialize(HCatSchema hsch) throws SerDeException {
  LOG.debug("Initializing HCatRecordSerDe through HCatSchema {}.", hsch);
  TypeInfo parsed = TypeInfoUtils.getTypeInfoFromTypeString(hsch.getSchemaAsTypeString());
  rowTypeInfo = (StructTypeInfo) parsed;
  cachedObjectInspector =
      HCatRecordObjectInspectorFactory.getHCatRecordObjectInspector(rowTypeInfo);
}
/**
 * Collects the {@link TypeInfo} of every field of the given struct
 * inspector, preserving field order.
 */
public static ArrayList<TypeInfo> typeInfosFromStructObjectInspector(
    StructObjectInspector structObjectInspector) {
  List<? extends StructField> fields = structObjectInspector.getAllStructFieldRefs();
  ArrayList<TypeInfo> typeInfoList = new ArrayList<>(fields.size());
  for (StructField field : fields) {
    String fieldTypeName = field.getFieldObjectInspector().getTypeName();
    typeInfoList.add(TypeInfoUtils.getTypeInfoFromTypeString(fieldTypeName));
  }
  return typeInfoList;
}
/**
 * Converts metastore FieldSchemas into FieldDescs by parsing each column's
 * type string.
 *
 * @param fieldSchemas columns to convert
 * @return one FieldDesc per input column, in order
 */
private List<FieldDesc> convertSchema(List<FieldSchema> fieldSchemas) {
  // Presized to the known column count (the original used the default
  // capacity); diamond replaces the explicit type argument.
  List<FieldDesc> colDescs = new ArrayList<>(fieldSchemas.size());
  for (FieldSchema fs : fieldSchemas) {
    colDescs.add(new FieldDesc(fs.getName(),
        TypeInfoUtils.getTypeInfoFromTypeString(fs.getType())));
  }
  return colDescs;
}
/**
 * Lazily creates the overflow batch's vector for {@code outputColumn},
 * mapping type-name synonyms before parsing the type.
 */
private void allocateOverflowBatchColumnVector(VectorizedRowBatch overflowBatch,
    int outputColumn, String typeName) throws HiveException {
  if (overflowBatch.cols[outputColumn] != null) {
    return; // already allocated
  }
  String canonicalName = VectorizationContext.mapTypeNameSynonyms(typeName);
  TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(canonicalName);
  overflowBatch.cols[outputColumn] = VectorizedBatchUtil.createColumnVector(typeInfo);
}
/**
 * Builds a StructObjectInspector for the given Hive struct type string by
 * initializing an OrcSerde against synthesized table properties.
 *
 * @throws IllegalArgumentException if {@code typeString} is not a struct type
 * @throws RuntimeException wrapping any SerDeException from the serde
 */
private static StructObjectInspector makeObjectInspector(final String typeString) {
  TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(typeString);
  Preconditions.checkArgument(
      typeInfo instanceof StructTypeInfo,
      StringUtils.format("typeString should be struct type but not [%s]", typeString)
  );
  final OrcSerde serde = new OrcSerde();
  serde.initialize(new Configuration(),
      getTablePropertiesFromStructTypeInfo((StructTypeInfo) typeInfo));
  try {
    return (StructObjectInspector) serde.getObjectInspector();
  } catch (SerDeException e) {
    throw new RuntimeException(e);
  }
}
/**
 * Sizes the internal arrays from the struct's field list and registers a
 * target entry per field, using each field's declared type name.
 */
public void init(StructObjectInspector structObjectInspector) throws HiveException {
  final List<? extends StructField> fields = structObjectInspector.getAllStructFieldRefs();
  allocateArrays(fields.size());
  int i = 0;
  for (StructField field : fields) {
    final String typeName = field.getFieldObjectInspector().getTypeName();
    initTargetEntry(i, i, TypeInfoUtils.getTypeInfoFromTypeString(typeName));
    i++;
  }
}
/**
 * Prepares this writer for map-typed values: caches a reusable value
 * object, a fresh row extractor, and the map's parsed TypeInfo.
 */
public VectorExpressionWriter init(SettableMapObjectInspector objInspector)
    throws HiveException {
  super.init(objInspector);
  obj = initValue(null);
  vectorExtractRow = new VectorExtractRow();
  TypeInfo parsed = TypeInfoUtils.getTypeInfoFromTypeString(objInspector.getTypeName());
  mapTypeInfo = (MapTypeInfo) parsed;
  return this;
}
/**
 * Prepares this writer for struct-typed values: caches a reusable value
 * object, a fresh row extractor, and the struct's parsed TypeInfo.
 */
public VectorExpressionWriter init(SettableStructObjectInspector objInspector)
    throws HiveException {
  super.init(objInspector);
  obj = initValue(null);
  vectorExtractRow = new VectorExtractRow();
  TypeInfo parsed = TypeInfoUtils.getTypeInfoFromTypeString(objInspector.getTypeName());
  structTypeInfo = (StructTypeInfo) parsed;
  return this;
}
/**
 * Prepares this writer for union-typed values: caches a reusable value
 * object, a fresh row extractor, and the union's parsed TypeInfo.
 */
public VectorExpressionWriter init(SettableUnionObjectInspector objInspector)
    throws HiveException {
  super.init(objInspector);
  obj = initValue(null);
  vectorExtractRow = new VectorExtractRow();
  TypeInfo parsed = TypeInfoUtils.getTypeInfoFromTypeString(objInspector.getTypeName());
  unionTypeInfo = (UnionTypeInfo) parsed;
  return this;
}
/**
 * Convert a FieldSchema to an HCatFieldSchema.
 * (Original Javadoc had the direction reversed: this method takes a
 * metastore FieldSchema and produces its HCat representation.)
 *
 * @param fs FieldSchema to convert
 * @return HCatFieldSchema representation of FieldSchema
 * @throws HCatException if the type cannot be represented in HCat
 */
public static HCatFieldSchema getHCatFieldSchema(FieldSchema fs) throws HCatException {
  String fieldName = fs.getName();
  // Parse the Hive type string into a TypeInfo before mapping to HCat.
  TypeInfo baseTypeInfo = TypeInfoUtils.getTypeInfoFromTypeString(fs.getType());
  return getHCatFieldSchema(fieldName, baseTypeInfo, fs.getComment());
}
/**
 * Prepares this writer for list-typed values: caches a reusable value
 * object, a fresh row extractor, and the list's parsed TypeInfo.
 */
public VectorExpressionWriter init(SettableListObjectInspector objInspector)
    throws HiveException {
  super.init(objInspector);
  obj = initValue(null);
  vectorExtractRow = new VectorExtractRow();
  TypeInfo parsed = TypeInfoUtils.getTypeInfoFromTypeString(objInspector.getTypeName());
  listTypeInfo = (ListTypeInfo) parsed;
  return this;
}
/**
 * Generates a random decorated struct type (max depth 2) and runs the
 * single-field test against each of its fields.
 */
private void doStructFieldTests(Random random) throws Exception {
  String structTypeName =
      VectorRandomRowSource.getDecoratedTypeName(
          random, "struct", SupportedTypes.ALL,
          /* allowedTypeNameSet */ null,
          /* depth */ 0, /* maxDepth */ 2);
  StructTypeInfo structTypeInfo =
      (StructTypeInfo) TypeInfoUtils.getTypeInfoFromTypeString(structTypeName);
  final int fieldCount = structTypeInfo.getAllStructFieldNames().size();
  for (int fieldIndex = 0; fieldIndex < fieldCount; fieldIndex++) {
    doOneStructFieldTest(random, structTypeInfo, structTypeName, fieldIndex);
  }
}