/**
 * Extracts the bucketing column values from the given row into the reusable
 * bucketFieldData array, one slot per bucket id.
 *
 * @param row input row object
 * @return the shared bucketFieldData array populated with the bucket column values
 */
protected Object[] getBucketFields(Object row) {
  int bucketCount = bucketIds.size();
  for (int idx = 0; idx < bucketCount; ++idx) {
    bucketFieldData[idx] =
        inputRowObjectInspector.getStructFieldData(row, bucketStructFields[idx]);
  }
  return bucketFieldData;
}
/**
 * Deserializes the key and value writables and caches every struct field's
 * data into the keyObjs / valObjs arrays for later column-wise access.
 *
 * @param key serialized key writable
 * @param val serialized value writable
 * @throws SerDeException if either deserialization fails
 */
@Override
public void setKeyValue(Writable key, Writable val) throws SerDeException {
  Object deserializedKey = keySerDe.deserialize(key);
  Object deserializedVal = valSerDe.deserialize(val);
  List<? extends StructField> keyRefs = keySoi.getAllStructFieldRefs();
  List<? extends StructField> valRefs = valSoi.getAllStructFieldRefs();
  for (int k = 0; k < keyRefs.size(); k++) {
    keyObjs[k] = keySoi.getStructFieldData(deserializedKey, keyRefs.get(k));
  }
  for (int v = 0; v < valRefs.size(); v++) {
    valObjs[v] = valSoi.getStructFieldData(deserializedVal, valRefs.get(v));
  }
}
// NOTE(review): body of an anonymous Comparator<Object>; the enclosing
// "new Comparator... {" and the registering call are outside this view,
// which is why the snippet ends with the extra "} });" closers.
// Compares two rows field-by-field via ObjectInspectorUtils.compare using each
// field's own object inspector; the first non-equal field decides the result.
// When sortOrder is DESC the sign of that result is flipped before returning.
@Override public int compare(Object object1, Object object2) { int result = 0; /**If multiple fields are mentioned for sorting a record then inside the loop we do will do sorting for each field*/ for (int i = 0; i < noOfInputFields; i++) { Object o1 = structObjectInspector.getStructFieldData(object1, fields[i]); Object o2 = structObjectInspector.getStructFieldData(object2, fields[i]); result = ObjectInspectorUtils.compare(o1, fields[i].getFieldObjectInspector(), o2, fields[i].getFieldObjectInspector()); if (result != 0) { /**Ordering*/ if (sortOrder == SORT_ORDER_TYPE.DESC) { result *= -1; } return result; } } return result; } });
@Override @SuppressWarnings("unchecked") public Object getStructFieldData(Object data, StructField fieldRef) { if (data == null) { return null; } MyField f = (MyField) fieldRef; Object fieldData; // We support both List<Object> and Object[] // so we have to do differently. if (! (data instanceof List)) { Object[] list = (Object[]) data; assert (list.length == unionObjectInspectors.size()); fieldData = list[f.structID]; } else { List<Object> list = (List<Object>) data; assert (list.size() == unionObjectInspectors.size()); fieldData = list.get(f.structID); } return unionObjectInspectors.get(f.structID).getStructFieldData(fieldData, f.structField); }
// NOTE(review): fragment — the enclosing method and the closing braces of this
// loop are outside this view. Appends the j-th output column's value for row i,
// then copies every struct field of iRow into the output row.
oRow.add(oColumns.get(j).get(i)); for(StructField f : inputOI.getAllStructFieldRefs()) { oRow.add(inputOI.getStructFieldData(iRow, f));
/**
 * Copy fields in the input row to the output array of standard objects.
 *
 * @param result output list of standard objects.
 * @param row input row.
 * @param soi Object inspector for the to-be-copied columns.
 * @param objectInspectorOption copy option controlling how values are standardized.
 */
public static void copyToStandardObject(List<Object> result, Object row,
    StructObjectInspector soi, ObjectInspectorCopyOption objectInspectorOption) {
  for (StructField fieldRef : soi.getAllStructFieldRefs()) {
    Object fieldData = soi.getStructFieldData(row, fieldRef);
    result.add(copyToStandardObject(fieldData, fieldRef.getFieldObjectInspector(),
        objectInspectorOption));
  }
}
/**
 * Assembles the next output row: first the window-function outputs (consuming
 * the head element of each per-function list), then every struct field of the
 * underlying input row from the rolling partition. Applies the rank limit when
 * one is configured.
 *
 * @return the assembled output row
 * @throws HiveException propagated from the rolling partition
 */
private List<Object> nextOutputRow() throws HiveException {
  List<Object> outputRow = new ArrayList<Object>();
  Object inputRow = rollingPart.nextOutputRow();
  for (int fn = 0; fn < fnOutputs.length; fn++) {
    outputRow.add(fnOutputs[fn].remove(0));
  }
  for (StructField fieldRef : rollingPart.getOutputOI().getAllStructFieldRefs()) {
    outputRow.add(rollingPart.getOutputOI().getStructFieldData(inputRow, fieldRef));
  }
  if (rnkLimit != null) {
    rnkLimit.updateRank(outputRow);
  }
  return outputRow;
}
/**
 * Returns the fieldNum-th field of the wrapped object, serialized into the
 * HCatRecord representation.
 *
 * @param fieldNum zero-based field index
 * @return the serialized field value
 * @throws IllegalStateException wrapping any SerDeException from serializeField
 */
@Override
public Object get(int fieldNum) {
  StructField fieldRef = soi.getAllStructFieldRefs().get(fieldNum);
  Object fieldData = soi.getStructFieldData(wrappedObject, fieldRef);
  try {
    return HCatRecordSerDe.serializeField(fieldData, fieldRef.getFieldObjectInspector());
  } catch (SerDeException e) {
    throw new IllegalStateException("SerDe Exception deserializing",e);
  }
}
/**
 * Copies each selected column of a deserialized Hive row into the output
 * vectors at the given record index; null column values are skipped.
 *
 * @param deSerializedValue the deserialized Hive row
 * @param outputRecordIndex destination row index within the output vectors
 */
protected void readHiveRecordAndInsertIntoRecordBatch(Object deSerializedValue, int outputRecordIndex) {
  int columnCount = selectedStructFieldRefs.size();
  for (int col = 0; col < columnCount; col++) {
    Object columnValue =
        finalOI.getStructFieldData(deSerializedValue, selectedStructFieldRefs.get(col));
    if (columnValue == null) {
      continue; // leave the vector slot untouched for null columns
    }
    selectedColumnFieldConverters.get(col).setSafeValue(
        selectedColumnObjInspectors.get(col), columnValue, vectors.get(col), outputRecordIndex);
  }
}
/**
 * Extracts the bucketing column values of the row into the reusable
 * bucketFieldData array using the record object inspector.
 *
 * @param row input record
 * @return the shared bucketFieldData array with one entry per bucket id
 * @throws SerializationError declared for interface compatibility
 */
private Object[] getBucketFields(Object row) throws SerializationError {
  StructObjectInspector rowInspector = getRecordObjectInspector();
  StructField[] bucketFields = getBucketStructFields();
  int bucketCount = bucketIds.size();
  for (int b = 0; b < bucketCount; b++) {
    bucketFieldData[b] = rowInspector.getStructFieldData(row, bucketFields[b]);
  }
  return bucketFieldData;
}
// NOTE(review): fragment — the enclosing method and the closing braces of this
// loop are outside this view. Appends the j-th output column's value for row i,
// then copies every struct field of iRow into the output row.
oRow.add(oColumns.get(j).get(i)); for(StructField f : inputOI.getAllStructFieldRefs()) { oRow.add(inputOI.getStructFieldData(iRow, f));
/**
 * Copy specified fields in the input row to the output array of standard objects.
 *
 * @param result output list of standard objects.
 * @param row input row.
 * @param startCol starting column number from the input row.
 * @param numCols number of columns to copy; note that a value of 0 never
 *        matches the post-increment count, so all columns from startCol
 *        onward are copied in that case.
 * @param soi Object inspector for the to-be-copied columns.
 * @param objectInspectorOption copy option controlling how values are standardized.
 */
public static void partialCopyToStandardObject(List<Object> result, Object row,
    int startCol, int numCols, StructObjectInspector soi,
    ObjectInspectorCopyOption objectInspectorOption) {
  List<? extends StructField> fieldRefs = soi.getAllStructFieldRefs();
  int copied = 0;
  for (int col = 0; col < fieldRefs.size(); col++) {
    if (col < startCol) {
      continue;
    }
    StructField fieldRef = fieldRefs.get(col);
    result.add(copyToStandardObject(soi.getStructFieldData(row, fieldRef),
        fieldRef.getFieldObjectInspector(), objectInspectorOption));
    if (++copied == numCols) {
      break;
    }
  }
}
// NOTE(review): body of an anonymous Comparator<Object>; the enclosing
// "new Comparator... {" and the registering call are outside this view,
// which is why the snippet ends with the extra "} });" closers.
// Compares two rows field-by-field via ObjectInspectorUtils.compare using each
// field's own object inspector; the first non-equal field decides the result.
// When sortOrder is DESC the sign of that result is flipped before returning.
@Override public int compare(Object object1, Object object2) { int result = 0; /**If multiple fields are mentioned for sorting a record then inside the loop we do will do sorting for each field*/ for (int i = 0; i < noOfInputFields; i++) { Object o1 = structObjectInspector.getStructFieldData(object1, fields[i]); Object o2 = structObjectInspector.getStructFieldData(object2, fields[i]); result = ObjectInspectorUtils.compare(o1, fields[i].getFieldObjectInspector(), o2, fields[i].getFieldObjectInspector()); if (result != 0) { /**Ordering*/ if (sortOrder == SORT_ORDER_TYPE.DESC) { result *= -1; } return result; } } return result; } });
/**
 * Produces the next output row. The row starts with one value taken from the
 * front of each window-function output list, followed by all struct fields of
 * the rolling partition's current input row. A configured rank limit is then
 * applied to the assembled row.
 *
 * @return the next output row
 * @throws HiveException propagated from the rolling partition
 */
private List<Object> nextOutputRow() throws HiveException {
  List<Object> row = new ArrayList<Object>();
  Object partitionRow = rollingPart.nextOutputRow();
  for (int idx = 0; idx < fnOutputs.length; idx++) {
    row.add(fnOutputs[idx].remove(0)); // consume head of each fn output list
  }
  for (StructField member : rollingPart.getOutputOI().getAllStructFieldRefs()) {
    row.add(rollingPart.getOutputOI().getStructFieldData(partitionRow, member));
  }
  if (rnkLimit != null) {
    rnkLimit.updateRank(row);
  }
  return row;
}
/**
 * Converts a struct row into an Object[] of Thrift payloads, one slot per
 * struct field. Null field values produce null slots.
 *
 * @param row input row
 * @param rowOI inspector for the row; must be a StructObjectInspector
 * @return array of converted field values
 * @throws Exception propagated from the Thrift payload conversion
 */
@Override
public Object convert(Object row, ObjectInspector rowOI) throws Exception {
  StructObjectInspector rowInspector = (StructObjectInspector) rowOI;
  List<? extends StructField> fieldRefs = rowInspector.getAllStructFieldRefs();
  Object[] payloads = new Object[fieldRefs.size()];
  for (int idx = 0; idx < payloads.length; idx++) {
    StructField fieldRef = fieldRefs.get(idx);
    Object fieldData = rowInspector.getStructFieldData(row, fieldRef);
    if (fieldData != null) {
      payloads[idx] =
          SerDeUtils.toThriftPayload(fieldData, fieldRef.getFieldObjectInspector(), protocol);
    }
  }
  return payloads;
}
/**
 * Returns the data of all struct fields as a list, in field order.
 * Returns null for data = null.
 *
 * @param data input data.
 * @return per-field data list, or null when data is null.
 */
@Override
public List<Object> getStructFieldsDataAsList(Object data) {
  if (data == null) {
    return null;
  }
  // Hoist the field-ref list out of the loop: the original re-fetched it via
  // getAllStructFieldRefs() on every iteration.
  List<? extends StructField> fields = getAllStructFieldRefs();
  List<Object> res = new ArrayList<>(fields.size());
  for (StructField field : fields) {
    res.add(baseOI.getStructFieldData(data, field));
  }
  return res;
}
/**
 * Reads the DWRF file through the facebook hive ORC reader and asserts that
 * the "test" column matches expectedValues element-by-element.
 *
 * @param type type used to decode and compare values
 * @param tempFile file containing the DWRF data
 * @param expectedValues expected values, in row order
 */
private static void assertFileContentsDwrfHive(
    Type type,
    TempFile tempFile,
    Iterable<?> expectedValues)
    throws Exception
{
  JobConf jobConf = new JobConf(new Configuration(false));
  // Project only column 0.
  jobConf.set(READ_COLUMN_IDS_CONF_STR, "0");
  jobConf.setBoolean(READ_ALL_COLUMNS, false);

  Path filePath = new Path(tempFile.getFile().getAbsolutePath());
  com.facebook.hive.orc.Reader orcReader = com.facebook.hive.orc.OrcFile.createReader(
      filePath.getFileSystem(jobConf), filePath, jobConf);

  // Deliberately oversized include array, all true, so every type id is selected.
  boolean[] included = new boolean[orcReader.getTypes().size() + 100000];
  Arrays.fill(included, true);
  com.facebook.hive.orc.RecordReader rows = orcReader.rows(included);

  StructObjectInspector rowInspector = (StructObjectInspector) orcReader.getObjectInspector();
  StructField testField = rowInspector.getStructFieldRef("test");

  Iterator<?> expected = expectedValues.iterator();
  Object row = null;
  while (rows.hasNext()) {
    row = rows.next(row);
    Object expectedValue = expected.next();
    Object actualValue = rowInspector.getStructFieldData(row, testField);
    actualValue = decodeRecordReaderValue(type, actualValue);
    assertColumnValueEquals(type, actualValue, expectedValue);
  }
  assertFalse(expected.hasNext());
}
// NOTE(review): fragment — the enclosing method is outside this view. Reads the
// next row and pulls its first two struct fields, cast to IntWritable and Text,
// via the row's StructObjectInspector.
Object row = rows.next(null); IntWritable intWritable = (IntWritable)soi.getStructFieldData(row, soi.getAllStructFieldRefs().get(0)); Text text = (Text)soi.getStructFieldData(row, soi.getAllStructFieldRefs().get(1));
/**
 * Appends a JSON-like rendering of obj to buffer, guided by its inspector.
 * Structs become "{ name: value, ... }" (recursing into each field),
 * primitives use their Java object's toString(), and any other inspector
 * kind prints "*unknown*".
 *
 * @param buffer destination builder
 * @param obj value to render
 * @param inspector inspector describing obj
 * @throws IOException declared for recursive callers
 */
private void stringifyObject(StringBuilder buffer,
                             Object obj,
                             ObjectInspector inspector
                             ) throws IOException {
  if (inspector instanceof StructObjectInspector) {
    StructObjectInspector structInspector = (StructObjectInspector) inspector;
    buffer.append("{ ");
    boolean first = true;
    for (StructField member : structInspector.getAllStructFieldRefs()) {
      if (!first) {
        buffer.append(", ");
      }
      first = false;
      buffer.append(member.getFieldName());
      buffer.append(": ");
      stringifyObject(buffer, structInspector.getStructFieldData(obj, member),
          member.getFieldObjectInspector());
    }
    buffer.append(" }");
  } else if (inspector instanceof PrimitiveObjectInspector) {
    PrimitiveObjectInspector primitiveInspector = (PrimitiveObjectInspector) inspector;
    buffer.append(primitiveInspector.getPrimitiveJavaObject(obj).toString());
  } else {
    buffer.append("*unknown*");
  }
}
/**
 * Appends one row to the internal batch, flushing when the batch reaches its
 * maximum size. When fields are known, each struct field is written to its own
 * column vector; otherwise the whole row is written to column 0.
 *
 * @param row the row to add
 * @throws IOException propagated from flushing the internal batch
 */
@Override
public void addRow(Object row) throws IOException {
  int rowId = internalBatch.size++;
  if (fields == null) {
    setColumn(rowId, internalBatch.cols[0], inspector, row);
  } else {
    StructObjectInspector structInspector = (StructObjectInspector) inspector;
    for (int col = 0; col < fields.length; ++col) {
      setColumn(rowId, internalBatch.cols[col], fields[col].getFieldObjectInspector(),
          structInspector.getStructFieldData(row, fields[col]));
    }
  }
  if (internalBatch.size == internalBatch.getMaxSize()) {
    flushInternalBatch();
  }
}