@Override public InternalRow get() { return new GenericInternalRow(new Object[] {start, -start}); }
@Override public InternalRow get() { return new GenericInternalRow(new Object[] {i[current], j[current]}); }
@Override
public InternalRow get() {
  // fill only the columns that survived projection pruning, matched by field name
  Object[] values = new Object[requiredSchema.size()];
  for (int i = 0; i < values.length; i++) {
    if ("i".equals(requiredSchema.apply(i).name())) {
      values[i] = start;
    } else if ("j".equals(requiredSchema.apply(i).name())) {
      values[i] = -start;
    }
  }
  return new GenericInternalRow(values);
}
@Override public InternalRow readRow(Object[] data) { return new GenericInternalRow(data); }
@Override
protected GenericInternalRow newStructData(InternalRow reuse) {
  // reuse the caller's row when it is already generic; otherwise allocate a fresh one
  if (reuse instanceof GenericInternalRow) {
    return (GenericInternalRow) reuse;
  } else {
    return new GenericInternalRow(numFields);
  }
}
@Override public Object wrapWithGenericRow(Object[] fields) { return new GenericInternalRow(fields); }
@Override
public InternalRow copy() {
  GenericInternalRow row = new GenericInternalRow(columns.length);
  for (int i = 0; i < numFields(); i++) {
    if (isNullAt(i)) {
      row.setNullAt(i);
    } else {
      // copy each non-null value out of the column vectors; the full method
      // dispatches on columns[i].dataType() with type-specific setters
      row.update(i, get(i, columns[i].dataType()));
    }
  }
  return row;
}
@Override
public InternalRow copy() {
  GenericInternalRow row = new GenericInternalRow(numFields);
  for (int i = 0; i < numFields(); i++) {
    if (isNullAt(i)) {
      row.setNullAt(i);
    } else {
      // hypothetical completion: types[i] stands in for the field's DataType,
      // which the original resolves before copying the value
      row.update(i, get(i, types[i]));
    }
  }
  return row;
}
@Override public InternalRow serialize(T obj) { byte[] bytes = new TWKBWriter().write(obj); InternalRow returnRow = new GenericInternalRow(1); /* single binary column holding the TWKB-encoded geometry */ returnRow.update(0, bytes); return returnRow; }
@Override
public InternalRow struct(Types.StructType struct, Iterable<Object> fieldResults) {
  List<Object> values = Lists.newArrayList(fieldResults);
  GenericInternalRow row = new GenericInternalRow(values.size());
  for (int i = 0; i < values.size(); i += 1) {
    row.update(i, values.get(i));
  }
  return row;
}
PartitionRowConverter(Schema partitionSchema, PartitionSpec spec) {
  StructType partitionType = SparkSchemaUtil.convert(partitionSchema);
  StructField[] fields = partitionType.fields();
  this.types = new DataType[fields.length];
  this.positions = new int[types.length];
  this.javaTypes = new Class<?>[types.length];
  this.reusedRow = new GenericInternalRow(types.length);
  List<PartitionField> partitionFields = spec.fields();
  for (int rowIndex = 0; rowIndex < fields.length; rowIndex += 1) {
    this.types[rowIndex] = fields[rowIndex].dataType();
    int sourceId = partitionSchema.columns().get(rowIndex).fieldId();
    for (int specIndex = 0; specIndex < partitionFields.size(); specIndex += 1) {
      PartitionField field = spec.fields().get(specIndex);
      if (field.sourceId() == sourceId && "identity".equals(field.transform().toString())) {
        positions[rowIndex] = specIndex;
        javaTypes[rowIndex] = spec.javaClasses()[specIndex];
        break;
      }
    }
  }
}
private void initializeAtFirstRow() throws IOException {
  // reusable buffers: one row for filter evaluation, one for the projected output
  filterValues = new Object[carbonTable.getDimensionOrdinalMax() + measureCount];
  filterRow = new RowImpl();
  filterRow.setValues(filterValues);
  outputValues = new Object[projection.length];
  outputRow = new GenericInternalRow(outputValues);

  // open the stream file at this split's offset and sync on blocklet boundaries
  Path file = fileSplit.getPath();
  byte[] syncMarker = getSyncMarker(file.toString());
  FileSystem fs = file.getFileSystem(hadoopConf);
  int bufferSize = Integer.parseInt(hadoopConf.get(
      CarbonStreamInputFormat.READ_BUFFER_SIZE,
      CarbonStreamInputFormat.READ_BUFFER_SIZE_DEFAULT));
  FSDataInputStream fileIn = fs.open(file, bufferSize);
  fileIn.seek(fileSplit.getStart());
  input = new StreamBlockletReader(syncMarker, fileIn, fileSplit.getLength(),
      fileSplit.getStart() == 0, compressorName);

  cacheProvider = CacheProvider.getInstance();
  cache = cacheProvider.createCache(CacheType.FORWARD_DICTIONARY);
  queryTypes = CarbonStreamInputFormat.getComplexDimensions(carbonTable, storageColumns, cache);
  outputSchema = new StructType((StructField[])
      DataTypeUtil.getDataTypeConverter().convertCarbonSchemaToSparkSchema(projection));
}
@Override
public InternalRow read(Decoder decoder, Object reuse) throws IOException {
  GenericInternalRow row = new GenericInternalRow(readers.length);
  if (decoder instanceof ResolvingDecoder) {
    // this may not set all of the fields. nulls are set by default.
    for (Schema.Field field : ((ResolvingDecoder) decoder).readFieldOrder()) {
      Object value = readers[field.pos()].read(decoder, null);
      if (value != null) {
        row.update(field.pos(), value);
      } else {
        row.setNullAt(field.pos());
      }
    }
  } else {
    for (int i = 0; i < readers.length; i += 1) {
      Object value = readers[i].read(decoder, null);
      if (value != null) {
        row.update(i, value);
      } else {
        row.setNullAt(i);
      }
    }
  }
  return row;
}
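The snippets above all reduce to the same two construction patterns: wrapping an existing Object[] or allocating by field count and filling slot by slot. As a standalone illustration (not taken from any of the projects quoted above, and assuming Spark's catalyst and unsafe modules are on the classpath), a minimal sketch:

import org.apache.spark.sql.catalyst.InternalRow;
import org.apache.spark.sql.catalyst.expressions.GenericInternalRow;
import org.apache.spark.unsafe.types.UTF8String;

public class GenericInternalRowDemo {
  public static void main(String[] args) {
    // Pattern 1: wrap an existing values array (the array is used as-is, not copied).
    // String columns must be stored as UTF8String, not java.lang.String.
    InternalRow wrapped = new GenericInternalRow(
        new Object[] {1, UTF8String.fromString("a")});

    // Pattern 2: allocate by field count, then fill each slot with update/setNullAt.
    GenericInternalRow filled = new GenericInternalRow(3);
    filled.update(0, 42L);
    filled.setNullAt(1);
    filled.update(2, 3.14d);

    // Values are read back with type-specific getters.
    System.out.println(wrapped.getInt(0));         // 1
    System.out.println(wrapped.getUTF8String(1));  // a
    System.out.println(filled.isNullAt(1));        // true
    System.out.println(filled.getLong(0));         // 42
  }
}

Because pattern 1 keeps a reference to the caller's array, later mutation of that array is visible through the row; the examples above that reuse a values buffer (the converters and the streaming reader) rely on exactly that behavior.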