/**
 * Returns the {@link DimensionFieldSpec} registered under the given column name, or {@code null}
 * when the column is unknown or is not a DIMENSION field.
 *
 * @param dimensionName Name of the dimension column to look up
 * @return The dimension field spec, or {@code null} if absent or of a different field type
 */
@JsonIgnore
public DimensionFieldSpec getDimensionSpec(@Nonnull String dimensionName) {
  FieldSpec spec = _fieldSpecMap.get(dimensionName);
  if (spec == null || spec.getFieldType() != FieldType.DIMENSION) {
    return null;
  }
  return (DimensionFieldSpec) spec;
}
/**
 * Returns the {@link DateTimeFieldSpec} registered under the given column name, or {@code null}
 * when the column is unknown or is not a DATE_TIME field.
 *
 * @param dateTimeName Name of the date-time column to look up
 * @return The date-time field spec, or {@code null} if absent or of a different field type
 */
@JsonIgnore
public DateTimeFieldSpec getDateTimeSpec(@Nonnull String dateTimeName) {
  FieldSpec spec = _fieldSpecMap.get(dateTimeName);
  if (spec == null || spec.getFieldType() != FieldType.DATE_TIME) {
    return null;
  }
  return (DateTimeFieldSpec) spec;
}
/**
 * Returns the {@link MetricFieldSpec} registered under the given column name, or {@code null}
 * when the column is unknown or is not a METRIC field.
 *
 * @param metricName Name of the metric column to look up
 * @return The metric field spec, or {@code null} if absent or of a different field type
 */
@JsonIgnore
public MetricFieldSpec getMetricSpec(@Nonnull String metricName) {
  FieldSpec spec = _fieldSpecMap.get(metricName);
  if (spec == null || spec.getFieldType() != FieldType.METRIC) {
    return null;
  }
  return (MetricFieldSpec) spec;
}
/**
 * Post-deserialization hook (presumably invoked after JSON injection completes — the interface
 * declaring this override is outside this view).
 * Materializes {@code _defaultNullValue} from its serialized string form, since only the string
 * representation survives (de)serialization.
 */
@Override
public void postInject() {
  // Compute the actual default null value from its string representation
  _defaultNullValue = getDefaultNullValue(getFieldType(), _dataType, _stringDefaultNullValue);
}
/**
 * Sets the default null value for this field.
 * Stores the string form of the given value, then re-materializes the typed default — but only
 * when the data type is already known (it may not be yet during deserialization).
 *
 * @param defaultNullValue New default null value; {@code null} leaves the stored string unchanged
 */
public void setDefaultNullValue(@Nullable Object defaultNullValue) {
  if (defaultNullValue != null) {
    _stringDefaultNullValue = getStringValue(defaultNullValue);
  }
  // Cannot compute a typed default without a data type; defer until it is set.
  if (_dataType == null) {
    return;
  }
  _defaultNullValue = getDefaultNullValue(getFieldType(), _dataType, _stringDefaultNullValue);
}
// Removes the named column from the schema, if present. Fragment: the switch body that
// un-registers the spec from the per-type collections continues beyond this view.
public boolean removeField(String columnName) {
  FieldSpec existingFieldSpec = _fieldSpecMap.remove(columnName);
  if (existingFieldSpec != null) {
    // Branch on the removed spec's type to clean up the matching type-specific list.
    FieldType fieldType = existingFieldSpec.getFieldType();
    switch (fieldType) {
      case DIMENSION:
/**
 * Serializes {@code _defaultNullValue} into the given JSON node, but only when it differs from
 * the type's built-in default (so unchanged defaults are omitted from the serialized form).
 * Numbers are written as JSON numbers; everything else as its string representation.
 *
 * @param jsonNode Target JSON object node to append to
 */
protected void appendDefaultNullValue(ObjectNode jsonNode) {
  assert _defaultNullValue != null;
  // Built-in default for this field/data type (null string => no explicit override).
  Object builtInDefault = getDefaultNullValue(getFieldType(), _dataType, null);
  if (_defaultNullValue.equals(builtInDefault)) {
    return;
  }
  if (_defaultNullValue instanceof Number) {
    jsonNode.set("defaultNullValue", JsonUtils.objectToJsonNode(_defaultNullValue));
  } else {
    jsonNode.put("defaultNullValue", getStringValue(_defaultNullValue));
  }
}
/**
 * Creates a sorter over a Pinot segment.
 * Every field that is not a METRIC is treated as a dimension for sorting purposes.
 *
 * @param numDocs Number of documents in the segment
 * @param schema Segment schema
 * @param columnReaderMap Column readers keyed by column name
 */
public PinotSegmentSorter(int numDocs, Schema schema, Map<String, PinotSegmentColumnReader> columnReaderMap) {
  _numDocs = numDocs;
  _schema = schema;
  _columnReaderMap = columnReaderMap;
  _dimensionNames = new ArrayList<>();
  for (FieldSpec spec : schema.getAllFieldSpecs()) {
    // Count all fields that are not metrics as dimensions
    if (spec.getFieldType() == FieldSpec.FieldType.METRIC) {
      continue;
    }
    _dimensionNames.add(spec.getName());
    _numDimensions++;
  }
}
/**
 * Registers a new field spec in the schema, adding it to the per-type collections and to the
 * name-to-spec map.
 *
 * @param fieldSpec Field spec to add (must be non-null with a non-null name)
 * @throws IllegalStateException if a field with the same column name is already registered
 * @throws UnsupportedOperationException if the field type is not one of the known types
 */
public void addField(@Nonnull FieldSpec fieldSpec) {
  Preconditions.checkNotNull(fieldSpec);
  String columnName = fieldSpec.getName();
  Preconditions.checkNotNull(columnName);
  Preconditions
      .checkState(!_fieldSpecMap.containsKey(columnName), "Field spec already exists for column: " + columnName);
  FieldType type = fieldSpec.getFieldType();
  switch (type) {
    case METRIC:
      _metricNames.add(columnName);
      _metricFieldSpecs.add((MetricFieldSpec) fieldSpec);
      break;
    case DIMENSION:
      _dimensionNames.add(columnName);
      _dimensionFieldSpecs.add((DimensionFieldSpec) fieldSpec);
      break;
    case DATE_TIME:
      _dateTimeNames.add(columnName);
      _dateTimeFieldSpecs.add((DateTimeFieldSpec) fieldSpec);
      break;
    case TIME:
      // Only one time field per schema; the last one added wins.
      _timeFieldSpec = (TimeFieldSpec) fieldSpec;
      break;
    default:
      throw new UnsupportedOperationException("Unsupported field type: " + type);
  }
  // Register in the map only after the type-specific bookkeeping succeeded.
  _fieldSpecMap.put(columnName, fieldSpec);
}
/**
 * Sets the data type for this field, normalizing it to its stored type, and recomputes the
 * typed default null value from the stored string representation.
 *
 * @param dataType New data type (its stored type is what gets recorded)
 */
public void setDataType(DataType dataType) {
  _dataType = dataType.getStoredType();
  // Default null value depends on the data type, so it must be refreshed here.
  _defaultNullValue = getDefaultNullValue(getFieldType(), _dataType, _stringDefaultNullValue);
}
/**
 * Returns a comma separated list of qualifying field name strings, sorted alphabetically.
 *
 * @param type FieldType to filter on
 * @param excludeVirtualColumns when true, virtual columns are omitted from the result
 * @return Comma separated qualifying field names
 */
@JsonIgnore
private String getQualifyingFields(FieldType type, boolean excludeVirtualColumns) {
  List<String> matchingNames = new ArrayList<>();
  for (FieldSpec fieldSpec : getSchema().getAllFieldSpecs()) {
    String name = fieldSpec.getName();
    // Short-circuit keeps isVirtualColumn() from being consulted unless exclusion is requested.
    boolean excluded = excludeVirtualColumns && getSchema().isVirtualColumn(name);
    if (!excluded && fieldSpec.getFieldType() == type) {
      matchingNames.add(name);
    }
  }
  Collections.sort(matchingNames);
  return StringUtils.join(matchingNames, ",");
}
}
// Record this column's field type, then branch on it. Fragment: the switch body continues
// beyond this view.
fieldTypes.put(col, fs.getFieldType());
switch (fs.getFieldType()) {
  case DIMENSION:
    // Lazily initialize cardinality tracking the first time this dimension column is seen.
    if (cardinality.get(col) == null) {
// Pull the spec's type, data type, and name into locals (presumably for use by code beyond
// this view — the enclosing method is not visible here).
FieldType fieldType = fieldSpec.getFieldType();
DataType dataType = fieldSpec.getDataType();
String fieldName = fieldSpec.getName();
/**
 * Helper method that returns compression type to use based on segment creation spec and field type.
 * <ul>
 *   <li> Returns compression type from segment creation spec, if specified there.</li>
 *   <li> Else, returns PASS_THROUGH for metrics, and SNAPPY for dimensions. This is because metrics are likely
 *   to be spread in different chunks after applying predicates. Same could be true for dimensions, but in that
 *   case, clients are expected to explicitly specify the appropriate compression type in the spec.</li>
 * </ul>
 *
 * @param segmentCreationSpec Segment creation spec
 * @param fieldSpec Field spec for the column
 * @return Compression type to use
 */
private ChunkCompressorFactory.CompressionType getColumnCompressionType(SegmentGeneratorConfig segmentCreationSpec,
    FieldSpec fieldSpec) {
  ChunkCompressorFactory.CompressionType compressionType =
      segmentCreationSpec.getRawIndexCompressionType().get(fieldSpec.getName());
  // An explicit per-column setting in the spec always wins.
  if (compressionType != null) {
    return compressionType;
  }
  // Use '==' for enum comparison: idiomatic and null-safe, unlike the previous equals() call.
  return fieldSpec.getFieldType() == FieldType.METRIC
      ? ChunkCompressorFactory.CompressionType.PASS_THROUGH
      : ChunkCompressorFactory.CompressionType.SNAPPY;
}
// Fragment: special handling for the TIME field continues beyond this view.
// NOTE(review): '==' is the idiomatic, null-safe way to compare enum constants; equals() works
// but would NPE if getFieldType() ever returned null — confirm and consider changing.
if (fieldSpec.getFieldType().equals(FieldSpec.FieldType.TIME)) {
  long timeValue;
  // Numeric values are converted directly; other representations are handled below (not visible).
  if (value instanceof Number) {
/**
 * Copies every field declared in the schema from the Avro record into the destination row,
 * converting Avro values to Pinot-native objects.
 * For the TIME field, the incoming time field spec is used instead of the schema's spec, since
 * the source column may differ from the outgoing one.
 *
 * @param from Source Avro record
 * @param to Destination row (mutated in place)
 * @return The destination row, for chaining
 */
@Nonnull
public GenericRow transform(@Nonnull GenericData.Record from, @Nonnull GenericRow to) {
  for (FieldSpec outgoingSpec : _schema.getAllFieldSpecs()) {
    FieldSpec sourceSpec;
    if (outgoingSpec.getFieldType() == FieldSpec.FieldType.TIME) {
      sourceSpec = _incomingTimeFieldSpec;
    } else {
      sourceSpec = outgoingSpec;
    }
    String name = sourceSpec.getName();
    Object rawValue = from.get(name);
    Object converted = sourceSpec.isSingleValueField()
        ? AvroUtils.transformAvroValueToObject(rawValue, sourceSpec)
        : AvroUtils.transformAvroArrayToObjectArray((Array) rawValue, sourceSpec);
    to.putField(name, converted);
  }
  return to;
}
}
// Fragment: treats every non-metric field as a dimension; body continues beyond this view.
if (fieldSpec.getFieldType() != FieldSpec.FieldType.METRIC) {
  String dimensionName = fieldSpec.getName();
  _numDimensions++;
// Fragment: gathers the schema-side spec and the segment-side metadata for a column,
// presumably to cross-check them (the comparison logic is beyond this view).
FieldSpec fieldSpecInSchema = _schema.getFieldSpecFor(column);
// The column is expected to exist in the schema; fail fast otherwise.
Preconditions.checkNotNull(fieldSpecInSchema);
FieldSpec.FieldType fieldTypeInSchema = fieldSpecInSchema.getFieldType();
ColumnMetadata columnMetadata = _segmentMetadata.getColumnMetadataFor(column);
// Fragment: writes per-column metadata properties; starts mid-call and ends mid-call.
String.valueOf(PinotDataBitSet.getNumBitsPerValue(cardinality - 1)));
properties.setProperty(getKeyFor(column, DICTIONARY_ELEMENT_SIZE), String.valueOf(dictionaryElementSize));
properties.setProperty(getKeyFor(column, COLUMN_TYPE), String.valueOf(fieldSpec.getFieldType()));
properties.setProperty(getKeyFor(column, IS_SORTED), String.valueOf(columnIndexCreationInfo.isSorted()));
properties.setProperty(getKeyFor(column, HAS_NULL_VALUE), String.valueOf(columnIndexCreationInfo.hasNulls()));
// DATE_TIME columns additionally persist their format metadata.
// NOTE(review): '==' is the idiomatic, null-safe way to compare enum constants — consider changing.
if (fieldSpec.getFieldType().equals(FieldType.DATE_TIME)) {
  DateTimeFieldSpec dateTimeFieldSpec = (DateTimeFieldSpec) fieldSpec;
  properties.setProperty(V1Constants.MetadataKeys.Column.getKeyFor(column, DATETIME_FORMAT),
// Fragment: assigns compression types for raw (no-dictionary) columns; metrics get
// PASS_THROUGH. The loop body continues beyond this view.
// NOTE(review): '==' is the idiomatic, null-safe way to compare enum constants — consider changing.
for (String column : noDictionaryColumns) {
  FieldSpec fieldSpec = dataSchema.getFieldSpecFor(column);
  if (fieldSpec.getFieldType().equals(FieldSpec.FieldType.METRIC)) {
    columnToCompressionType.put(column, ChunkCompressorFactory.CompressionType.PASS_THROUGH);