private Filter constructFilter(String regex, boolean isEqual, FieldMapping fieldMapping) { byte[] family = fieldMapping.getFamily(); byte[] qualifier = fieldMapping.getQualifier(); try { // To work we both HBase 0.94 and 0.96 we have to use reflection to construct a // SingleColumnValueFilter (and a RegexStringComparator) since // WritableByteArrayComparable (which RegexStringComparator extends) was renamed // to ByteArrayComparable in HBase 0.95 (HBASE-6658) Class<?> c = Class.forName("org.apache.hadoop.hbase.filter.SingleColumnValueFilter"); for (Constructor<?> cons : c.getConstructors()) { if (cons.getParameterTypes().length == 4 && !cons.getParameterTypes()[3].isArray()) { // not byte[] as the fourth arg Object regexStringComparator = Class.forName( "org.apache.hadoop.hbase.filter.RegexStringComparator") .getConstructor(String.class).newInstance(regex); return (Filter) cons.newInstance(family, qualifier, isEqual ? CompareFilter.CompareOp.EQUAL : CompareFilter.CompareOp.NOT_EQUAL, regexStringComparator); } } } catch (Exception e) { e.printStackTrace(); throw new DatasetException("Cannot create RegexEntityFilter.", e); } throw new DatasetException("Cannot create RegexEntityFilter (no constructor found)."); }
private Filter constructFilter(String regex, boolean isEqual, FieldMapping fieldMapping) { byte[] family = fieldMapping.getFamily(); byte[] qualifier = fieldMapping.getQualifier(); try { // To work we both HBase 0.94 and 0.96 we have to use reflection to construct a // SingleColumnValueFilter (and a RegexStringComparator) since // WritableByteArrayComparable (which RegexStringComparator extends) was renamed // to ByteArrayComparable in HBase 0.95 (HBASE-6658) Class<?> c = Class.forName("org.apache.hadoop.hbase.filter.SingleColumnValueFilter"); for (Constructor<?> cons : c.getConstructors()) { if (cons.getParameterTypes().length == 4 && !cons.getParameterTypes()[3].isArray()) { // not byte[] as the fourth arg Object regexStringComparator = Class.forName( "org.apache.hadoop.hbase.filter.RegexStringComparator") .getConstructor(String.class).newInstance(regex); return (Filter) cons.newInstance(family, qualifier, isEqual ? CompareFilter.CompareOp.EQUAL : CompareFilter.CompareOp.NOT_EQUAL, regexStringComparator); } } } catch (Exception e) { e.printStackTrace(); throw new DatasetException("Cannot create RegexEntityFilter.", e); } throw new DatasetException("Cannot create RegexEntityFilter (no constructor found)."); }
/**
 * Constructs a filter that compares a single mapped column's value against a
 * serialized filter value using the supplied comparison operator.
 *
 * @param entitySchema     Schema describing the entity's column mappings.
 * @param entitySerDe      Serializer used to convert the filter value to bytes.
 * @param fieldName        Name of the entity field to filter on.
 * @param filterValue      Value the column is compared against.
 * @param equalityOperator HBase comparison operator (e.g. EQUAL, NOT_EQUAL).
 * @throws DatasetException if the field is not mapped with a COLUMN mapping type.
 */
public SingleFieldEntityFilter(EntitySchema entitySchema, EntitySerDe<?> entitySerDe,
    String fieldName, Object filterValue, CompareFilter.CompareOp equalityOperator) {
  FieldMapping mapping =
      entitySchema.getColumnMappingDescriptor().getFieldMapping(fieldName);
  // SingleColumnValueFilter operates on a single concrete column, so only
  // COLUMN-mapped fields are supported.
  if (mapping.getMappingType() != MappingType.COLUMN) {
    throw new DatasetException(
        "SingleColumnValueFilter only compatible with COLUMN mapping types.");
  }
  byte[] columnFamily = mapping.getFamily();
  byte[] columnQualifier = mapping.getQualifier();
  byte[] valueBytes = entitySerDe.serializeColumnValueToBytes(fieldName, filterValue);
  this.filter =
      new SingleColumnValueFilter(columnFamily, columnQualifier, equalityOperator, valueBytes);
}
/**
 * Creates a single-field filter: the named field's mapped HBase column is
 * compared to the serialized filter value with the given operator.
 *
 * @param entitySchema     Entity schema providing the column mapping descriptor.
 * @param entitySerDe      SerDe that serializes the comparison value.
 * @param fieldName        The entity field being filtered.
 * @param filterValue      The comparison value.
 * @param equalityOperator The HBase CompareOp to apply.
 * @throws DatasetException if the field's mapping type is not COLUMN.
 */
public SingleFieldEntityFilter(EntitySchema entitySchema, EntitySerDe<?> entitySerDe,
    String fieldName, Object filterValue, CompareFilter.CompareOp equalityOperator) {
  FieldMapping columnMapping = entitySchema.getColumnMappingDescriptor()
      .getFieldMapping(fieldName);
  // Reject non-COLUMN mappings up front; the underlying HBase filter can only
  // target one concrete family/qualifier pair.
  if (columnMapping.getMappingType() != MappingType.COLUMN) {
    throw new DatasetException(
        "SingleColumnValueFilter only compatible with COLUMN mapping types.");
  }
  byte[] serialized = entitySerDe.serializeColumnValueToBytes(fieldName, filterValue);
  this.filter = new SingleColumnValueFilter(
      columnMapping.getFamily(), columnMapping.getQualifier(), equalityOperator, serialized);
}
/**
 * Builds an HBase Increment for a counter-mapped field of the entity row
 * identified by the given key.
 *
 * @param key       The partition key identifying the row; serialized via
 *                  keySerDe when one is configured.
 * @param fieldName Name of the counter field to increment.
 * @param amount    Amount to increment the counter by.
 * @return An Increment targeting the field's mapped family/qualifier.
 * @throws DatasetException if the field is unknown or not a COUNTER mapping.
 */
@Override
public Increment mapToIncrement(PartitionKey key, String fieldName, long amount) {
  FieldMapping counterMapping = entitySchema.getColumnMappingDescriptor()
      .getFieldMapping(fieldName);
  if (counterMapping == null) {
    throw new DatasetException("Unknown field in the schema: " + fieldName);
  }
  if (counterMapping.getMappingType() != MappingType.COUNTER) {
    throw new DatasetException("Field is not a counter type: " + fieldName);
  }
  // With no key SerDe configured, fall back to a single zero-byte row key.
  byte[] rowKey = (keySerDe == null)
      ? new byte[] { (byte) 0 }
      : keySerDe.serialize(key);
  Increment increment = new Increment(rowKey);
  increment.addColumn(counterMapping.getFamily(), counterMapping.getQualifier(), amount);
  return increment;
}
/**
 * Maps a counter-field increment request onto an HBase Increment operation.
 *
 * @param key       Partition key of the target row (serialized with keySerDe
 *                  if present; otherwise a single zero byte is used).
 * @param fieldName The counter field to bump.
 * @param amount    The delta to apply.
 * @return The Increment for the mapped column.
 * @throws DatasetException when the field is missing from the schema or is
 *                          not mapped as a COUNTER.
 */
@Override
public Increment mapToIncrement(PartitionKey key, String fieldName, long amount) {
  FieldMapping mapping = entitySchema.getColumnMappingDescriptor()
      .getFieldMapping(fieldName);
  // Validate field existence and mapping type before touching the key.
  if (mapping == null) {
    throw new DatasetException("Unknown field in the schema: " + fieldName);
  }
  if (mapping.getMappingType() != MappingType.COUNTER) {
    throw new DatasetException("Field is not a counter type: " + fieldName);
  }
  byte[] serializedKey;
  if (keySerDe != null) {
    serializedKey = keySerDe.serialize(key);
  } else {
    // No key SerDe: use the same single-zero-byte placeholder row key.
    serializedKey = new byte[] { (byte) 0 };
  }
  Increment result = new Increment(serializedKey);
  result.addColumn(mapping.getFamily(), mapping.getQualifier(), amount);
  return result;
}
|| fieldMapping.getMappingType() == MappingType.COUNTER) { serializeColumn(fieldName, fieldMapping.getFamily(), fieldMapping.getQualifier(), fieldValue, put); } else if (fieldMapping.getMappingType() == MappingType.KEY_AS_COLUMN) { serializeKeyAsColumn(fieldName, fieldMapping.getFamily(),
|| fieldMapping.getMappingType() == MappingType.COUNTER) { serializeColumn(fieldName, fieldMapping.getFamily(), fieldMapping.getQualifier(), fieldValue, put); } else if (fieldMapping.getMappingType() == MappingType.KEY_AS_COLUMN) { serializeKeyAsColumn(fieldName, fieldMapping.getFamily(),
/**
 * Deserialize an entity field from the HBase Result.
 *
 * @param fieldMapping
 *          The FieldMapping that specifies this field's mapping type and
 *          field name.
 * @param result
 *          The HBase Result that represents a row in HBase.
 * @return The field Object we deserialized from the Result.
 * @throws ValidationException if the mapping type is not one of COLUMN,
 *           COUNTER, KEY_AS_COLUMN, or OCC_VERSION.
 */
public Object deserialize(FieldMapping fieldMapping, Result result) {
  String fieldName = fieldMapping.getFieldName();
  // Dispatch on the mapping type; COLUMN and COUNTER share the same
  // single-column deserialization path.
  switch (fieldMapping.getMappingType()) {
    case COLUMN:
    case COUNTER:
      return deserializeColumn(fieldName, fieldMapping.getFamily(),
          fieldMapping.getQualifier(), result);
    case KEY_AS_COLUMN:
      return deserializeKeyAsColumn(fieldName, fieldMapping.getFamily(),
          fieldMapping.getPrefix(), result);
    case OCC_VERSION:
      return deserializeOCCColumn(result);
    default:
      throw new ValidationException(
          "Invalid field mapping for field with name: " + fieldName);
  }
}
/**
 * Deserialize an entity field from the HBase Result.
 *
 * @param fieldMapping
 *          The FieldMapping that specifies this field's mapping type and
 *          field name.
 * @param result
 *          The HBase Result that represents a row in HBase.
 * @return The field Object we deserialized from the Result.
 * @throws ValidationException if the field's mapping type is unsupported.
 */
public Object deserialize(FieldMapping fieldMapping, Result result) {
  String name = fieldMapping.getFieldName();
  MappingType type = fieldMapping.getMappingType();
  // COLUMN and COUNTER fields both live in a single family/qualifier cell.
  if (type == MappingType.COLUMN || type == MappingType.COUNTER) {
    return deserializeColumn(name, fieldMapping.getFamily(),
        fieldMapping.getQualifier(), result);
  }
  // KEY_AS_COLUMN fields are spread over qualifiers sharing a common prefix.
  if (type == MappingType.KEY_AS_COLUMN) {
    return deserializeKeyAsColumn(name, fieldMapping.getFamily(),
        fieldMapping.getPrefix(), result);
  }
  if (type == MappingType.OCC_VERSION) {
    return deserializeOCCColumn(result);
  }
  throw new ValidationException(
      "Invalid field mapping for field with name: " + name);
}