/**
 * Composes the thrift column value using the CQL type declared in the
 * provided metadata and adds it to the entity map under the column name.
 *
 * @param entity
 *            the entity map to populate
 * @param cqlMetadata
 *            the cql metadata whose value types are keyed by column name
 * @param thriftColumnValue
 *            the raw thrift column value ({@code byte[]})
 * @param thriftColumnName
 *            the thrift column name
 */
private void composeAndAdd(HashMap entity, CqlMetadata cqlMetadata, Object thriftColumnValue, String thriftColumnName)
{
    byte[] columnName = thriftColumnName.getBytes();
    // Use the metadata passed by the caller; previously this ignored the
    // parameter and re-fetched metadata through this.clientBase, which was
    // inconsistent with composeColumnValue().
    Map<ByteBuffer, String> schemaTypes = cqlMetadata.getValue_types();
    AbstractType<?> type = null;
    try
    {
        // columnName is already a byte[]; no cast needed.
        type = TypeParser.parse(schemaTypes.get(ByteBuffer.wrap(columnName)));
    }
    catch (SyntaxException | ConfigurationException e)
    {
        log.error(e.getMessage());
        // Preserve the original exception as the cause so the full stack
        // trace survives, instead of flattening it into the message only.
        throw new KunderaException("Error while parsing CQL Type " + e, e);
    }
    entity.put(thriftColumnName, type.compose(ByteBuffer.wrap((byte[]) thriftColumnValue)));
}
/**
 * Composes (deserializes) a raw column value into a Java object using the
 * CQL type recorded for the column in the metadata. Collection types are
 * decoded with the pre-v3 native protocol format.
 *
 * @param cqlMetadata
 *            the cql metadata whose value types are keyed by column name
 * @param thriftColumnValue
 *            the raw thrift column value bytes
 * @param thriftColumnName
 *            the raw thrift column name bytes
 * @return the deserialized column value
 */
private Object composeColumnValue(CqlMetadata cqlMetadata, byte[] thriftColumnValue, byte[] thriftColumnName)
{
    Map<ByteBuffer, String> schemaTypes = cqlMetadata.getValue_types();
    AbstractType<?> type = null;
    try
    {
        type = TypeParser.parse(schemaTypes.get(ByteBuffer.wrap(thriftColumnName)));
    }
    catch (SyntaxException | ConfigurationException ex)
    {
        log.error(ex.getMessage());
        // Preserve the original exception as the cause so the full stack
        // trace survives, instead of flattening it into the message only.
        throw new KunderaException("Error while deserializing column value " + ex, ex);
    }
    if (type.isCollection())
    {
        // Collections arriving over thrift use the pre-v3 wire format, so
        // decode explicitly at protocol V2 rather than via compose().
        return ((CollectionSerializer) type.getSerializer())
                .deserializeForNativeProtocol(ByteBuffer.wrap(thriftColumnValue), ProtocolVersion.V2);
    }
    return type.compose(ByteBuffer.wrap(thriftColumnValue));
}
/**
 * Folds the first argument value into the running sum.
 * <p>
 * Uses Kahan (compensated) summation: {@code compensation} carries the
 * low-order bits lost when each value is added to {@code sum}, reducing
 * accumulated floating-point error. {@code simpleSum} additionally keeps
 * the naive (uncompensated) total — presumably consulted elsewhere for
 * overflow/special-value handling; confirm against the enclosing class.
 *
 * @param protocolVersion the native protocol version (unused here)
 * @param values argument buffers; only the first element is consumed
 */
public void addInput(ProtocolVersion protocolVersion, List<ByteBuffer> values)
{
    ByteBuffer value = values.get(0);
    // A null buffer represents a null column value: contributes nothing.
    if (value == null)
        return;
    double number = ((Number) numberType.compose(value)).doubleValue();
    simpleSum += number;
    // Kahan summation — the exact statement order and grouping below are
    // load-bearing; do not algebraically "simplify" these expressions.
    double tmp = number - compensation;
    double rounded = sum + tmp;
    compensation = (rounded - sum) - tmp;
    sum = rounded;
}
/**
 * Folds the first argument value into the running sum.
 * <p>
 * Uses Kahan (compensated) summation: {@code compensation} carries the
 * low-order bits lost when each value is added to {@code sum}, reducing
 * accumulated floating-point error. {@code simpleSum} additionally keeps
 * the naive (uncompensated) total — presumably consulted elsewhere for
 * overflow/special-value handling; confirm against the enclosing class.
 *
 * @param protocolVersion the native protocol version (unused here)
 * @param values argument buffers; only the first element is consumed
 */
public void addInput(ProtocolVersion protocolVersion, List<ByteBuffer> values)
{
    ByteBuffer value = values.get(0);
    // A null buffer represents a null column value: contributes nothing.
    if (value == null)
        return;
    double number = ((Number) numberType.compose(value)).doubleValue();
    simpleSum += number;
    // Kahan summation — the exact statement order and grouping below are
    // load-bearing; do not algebraically "simplify" these expressions.
    double tmp = number - compensation;
    double rounded = sum + tmp;
    compensation = (rounded - sum) - tmp;
    sum = rounded;
}
public Field field(String name, AbstractType type, ByteBuffer byteBufferValue, FieldType fieldType) { Object value = type.compose(byteBufferValue); return new DoubleField(name, ((Number) value).doubleValue(), fieldType); } }
protected I compose(ByteBuffer bb) { return inputType().compose(bb); } }
protected I compose(ByteBuffer bb) { return inputType().compose(bb); } }
protected I compose(ByteBuffer bb) { return inputType().compose(bb); } }
public Field field(String name, AbstractType type, ByteBuffer byteBufferValue, FieldType fieldType) { Object value = type.compose(byteBufferValue); return new IntField(name, (Integer) value, fieldType); } }
public Field field(String name, AbstractType type, ByteBuffer byteBufferValue, FieldType fieldType) { Object value = type.compose(byteBufferValue); return new FloatField(name, ((Number) value).floatValue(), fieldType); } }
public Field field(String name, AbstractType type, ByteBuffer byteBufferValue, FieldType fieldType) { Object value = type.compose(byteBufferValue); return new LongField(name, ((Date) value).getTime(), fieldType); } }
public Field field(String name, AbstractType type, ByteBuffer byteBufferValue, FieldType fieldType) { Object value = type.compose(byteBufferValue); return new Field(name, UUIDType.instance.getSerializer().toString((UUID) value), fieldType); } }
/**
 * Returns the specified token as a {@link java.lang.Comparable}.
 *
 * @param tokenAsString a token represented as a {@link java.lang.String}.
 * @return the specified token as a {@link java.lang.Comparable}.
 */
@SuppressWarnings("unchecked")
public Comparable tokenAsComparable(String tokenAsString)
{
    // Parse the string form back into a Token, serialize it, then decode
    // the raw bytes with the partitioner's token type.
    Token parsed = tokenFactory.fromString(tokenAsString);
    ByteBuffer raw = tokenFactory.toByteArray(parsed);
    return (Comparable) tokenType.compose(raw);
}
protected Object cassandraToObj(AbstractType validator, ByteBuffer value) { if (validator instanceof DecimalType || validator instanceof InetAddressType) return validator.getString(value); if (validator instanceof CollectionType) { // For CollectionType, the compose() method assumes the v3 protocol format of collection, which // is not correct here since we query using the CQL-over-thrift interface which use the pre-v3 format return ((CollectionSerializer)validator.getSerializer()).deserializeForNativeProtocol(value, nativeProtocolVersion); } return validator.compose(value); }
/**
 * Executes the split's CQL query bound to its token range and records the
 * table's partition key column names.
 */
public RowIterator()
{
    AbstractType type = partitioner.getTokenValidator();
    // Decode the split boundaries into native token values for binding.
    Object startToken = type.compose(type.fromString(split.getStartToken()));
    Object endToken = type.compose(type.fromString(split.getEndToken()));
    ResultSet rs = session.execute(cqlQuery, startToken, endToken);

    for (ColumnMetadata meta : cluster.getMetadata().getKeyspace(quote(keyspace)).getTable(quote(cfName)).getPartitionKey())
    {
        partitionBoundColumns.put(meta.getName(), Boolean.TRUE);
    }
    rows = rs.iterator();
}
/**
 * Executes the split's CQL query bound to its token range and records the
 * table's partition key column names.
 */
public RowIterator()
{
    AbstractType type = partitioner.getTokenValidator();
    // Decode the split boundaries into native token values for binding.
    Object startToken = type.compose(type.fromString(split.getStartToken()));
    Object endToken = type.compose(type.fromString(split.getEndToken()));
    ResultSet rs = session.execute(cqlQuery, startToken, endToken);

    for (ColumnMetadata meta : cluster.getMetadata().getKeyspace(quote(keyspace)).getTable(quote(cfName)).getPartitionKey())
    {
        partitionBoundColumns.put(meta.getName(), Boolean.TRUE);
    }
    rows = rs.iterator();
}
/**
 * Executes the split's CQL query bound to its token range and records the
 * table's partition key column names.
 */
public RowIterator()
{
    AbstractType type = partitioner.getTokenValidator();
    // Decode the split boundaries into native token values for binding.
    Object startToken = type.compose(type.fromString(split.getStartToken()));
    Object endToken = type.compose(type.fromString(split.getEndToken()));
    ResultSet rs = session.execute(cqlQuery, startToken, endToken);

    for (ColumnMetadata meta : cluster.getMetadata().getKeyspace(quote(keyspace)).getTable(quote(cfName)).getPartitionKey())
    {
        partitionBoundColumns.put(meta.getName(), Boolean.TRUE);
    }
    rows = rs.iterator();
}
/**
 * Executes the split's CQL query bound to its token range and records the
 * table's partition key column names.
 */
public RowIterator()
{
    AbstractType type = partitioner.getTokenValidator();
    // Decode the split boundaries into native token values for binding.
    Object startToken = type.compose(type.fromString(split.getStartToken()));
    Object endToken = type.compose(type.fromString(split.getEndToken()));
    ResultSet rs = session.execute(cqlQuery, startToken, endToken);

    for (ColumnMetadata meta : cluster.getMetadata().getKeyspace(quote(keyspace)).getTable(quote(cfName)).getPartitionKey())
    {
        partitionBoundColumns.put(meta.getName(), Boolean.TRUE);
    }
    rows = rs.iterator();
}
/**
 * Runs the distinct-key query over the split's token range and records the
 * table's partition key column names.
 */
DistinctKeyIterator()
{
    AbstractType type = partitioner.getTokenValidator();
    // Bind the decoded token range boundaries directly into the statement.
    SimpleStatement statement = new SimpleStatement(cqlQuery,
                                                    type.compose(type.fromString(split.getStartToken())),
                                                    type.compose(type.fromString(split.getEndToken())));
    rowIterator = session.execute(statement).iterator();

    for (ColumnMetadata meta : cluster.getMetadata().getKeyspace(quote(keyspace)).getTable(quote(cfName)).getPartitionKey())
        partitionBoundColumns.put(meta.getName(), Boolean.TRUE);
}
/**
 * Runs the distinct-key query over the split's token range and records the
 * table's partition key column names.
 */
DistinctKeyIterator()
{
    AbstractType type = partitioner.getTokenValidator();
    // Bind the decoded token range boundaries directly into the statement.
    SimpleStatement statement = new SimpleStatement(cqlQuery,
                                                    type.compose(type.fromString(split.getStartToken())),
                                                    type.compose(type.fromString(split.getEndToken())));
    rowIterator = session.execute(statement).iterator();

    for (ColumnMetadata meta : cluster.getMetadata().getKeyspace(quote(keyspace)).getTable(quote(cfName)).getPartitionKey())
        partitionBoundColumns.put(meta.getName(), Boolean.TRUE);
}