/**
 * Converts an Aerospike {@code Bin} to a {@code Long} by delegating to the
 * bin value's {@code toLong()}.
 * NOTE(review): fragment — the enclosing converter class begins before this
 * chunk; the final brace below closes that class.
 */
public Long convert(Bin source) {
    return source.value.toLong();
}
}
/**
 * Maps an Aerospike {@code Key} to the user-facing key type {@code K}
 * selected by the {@code keyType} field.
 * NOTE(review): the unchecked casts are only safe if {@code keyType} was
 * derived from {@code K} at construction time — confirm against the
 * enclosing class (its header is outside this chunk; the final brace
 * below closes it).
 */
private K prepareKey(Key key){
    switch (keyType) {
        case KEY:
            // caller works with raw Aerospike keys directly
            return (K) key;
        case LONG:
            // box the numeric user key so the cast to K succeeds
            return (K) (Long) key.userKey.toLong();
        case STRING:
            return (K) key.userKey.toString();
        default:
            throw new IllegalStateException("Error: unsupported key type. Must be one of: Key, Long or String"); // should not happen
    }
}
}
/**
 * Builds a secondary-index "collection contains" filter for the current
 * field and first query value.
 *
 * @param collectionType the indexed collection kind (LIST, MAPKEYS, ...)
 * @return a contains-filter for integer or string values, or {@code null}
 *         when the value's particle type is neither of those
 */
private Filter collectionContains(IndexCollectionType collectionType) {
    Value value = getValue1();
    int particleType = value.getType();
    if (particleType == ParticleType.INTEGER) {
        return Filter.contains(getField(), collectionType, value.toLong());
    }
    if (particleType == ParticleType.STRING) {
        return Filter.contains(getField(), collectionType, value.toString());
    }
    // unsupported particle type — no filter can be built
    return null;
}
// NOTE(review): fragment — interior of a switch over the configured key
// type; the enclosing method and the preceding case start before this chunk.
        break;
    case LONG:
        // box the numeric user key so it can serve as the map key of type K
        Long longKey = key.userKey.toLong();
        result.put((K) longKey, object);
        break;
/** * Performs common mapping task when loading an entity ... used in loaders * see {@link com.spikeify.commands.SingleLoader} {@link com.spikeify.commands.MultiLoader} {@link com.spikeify.commands.ScanLoader} * @param mapper to be used * @param key record key * @param record record value * @param object object holding data */ public static void map(ClassMapper mapper, Key key, Record record, Object object) { // set UserKey field switch (key.userKey.getType()) { case ParticleType.STRING: mapper.setUserKey(object, key.userKey.toString()); break; case ParticleType.INTEGER: mapper.setUserKey(object, key.userKey.toLong()); break; } // set meta-fields on the entity: @Namespace, @SetName, @Expiration.. mapper.setMetaFieldValues(object, key.namespace, key.setName, record.generation, record.expiration); // set field values mapper.setFieldValues(object, record.bins); } }
/**
 * Translates a query {@code Value} into the pair of predicate expressions
 * (field expression followed by the matching literal expression) required by
 * the Aerospike PredExp API.
 *
 * @param val the query value to translate
 * @return a two-element array: field expression, then value expression
 * @throws PredExpException if the value's particle type is not INTEGER,
 *                          STRING or GEOJSON
 */
private PredExp[] valToPredExp(Value val) throws PredExpException {
    int particleType = val.getType();
    if (particleType == ParticleType.INTEGER) {
        return new PredExp[]{ getFieldExpr(particleType), PredExp.integerValue(val.toLong()) };
    }
    if (particleType == ParticleType.STRING) {
        return new PredExp[]{ getFieldExpr(particleType), PredExp.stringValue(val.toString()) };
    }
    if (particleType == ParticleType.GEOJSON) {
        return new PredExp[]{ getFieldExpr(particleType), PredExp.geoJSONValue(val.toString()) };
    }
    throw new PredExpException("PredExp Unsupported Particle Type: " + val.getType());
}
// NOTE(review): fragment — interior of a switch over the filter operation;
// the enclosing method starts before this chunk. Aerospike Filter.range is
// inclusive on BOTH bounds, which drives the +1/-1 adjustments below.
case EQ:
    if (getValue1().getType() == ParticleType.INTEGER)
        return Filter.equal(getField(), getValue1().toLong());
    else
        return Filter.equal(getField(), getValue1().toString());
case GTEQ:
case BETWEEN:
    // inclusive lower bound = value1; open-ended upper bound when value2 absent
    return Filter.range(getField(), getValue1().toLong(), getValue2() == null ? Long.MAX_VALUE : getValue2().toLong());
case GT:
    // strict > : shift the inclusive lower bound up by one
    // NOTE(review): value2 as upper bound here mirrors the original — confirm intended
    return Filter.range(getField(), getValue1().toLong() + 1, getValue2() == null ? Long.MAX_VALUE : getValue2().toLong());
case LT:
    // strict < : shift the inclusive upper bound down by one
    return Filter.range(getField(), Long.MIN_VALUE, getValue1().toLong() - 1);
case LTEQ:
    // FIX: was getValue1().toLong() + 1, which matched values equal to value+1
    // (i.e. implemented <= value+1). With inclusive bounds, <= value ends at value.
    return Filter.range(getField(), Long.MIN_VALUE, getValue1().toLong());
case LIST_CONTAINS:
    return collectionContains(IndexCollectionType.LIST);
// NOTE(review): fragment — one case of a switch over the value's particle
// type; appends the field expression, the integer literal and the equality
// operator to the predicate-expression list being built.
case ParticleType.INTEGER:
    rs.add(getFieldExpr(valType));
    rs.add(PredExp.integerValue(val.toLong()));
    rs.add(PredExp.integerEqual());
    break;
private T getObject() { Record record = recordSet.getRecord(); Key key = recordSet.getKey(); // construct the entity object via provided ClassConstructor T object = classConstructor.construct(mapper.getType()); // save record hash into cache - used later for differential updating recordsCache.insert(key, record.bins); // set UserKey field switch (key.userKey.getType()) { case ParticleType.STRING: mapper.setUserKey(object, key.userKey.toString()); break; case ParticleType.INTEGER: mapper.setUserKey(object, key.userKey.toLong()); break; } // set meta-fields on the entity: @Namespace, @SetName, @Expiration.. mapper.setMetaFieldValues(object, key.namespace, key.setName, record.generation, record.expiration); // set field values mapper.setFieldValues(object, record.bins); // set LDT fields mapper.setBigDatatypeFields(object, asynClient, key); return object; }
// NOTE(review): fragment — interior of a switch on the user key's particle
// type; the enclosing method and the preceding case start before this chunk.
        break;
    case ParticleType.INTEGER:
        // numeric user key: store it on the existing-record wrapper
        existingRec.userKeyLong = key.userKey.toLong();
        break;