/**
 * Builds a DBRef reference key for the given entity type and raw ID value.
 *
 * @param entity the mapped entity class
 * @param id     the raw ID value
 * @return a DBRef pointing at the entity's mapped collection
 */
@Override
protected DBRef asReferenceKey(Class<?> entity, Object id) {
    final String collectionName = morphia.getMapper().getCollectionName(entity);
    final Key<?> entityKey = new Key<Object>(entity, collectionName, id);
    return morphia.getMapper().keyToDBRef(entityKey);
}
}
/**
 * Creates a Key for a type and an ID value.
 *
 * @param type the Class of the entity
 * @param id   the ID value
 * @param <T>  the type of the entity
 * @return the Key, or null when the ID is null
 */
public <T> Key<T> manualRefToKey(final Class<T> type, final Object id) {
    if (id == null) {
        return null;
    }
    return new Key<T>(type, getCollectionName(type), id);
}
/**
 * Creates a Key from an entity class and a Serializable ID.
 *
 * @param clazz the Class of the entity
 * @param id    the Serializable ID value
 * @param <T>   the type of the entity
 * @return the Key
 */
<T> Key<T> createKey(final Class<T> clazz, final Serializable id) {
    final String collectionName = getCollectionName(clazz);
    return new Key<T>(clazz, collectionName, id);
}
/**
 * Resolves the MongoCollection mapped to the given entity class.
 *
 * @param clazz the Class of the entity
 * @param <T>   the type of the entity
 * @return the driver collection for the entity's mapped collection name
 */
private <T> MongoCollection<T> getMongoCollection(final Class<T> clazz) {
    final String collectionName = mapper.getCollectionName(clazz);
    return getMongoCollection(collectionName, clazz);
}
/**
 * Gets the Keys for a list of objects.
 *
 * @param clazz the Class of the objects
 * @param refs  the objects to fetch the keys for
 * @param <T>   the type of the entity
 * @return the list of Keys
 */
public <T> List<Key<T>> getKeysByManualRefs(final Class<T> clazz, final List<Object> refs) {
    final String collectionName = getCollectionName(clazz);
    // Presize the result to avoid intermediate resizes.
    final List<Key<T>> result = new ArrayList<Key<T>>(refs.size());
    for (final Object refId : refs) {
        result.add(this.<T>manualRefToKey(collectionName, refId));
    }
    return result;
}
/**
 * Looks up the legacy DBCollection for a mapped class.
 *
 * @param clazz the mapped entity class (raw type mandated by the overridden signature)
 * @return the DBCollection for the entity's mapped collection name
 */
@Override
public DBCollection getCollection(final Class clazz) {
    final String collectionName = mapper.getCollectionName(clazz);
    return getDB().getCollection(collectionName);
}
/**
 * Fetches the entity identified by a Key, verifying that the Key's collection
 * matches the collection mapped for the requested class.
 *
 * @param clazz the Class of the entity to fetch
 * @param key   the Key identifying the entity
 * @param <T>   the type of the entity
 * @return the entity, or null if not found
 * @throws IllegalArgumentException when the key's collection does not match the
 *                                  collection mapped for {@code clazz}
 */
@Override
public <T> T getByKey(final Class<T> clazz, final Key<T> key) {
    final String collectionName = mapper.getCollectionName(clazz);
    final String keyCollection = mapper.updateCollection(key);
    if (!collectionName.equals(keyCollection)) {
        // IllegalArgumentException (a RuntimeException) instead of a raw RuntimeException:
        // the mismatch is a caller-supplied-argument problem; existing catch blocks still work.
        throw new IllegalArgumentException("collection names don't match for key and class: " + collectionName + " != " + keyCollection);
    }
    Object id = key.getId();
    // Strip the discriminator field so the lookup matches on the pure ID document.
    if (id instanceof DBObject) {
        ((DBObject) id).removeField(Mapper.CLASS_NAME_FIELDNAME);
    }
    return get(clazz, id);
}
/**
 * Gets the Key for an entity.
 *
 * @param entity the entity to process
 * @param <T>    the type of the entity
 * @return the Key, or null when the entity has no ID
 */
public <T> Key<T> getKey(final T entity) {
    // A proxied reference already carries its Key — no need to touch the target.
    if (entity instanceof ProxiedEntityReference) {
        final ProxiedEntityReference proxy = (ProxiedEntityReference) entity;
        return (Key<T>) proxy.__getKey();
    }
    final T unwrapped = ProxyHelper.unwrap(entity);
    if (unwrapped instanceof Key) {
        return (Key<T>) unwrapped;
    }
    final Object id = getId(unwrapped);
    if (id == null) {
        return null;
    }
    final Class<T> type = (Class<T>) unwrapped.getClass();
    return new Key<T>(type, getCollectionName(type), id);
}
<T> Key<T> createKey(final Class<T> clazz, final Object id) { if (id instanceof Serializable) { return createKey(clazz, (Serializable) id); } //TODO: cache the encoders, maybe use the pool version of the buffer that the driver does. final BSONEncoder enc = new BasicBSONEncoder(); return new Key<T>(clazz, getCollectionName(clazz), enc.encode(toDBObject(id))); }
/**
 * Converts a Key to a DBRef.
 *
 * @param key the Key to convert (its collection is filled in from the type's
 *            mapping when missing — note this mutates the passed Key)
 * @return the DBRef, or null when the key is null
 * @throws IllegalStateException when the key has neither a type nor a collection
 */
public DBRef keyToDBRef(final Key key) {
    if (key == null) {
        return null;
    }
    if (key.getType() == null && key.getCollection() == null) {
        throw new IllegalStateException("How can it be missing both?");
    }
    if (key.getCollection() == null) {
        key.setCollection(getCollectionName(key.getType()));
    }
    Object id = key.getId();
    // Fix: guard against a null ID — the original dereferenced id.getClass()
    // unconditionally and threw NullPointerException for keys without an ID.
    if (id != null && isMapped(id.getClass())) {
        id = toMongoObject(id, true);
    }
    return new DBRef(key.getCollection(), id);
}
/**
 * Runs a map/reduce job over the given query's collection, writing results into
 * the collection mapped for {@code outputType}.
 *
 * @param type        the output type of the map/reduce job
 * @param query       the query selecting the input documents
 * @param map         the map function source
 * @param reduce      the reduce function source
 * @param finalize    the optional finalize function source (skipped when null or empty)
 * @param scopeFields optional global scope variables (skipped when null or empty)
 * @param outputType  the Class whose mapped collection receives the output
 * @param <T>         the type of the results
 * @return the map/reduce results
 * @deprecated retained for the deprecated overload's callers
 */
@Override
@Deprecated
public <T> MapreduceResults<T> mapReduce(final MapreduceType type, final Query query, final String map, final String reduce,
                                         final String finalize, final Map<String, Object> scopeFields,
                                         final Class<T> outputType) {
    final DBCollection inputCollection = query.getCollection();
    final String outputCollection = mapper.getCollectionName(outputType);
    final MapReduceCommand command = new MapReduceCommand(inputCollection, map, reduce, outputCollection,
                                                          type.toOutputType(), query.getQueryObject());
    // Each option is applied only when the query/caller actually supplies it.
    if (query.getLimit() > 0) {
        command.setLimit(query.getLimit());
    }
    if (query.getSortObject() != null) {
        command.setSort(query.getSortObject());
    }
    if (finalize != null && finalize.length() != 0) {
        command.setFinalize(finalize);
    }
    if (scopeFields != null && !scopeFields.isEmpty()) {
        command.setScope(scopeFields);
    }
    return mapReduce(type, query, outputType, command);
}
// NOTE(review): incomplete fragment — the enclosing method(s) and the closing brace of
// the if-block are outside this view. The inner redeclaration of `key` suggests these
// statements come from TWO separate methods of an entity-caching path; confirm against
// the full file before editing.
// Build a Key from the entity's class, its mapped collection name, and the _id read
// from the raw DBObject (raw Key construction — unchecked here).
final Key<T> key = new Key(entity.getClass(), getCollectionName(entity.getClass()), dbObject.get(ID_KEY));
// Look for an already-materialized instance for this key in the per-operation cache.
final T cachedInstance = cache.getEntity(key);
if (cachedInstance != null) {
// Second fragment: re-key the entity from the `updated` document and cache it.
final Key key = new Key(entity.getClass(), getCollectionName(entity.getClass()), updated.get(ID_KEY));
cache.putEntity(key, entity);
/**
 * Ensures that every mapped class annotated as capped has a capped collection:
 * creates the collection when absent, logs when it already exists (warning when
 * it exists but is not capped — it is NOT converted).
 */
@Override
public void ensureCaps() {
    for (final MappedClass mc : mapper.getMappedClasses()) {
        if (mc.getEntityAnnotation() != null && mc.getEntityAnnotation().cap().value() > 0) {
            final CappedAt cap = mc.getEntityAnnotation().cap();
            final String collName = mapper.getCollectionName(mc.getClazz());
            final BasicDBObjectBuilder dbCapOpts = start("capped", true);
            // cap.value() > 0 is guaranteed by the enclosing condition, so the
            // original redundant re-check was dropped.
            dbCapOpts.add("size", cap.value());
            if (cap.count() > 0) {
                dbCapOpts.add("max", cap.count());
            }
            final DB database = getDB();
            if (database.getCollectionNames().contains(collName)) {
                final DBObject dbResult = database.command(start("collstats", collName).get());
                if (dbResult.containsField("capped")) {
                    LOG.debug("DBCollection already exists and is capped already; doing nothing. " + dbResult);
                } else {
                    LOG.warning("DBCollection already exists with same name(" + collName
                                + ") and is not capped; not creating capped version!");
                }
            } else {
                // Fix: reuse the `database` local instead of calling getDB() a second time.
                database.createCollection(collName, dbCapOpts.get());
                LOG.debug("Created capped DBCollection (" + collName + ") with opts " + dbCapOpts);
            }
        }
    }
}