/**
 * INTERNAL:
 * Return the fields used to build the aggregate object.
 * These are simply all the fields of the reference (target) descriptor.
 */
protected Vector<DatabaseField> getReferenceFields() {
    ClassDescriptor targetDescriptor = getReferenceDescriptor();
    return targetDescriptor.getAllFields();
}
/**
 * INTERNAL:
 * Rehash any hashtables based on fields.
 * This is used to clone descriptors for aggregates, which hammer field names;
 * the work is delegated to the reference descriptor.
 */
public void rehashFieldDependancies(AbstractSession session) {
    ClassDescriptor targetDescriptor = getReferenceDescriptor();
    targetDescriptor.rehashFieldDependancies(session);
}
/**
 * INTERNAL:
 * Return if the mapping has any ownership or other dependency over its target object(s).
 * Delegates to the reference descriptor's part-dependency check.
 */
public boolean hasDependency() {
    ClassDescriptor targetDescriptor = getReferenceDescriptor();
    return targetDescriptor.hasDependencyOnParts();
}
/**
 * INTERNAL:
 * For an aggregate mapping the reference descriptor is cloned. The cloned descriptor is then
 * assigned primary keys and table names before initialize. Once the cloned descriptor is initialized
 * it is assigned as reference descriptor in the aggregate mapping. This is a very specific
 * behaviour for aggregate mappings. The original descriptor is used only for creating clones and
 * after that the aggregate mapping never uses it.
 * Some initialization is done in postInitialize to ensure the target descriptor's references are initialized.
 */
public void postInitialize(AbstractSession session) throws DescriptorException {
    super.postInitialize(session);
    ClassDescriptor targetDescriptor = getReferenceDescriptor();
    if (targetDescriptor != null) {
        targetDescriptor.postInitialize(session);
    }
}
/**
 * INTERNAL:
 * Since aggregate object mappings clone their descriptors, for inheritance the correct
 * child clone must be found. Returns the cloned root descriptor when the class matches,
 * otherwise looks up the matching subclass descriptor in the inheritance policy.
 */
protected ClassDescriptor getReferenceDescriptor(Class theClass, AbstractSession session) {
    ClassDescriptor rootDescriptor = getReferenceDescriptor();
    if (rootDescriptor.getJavaClass().equals(theClass)) {
        return rootDescriptor;
    }
    ClassDescriptor subclassDescriptor = rootDescriptor.getInheritancePolicy().getSubclassDescriptor(theClass);
    if (subclassDescriptor != null) {
        return subclassDescriptor;
    }
    throw DescriptorException.noSubClassMatch(theClass, this);
}
/** * INTERNAL: * Build and return a "template" database row with all the fields * set to null. */ protected AbstractRecord buildTemplateInsertRow(AbstractSession session) { AbstractRecord result = getReferenceDescriptor().getObjectBuilder().buildTemplateInsertRow(session); List processedMappings = (List)getReferenceDescriptor().getMappings().clone(); if (getReferenceDescriptor().hasInheritance()) { Enumeration children = getReferenceDescriptor().getInheritancePolicy().getChildDescriptors().elements(); while (children.hasMoreElements()) { Enumeration mappings = ((ClassDescriptor)children.nextElement()).getMappings().elements(); while (mappings.hasMoreElements()) { DatabaseMapping mapping = (DatabaseMapping)mappings.nextElement(); // Only write mappings once. if (!processedMappings.contains(mapping)) { mapping.writeInsertFieldsIntoRow(result, session); processedMappings.add(mapping); } } } } return result; }
/** * INTERNAL: * Add a primary key join column (secondary field). * If this contain primary keys and the descriptor(or its subclass) has multiple tables * (secondary tables or joined inheritance strategy), this should also know the primary key * join columns to handle some cases properly. */ public void addPrimaryKeyJoinField(DatabaseField primaryKeyField, DatabaseField secondaryField) { // now it doesn't need to manage this as a separate table here, // it's enough just to add the mapping to ObjectBuilder.mappingsByField ObjectBuilder builder = getReferenceDescriptor().getObjectBuilder(); DatabaseMapping mapping = builder.getMappingForField(primaryKeyField); if (mapping != null) { builder.getMappingsByField().put(secondaryField, mapping); } } }
/** * INTERNAL: * Return the classification for the field contained in the mapping. * This is used to convert the row value to a consistent Java value. */ public Class getFieldClassification(DatabaseField fieldToClassify) { DatabaseMapping mapping = getReferenceDescriptor().getObjectBuilder().getMappingForField(fieldToClassify); if (mapping == null) { return null;// Means that the mapping is read-only } return mapping.getFieldClassification(fieldToClassify); }
/** * Return the base mapping for the given DatabaseField. */ public DatabaseMapping getBaseMappingForField(DatabaseField databaseField) { DatabaseMapping mapping = getMappingForField(databaseField); // Drill down through the mappings until we get the direct mapping to the databaseField. while (mapping.isAggregateObjectMapping()) { mapping = ((AggregateObjectMapping)mapping).getReferenceDescriptor().getObjectBuilder().getMappingForField(databaseField); } return mapping; }
/** * INTERNAL: * Used to allow object level comparisons. */ public Expression buildObjectJoinExpression(Expression expression, Expression argument, AbstractSession session) { Expression attributeByAttributeComparison = null; //Enumeration mappingsEnum = getSourceToTargetKeyFields().elements(); Enumeration mappingsEnum = getReferenceDescriptor().getMappings().elements(); for (; mappingsEnum.hasMoreElements();) { DatabaseMapping mapping = (DatabaseMapping)mappingsEnum.nextElement(); String attributeName = mapping.getAttributeName(); Expression join = expression.get(attributeName).equal(argument.get(attributeName)); if (attributeByAttributeComparison == null) { attributeByAttributeComparison = join; } else { attributeByAttributeComparison = attributeByAttributeComparison.and(join); } } return attributeByAttributeComparison; }
/** * INTERNAL: * Cascade perform delete through mappings that require the cascade */ public void cascadePerformRemoveIfRequired(Object object, UnitOfWorkImpl uow, IdentityHashtable visitedObjects){ //objects referenced by this mapping are not registered as they have // no identity, however mappings from the referenced object may need cascading. Object objectReferenced = getRealAttributeValueFromObject(object, uow); if ((objectReferenced == null)){ return ; } if ( ! visitedObjects.contains(objectReferenced)){ visitedObjects.put(objectReferenced, objectReferenced); ObjectBuilder builder = getReferenceDescriptor(objectReferenced.getClass(), uow).getObjectBuilder(); builder.cascadePerformRemove(objectReferenced, uow, visitedObjects); } }
/** * INTERNAL: * Cascade registerNew for Create through mappings that require the cascade */ public void cascadeRegisterNewIfRequired(Object object, UnitOfWorkImpl uow, IdentityHashtable visitedObjects){ //aggregate objects are not registered but their mappings should be. Object objectReferenced = getRealAttributeValueFromObject(object, uow); if ( (objectReferenced == null) ){ return ; } if ( ! visitedObjects.contains(objectReferenced)){ visitedObjects.put(objectReferenced, objectReferenced); ObjectBuilder builder = getReferenceDescriptor(objectReferenced.getClass(), uow).getObjectBuilder(); builder.cascadeRegisterNewForCreate(objectReferenced, uow, visitedObjects); } }
ClassDescriptor referenceDescriptor = getReferenceDescriptor(); if ((value != null) && !referenceDescriptor.getJavaClass().isInstance(value)) { throw QueryException.incorrectClassForObjectComparison(expression, value, this);
/**
 * INTERNAL:
 * Clone the attribute from the original and assign it to the clone.
 * A non-null aggregate clone also has its change listener wired up via the
 * descriptor's object change policy before being set into the clone.
 */
public void buildClone(Object original, Object clone, UnitOfWorkImpl unitOfWork) {
    Object sourceValue = getAttributeValueFromObject(original);
    Object clonedValue = buildClonePart(original, sourceValue, unitOfWork);
    if (clonedValue != null) {
        ClassDescriptor cloneDescriptor = getReferenceDescriptor(clonedValue, unitOfWork);
        cloneDescriptor.getObjectChangePolicy().setAggregateChangeListener(clone, clonedValue, unitOfWork, cloneDescriptor, getAttributeName());
    }
    setAttributeValueInObject(clone, clonedValue);
}
ObjectBuilder aggregateObjectBuilder = AggregateObjectMapping.class.cast(mapping).getReferenceDescriptor().getObjectBuilder();
/** * INTERNAL: * A combination of readFromRowIntoObject and buildClone. * <p> * buildClone assumes the attribute value exists on the original and can * simply be copied. * <p> * readFromRowIntoObject assumes that one is building an original. * <p> * Both of the above assumptions are false in this method, and actually * attempts to do both at the same time. * <p> * Extract value from the row and set the attribute to this value in the * working copy clone. * In order to bypass the shared cache when in transaction a UnitOfWork must * be able to populate working copies directly from the row. */ public void buildCloneFromRow(AbstractRecord databaseRow, JoinedAttributeManager joinManager, Object clone, ObjectBuildingQuery sourceQuery, UnitOfWorkImpl unitOfWork, AbstractSession executionSession) { // This method is a combination of buildggregateFromRow and // buildClonePart on the super class. // none of buildClonePart used, as not an orignal new object, nor // do we worry about creating heavy clones for aggregate objects. Object clonedAttributeValue = buildAggregateFromRow(databaseRow, clone, joinManager, false, executionSession); ClassDescriptor descriptor = getReferenceDescriptor(clonedAttributeValue, unitOfWork); descriptor.getObjectChangePolicy().setAggregateChangeListener(clone, clonedAttributeValue, unitOfWork, descriptor, getAttributeName()); setAttributeValueInObject(clone, clonedAttributeValue); return; }
/**
 * INTERNAL:
 * Build and return a database row built with the values from
 * the specified attribute value.
 * A null change set produces either a null-reference row (when nulls are
 * allowed) or a descriptor exception; a read-only class produces an empty
 * row unless forceWriteOfReadOnlyClasses is set.
 */
protected AbstractRecord buildRowFromAggregateWithChangeRecord(ChangeRecord changeRecord, ObjectChangeSet objectChangeSet, AbstractSession session, boolean forceWriteOfReadOnlyClasses) throws DescriptorException {
    if (objectChangeSet == null) {
        if (isNullAllowed()) {
            return buildNullReferenceRow();
        }
        Object object = ((ObjectChangeSet)changeRecord.getOwner()).getUnitOfWorkClone();
        throw DescriptorException.nullForNonNullAggregate(object, this);
    }
    Class aggregateClass = objectChangeSet.getClassType(session);
    if ((!forceWriteOfReadOnlyClasses) && session.isClassReadOnly(aggregateClass)) {
        return new DatabaseRecord(1);
    }
    return getReferenceDescriptor(aggregateClass, session).getObjectBuilder().buildRowWithChangeSet(objectChangeSet, session);
}
ClassDescriptor aggregateDescriptor = ((AggregateObjectMapping)mapping).getReferenceDescriptor(); if (aggregateChangeSet == null) { aggregateChangeSet = aggregateDescriptor.getObjectBuilder().createObjectChangeSet(aggregate, (oracle.toplink.essentials.internal.sessions.UnitOfWorkChangeSet)((UnitOfWorkImpl)getSession()).getUnitOfWorkChangeSet(), getSession());
/**
 * INTERNAL:
 * For an aggregate mapping the reference descriptor is cloned. The cloned descriptor is then
 * assigned primary keys and table names before initialize. Once the cloned descriptor is initialized
 * it is assigned as reference descriptor in the aggregate mapping. This is a very specific
 * behaviour for aggregate mappings. The original descriptor is used only for creating clones and
 * after that the aggregate mapping never uses it.
 * Some initialization is done in postInitialize to ensure the target descriptor's references are initialized.
 */
public void initialize(AbstractSession session) throws DescriptorException {
    super.initialize(session);
    // Work on a private clone so the shared original descriptor is never mutated.
    ClassDescriptor clonedDescriptor = (ClassDescriptor)getReferenceDescriptor().clone();
    if (clonedDescriptor.isChildDescriptor()) {
        // Wire the clone into its parent's inheritance hierarchy before use.
        ClassDescriptor parentDescriptor = session.getDescriptor(clonedDescriptor.getInheritancePolicy().getParentClass());
        initializeParentInheritance(parentDescriptor, clonedDescriptor, session);
    }
    // From here on the mapping uses only the clone, never the original.
    setReferenceDescriptor(clonedDescriptor);
    initializeReferenceDescriptor(clonedDescriptor);
    // NOTE: ordering matters — the clone must be pre-initialized and
    // initialized before its fields can be translated to the owning
    // descriptor's tables, and before child clones are created.
    clonedDescriptor.preInitialize(session);
    clonedDescriptor.initialize(session);
    translateFields(clonedDescriptor, session);
    if (clonedDescriptor.hasInheritance() && clonedDescriptor.getInheritancePolicy().hasChildren()) {
        //clone child descriptors
        initializeChildInheritance(clonedDescriptor, session);
    }
    // Recompute this mapping's field list now that translation is complete.
    setFields(collectFields());
}
descriptor = ((AggregateObjectMapping)mapping).getReferenceDescriptor(); } else if (mapping.isForeignReferenceMapping()) { descriptor = ((ForeignReferenceMapping)mapping).getReferenceDescriptor();