/**
 * INTERNAL:
 * Return any objects that have been deleted through database cascade
 * delete constraints, lazily creating the backing identity set on
 * first access.
 */
public Set<Object> getCascadeDeleteObjects() {
    Set<Object> deleted = this.cascadeDeleteObjects;
    if (deleted == null) {
        // Identity semantics: membership is by reference, not equals().
        deleted = new IdentityHashSet();
        this.cascadeDeleteObjects = deleted;
    }
    return deleted;
}
/**
 * Constructs a new <tt>IdentityHashSet</tt> with the same contents
 * as the given <tt>Collection</tt>. The new <tt>IdentityHashSet</tt>
 * is created with an initial capacity sufficient to hold the elements of
 * the given <tt>Collection</tt>.
 *
 * @param c the <tt>Collection</tt> whose contents are to be placed in the
 *          new <tt>IdentityHashSet</tt>.
 */
public IdentityHashSet(Collection c) {
    // Size the bucket array so that adding all of c stays under the load
    // factor (avoids an immediate rehash), but never below the default
    // initial capacity.
    this(Math.max((int)(c.size() / DEFAULT_LOAD_FACTOR) + 1, DEFAULT_INITIAL_CAPACITY), DEFAULT_LOAD_FACTOR);
    addAll(c);
}
/**
 * Deserialize the <tt>IdentityHashSet</tt> from a stream.
 * Reads the bucket-array capacity and the element count written by
 * {@code writeObject}, then re-adds each element so identity hashing
 * is recomputed in this VM (identity hash codes are not stable across
 * serialization).
 */
private void readObject(ObjectInputStream s) throws IOException, ClassNotFoundException {
    // Read in the threshold, loadfactor (and any hidden 'magic' stuff).
    s.defaultReadObject();

    // Read in number of buckets and allocate the bucket array;
    int numBuckets = s.readInt();
    entries = new Entry[numBuckets];

    // Read in size (count)
    int size = s.readInt();

    // Read the objects and add to the IdentityHashSet.
    // add() maintains count and rehashes if needed.
    for (int i = 0; i < size; i++) {
        add(s.readObject());
    }
}
// End of enclosing class.
}
/** * Serialize the state of this <tt>IdentityHashSet</tt> to a stream. * * @serialData The <i>capacity</i> of the <tt>IdentityHashSet</tt> * (the length of the bucket array) is emitted (int), followed by the * <i>size</i> of the <tt>IdentityHashSet</tt>, followed by the * contents (in no particular order). */ private void writeObject(ObjectOutputStream s) throws IOException, ClassNotFoundException { // Write out the threshold, loadfactor (and any hidden 'magic' stuff). s.defaultWriteObject(); // Write out number of buckets s.writeInt(entries.length); // Write out count s.writeInt(count); // Write out contents for (Iterator i = iterator(); i.hasNext();) { s.writeObject(i.next()); } }
// NOTE(review): mid-method fragment — the enclosing method is not visible here.
// Presumably the table exceeded its load threshold: grow and rehash, then
// re-read the (possibly replaced) bucket array and recompute the target
// bucket against the new length. TODO confirm against the full method.
rehash();
copyOfEntries = entries;
// Mask clears the sign bit so the modulo result is a valid non-negative index.
index = (hash & 0x7FFFFFFF) % copyOfEntries.length;
/**
 * Deserialize the <tt>IdentityHashSet</tt> from a stream.
 * Reads the bucket-array capacity and the element count written by
 * {@code writeObject}, then re-adds each element so identity hashing
 * is recomputed in this VM (identity hash codes are not stable across
 * serialization).
 */
private void readObject(ObjectInputStream s) throws IOException, ClassNotFoundException {
    // Read in the threshold, loadfactor (and any hidden 'magic' stuff).
    s.defaultReadObject();

    // Read in number of buckets and allocate the bucket array;
    int numBuckets = s.readInt();
    entries = new Entry[numBuckets];

    // Read in size (count)
    int size = s.readInt();

    // Read the objects and add to the IdentityHashSet.
    // add() maintains count and rehashes if needed.
    for (int i = 0; i < size; i++) {
        add(s.readObject());
    }
}
// End of enclosing class.
}
/** * Serialize the state of this <tt>IdentityHashSet</tt> to a stream. * * @serialData The <i>capacity</i> of the <tt>IdentityHashSet</tt> * (the length of the bucket array) is emitted (int), followed by the * <i>size</i> of the <tt>IdentityHashSet</tt>, followed by the * contents (in no particular order). */ private void writeObject(ObjectOutputStream s) throws IOException, ClassNotFoundException { // Write out the threshold, loadfactor (and any hidden 'magic' stuff). s.defaultWriteObject(); // Write out number of buckets s.writeInt(entries.length); // Write out count s.writeInt(count); // Write out contents for (Iterator i = iterator(); i.hasNext();) { s.writeObject(i.next()); } }
// NOTE(review): mid-method fragment — the enclosing method is not visible here.
// Presumably the table exceeded its load threshold: grow and rehash, then
// re-read the (possibly replaced) bucket array and recompute the target
// bucket against the new length. TODO confirm against the full method.
rehash();
copyOfEntries = entries;
// Mask clears the sign bit so the modulo result is a valid non-negative index.
index = (hash & 0x7FFFFFFF) % copyOfEntries.length;
/**
 * INTERNAL:
 * Return the list of objects with changes, lazily creating the backing
 * identity set on first access.
 * This is used in weak reference mode to avoid garbage collection of
 * changed objects.
 */
public Set<Object> getChangeTrackedHardList() {
    Set<Object> hardList = this.changeTrackedHardList;
    if (hardList == null) {
        // Identity semantics: membership is by reference, not equals().
        hardList = new IdentityHashSet();
        this.changeTrackedHardList = hardList;
    }
    return hardList;
}
/**
 * Constructs a new <tt>IdentityHashSet</tt> with the same contents
 * as the given <tt>Collection</tt>. The new <tt>IdentityHashSet</tt>
 * is created with an initial capacity sufficient to hold the elements of
 * the given <tt>Collection</tt>.
 *
 * @param c the <tt>Collection</tt> whose contents are to be placed in the
 *          new <tt>IdentityHashSet</tt>.
 */
public IdentityHashSet(Collection c) {
    // Size the bucket array so that adding all of c stays under the load
    // factor (avoids an immediate rehash), but never below the default
    // initial capacity.
    this(Math.max((int)(c.size() / DEFAULT_LOAD_FACTOR) + 1, DEFAULT_INITIAL_CAPACITY), DEFAULT_LOAD_FACTOR);
    addAll(c);
}
/**
 * Deserialize the <tt>IdentityHashSet</tt> from a stream.
 * Reads the bucket-array capacity and the element count written by
 * {@code writeObject}, then re-adds each element so identity hashing
 * is recomputed in this VM (identity hash codes are not stable across
 * serialization).
 */
private void readObject(ObjectInputStream s) throws IOException, ClassNotFoundException {
    // Read in the threshold, loadfactor (and any hidden 'magic' stuff).
    s.defaultReadObject();

    // Read in number of buckets and allocate the bucket array;
    int numBuckets = s.readInt();
    entries = new Entry[numBuckets];

    // Read in size (count)
    int size = s.readInt();

    // Read the objects and add to the IdentityHashSet.
    // add() maintains count and rehashes if needed.
    for (int i = 0; i < size; i++) {
        add(s.readObject());
    }
}
// End of enclosing class.
}
/** * Serialize the state of this <tt>IdentityHashSet</tt> to a stream. * * @serialData The <i>capacity</i> of the <tt>IdentityHashSet</tt> * (the length of the bucket array) is emitted (int), followed by the * <i>size</i> of the <tt>IdentityHashSet</tt>, followed by the * contents (in no particular order). */ private void writeObject(ObjectOutputStream s) throws IOException, ClassNotFoundException { // Write out the threshold, loadfactor (and any hidden 'magic' stuff). s.defaultWriteObject(); // Write out number of buckets s.writeInt(entries.length); // Write out count s.writeInt(count); // Write out contents for (Iterator i = iterator(); i.hasNext();) { s.writeObject(i.next()); } }
// NOTE(review): mid-method fragment — the enclosing method is not visible here.
// Presumably the table exceeded its load threshold: grow and rehash, then
// re-read the (possibly replaced) bucket array and recompute the target
// bucket against the new length. TODO confirm against the full method.
rehash();
copyOfEntries = entries;
// Mask clears the sign bit so the modulo result is a valid non-negative index.
index = (hash & 0x7FFFFFFF) % copyOfEntries.length;
/**
 * INTERNAL:
 * Return any objects that have been deleted through database cascade
 * delete constraints, lazily creating the backing identity set on
 * first access.
 */
public Set<Object> getCascadeDeleteObjects() {
    Set<Object> deleted = this.cascadeDeleteObjects;
    if (deleted == null) {
        // Identity semantics: membership is by reference, not equals().
        deleted = new IdentityHashSet();
        this.cascadeDeleteObjects = deleted;
    }
    return deleted;
}
/**
 * Constructs a new <tt>IdentityHashSet</tt> with the same contents
 * as the given <tt>Collection</tt>. The new <tt>IdentityHashSet</tt>
 * is created with an initial capacity sufficient to hold the elements of
 * the given <tt>Collection</tt>.
 *
 * @param c the <tt>Collection</tt> whose contents are to be placed in the
 *          new <tt>IdentityHashSet</tt>.
 */
public IdentityHashSet(Collection c) {
    // Size the bucket array so that adding all of c stays under the load
    // factor (avoids an immediate rehash), but never below the default
    // initial capacity.
    this(Math.max((int)(c.size() / DEFAULT_LOAD_FACTOR) + 1, DEFAULT_INITIAL_CAPACITY), DEFAULT_LOAD_FACTOR);
    addAll(c);
}
/**
 * Return the set of objects currently being loaded, lazily creating the
 * backing identity set on first access.
 * NOTE(review): raw {@code Set} return type kept for source compatibility
 * with existing callers.
 */
public Set getObjectsInLoading() {
    if (objectsInLoading != null) {
        return objectsInLoading;
    }
    // Identity semantics: membership is by reference, not equals().
    objectsInLoading = new IdentityHashSet();
    return objectsInLoading;
}
/**
 * INTERNAL:
 * Return the list of objects with changes, lazily creating the backing
 * identity set on first access.
 * This is used in weak reference mode to avoid garbage collection of
 * changed objects.
 */
public Set<Object> getChangeTrackedHardList() {
    Set<Object> hardList = this.changeTrackedHardList;
    if (hardList == null) {
        // Identity semantics: membership is by reference, not equals().
        hardList = new IdentityHashSet();
        this.changeTrackedHardList = hardList;
    }
    return hardList;
}
/**
 * INTERNAL:
 * Return the list of objects with changes, lazily creating the backing
 * identity set on first access.
 * This is used in weak reference mode to avoid garbage collection of
 * changed objects.
 */
public Set<Object> getChangeTrackedHardList() {
    Set<Object> hardList = this.changeTrackedHardList;
    if (hardList == null) {
        // Identity semantics: membership is by reference, not equals().
        hardList = new IdentityHashSet();
        this.changeTrackedHardList = hardList;
    }
    return hardList;
}