/**
 * Creates a new instance.
 *
 * @param serializationFactory
 *            The factory defining the object serialisation implementation.
 * @param comparator
 *            The comparator to be used for sorting the results.
 * @param useCompression
 *            If true, the storage files will be compressed.
 */
public FileBasedSort(
		ObjectSerializationFactory serializationFactory, Comparator<T> comparator, boolean useCompression) {
	this.comparator = comparator;
	this.useCompression = useCompression;
	this.serializationFactory = serializationFactory;

	// Entities are persisted to disk in chunks; each chunk holds one
	// pre-sorted run of data.
	chunkedEntityStore = new ChunkedObjectStore<T>(serializationFactory, "emta", "idx", useCompression);

	// Pre-size the in-memory buffer to the maximum number of entities
	// sorted in memory at once.
	addBuffer = new ArrayList<T>(MAX_MEMORY_SORT_COUNT);
}
/** * Sorts the data currently in the add buffer, writes it to the object * store, and clears the buffer. */ private void flushAddBuffer() { if (addBuffer.size() >= 0) { // Sort the chunk prior to writing. Collections.sort(addBuffer, comparator); // Write all entities in the buffer to entity storage. for (T entity : addBuffer) { chunkedEntityStore.add(entity); } addBuffer.clear(); // Close the chunk in the underlying data store so that it can be // read separately. chunkedEntityStore.closeChunk(); } }
/**
 * {@inheritDoc}
 */
public void close() {
	// Release the resources held by the underlying chunk storage.
	chunkedEntityStore.close();
}
}
/**
 * Sorts and returns the contents of the sorter.
 *
 * @return An iterator providing access to the sorted entities.
 */
public ReleasableIterator<T> iterate() {
	// Persist any entities still buffered in memory so that all data
	// resides in on-disk chunks before iteration begins.
	flushAddBuffer();

	long chunkCount = chunkedEntityStore.getChunkCount();
	return iterate(0, 0, chunkCount);
}
/**
 * {@inheritDoc}
 */
@Override
public void complete() {
	// Any outstanding chunks must be closed before we can complete.
	closeChunk();

	// Finalise the chunk index so that readers can be created against it.
	indexStore.complete();
}
/** * Provides access to the contents of this store. * * @param chunk * The chunk to read objects from. * @return An iterator providing access to contents of the store. */ public ReleasableIterator<T> iterate(long chunk) { complete(); if (indexStoreReader == null) { indexStoreReader = indexStore.createReader(); } // Retrieve the file position and number of objects for the specified // chunk and iterate. return objectStore.iterate( indexStoreReader.get(chunk * 2).getValue(), indexStoreReader.get(chunk * 2 + 1).getValue() ); }
// Open a reader over each chunk in the requested range and collect the
// per-chunk iterators into "sources" — presumably the inputs to a
// subsequent merge phase (enclosing method not visible here; confirm).
for (int i = 0; i < chunkCount; i++) {
	sources.add(
			chunkedEntityStore.iterate(beginChunkIndex + i)
	);
/**
 * Sorts and returns the contents of the sorter.
 *
 * @return An iterator providing access to the sorted entities.
 */
public ReleasableIterator<T> iterate() {
	// Move any in-memory entities out to chunk storage first so the
	// full data set is available on disk.
	flushAddBuffer();

	long totalChunks = chunkedEntityStore.getChunkCount();
	return iterate(0, 0, totalChunks);
}
/**
 * {@inheritDoc}
 */
@Override
public void complete() {
	// Any outstanding chunks must be closed before we can complete.
	closeChunk();

	// Finalise the chunk index store.
	indexStore.complete();
}
/** * Provides access to the contents of this store. * * @param chunk * The chunk to read objects from. * @return An iterator providing access to contents of the store. */ public ReleasableIterator<T> iterate(long chunk) { complete(); if (indexStoreReader == null) { indexStoreReader = indexStore.createReader(); } // Retrieve the file position and number of objects for the specified // chunk and iterate. return objectStore.iterate( indexStoreReader.get(chunk * 2).getValue(), indexStoreReader.get(chunk * 2 + 1).getValue() ); }
// Gather one iterator per chunk in the requested range into "sources";
// these appear to be the input streams for a merge step defined in the
// enclosing method, which is not visible in this excerpt — confirm.
for (int i = 0; i < chunkCount; i++) {
	sources.add(
			chunkedEntityStore.iterate(beginChunkIndex + i)
	);
/** * Sorts the data currently in the add buffer, writes it to the object * store, and clears the buffer. */ private void flushAddBuffer() { if (addBuffer.size() >= 0) { // Sort the chunk prior to writing. Collections.sort(addBuffer, comparator); // Write all entities in the buffer to entity storage. for (T entity : addBuffer) { chunkedEntityStore.add(entity); } addBuffer.clear(); // Close the chunk in the underlying data store so that it can be // read separately. chunkedEntityStore.closeChunk(); } }
/**
 * {@inheritDoc}
 */
public void close() {
	// Delegate resource cleanup to the underlying chunked entity store.
	chunkedEntityStore.close();
}
}
/**
 * Creates a new instance.
 *
 * @param serializationFactory
 *            The factory defining the object serialisation implementation.
 * @param comparator
 *            The comparator to be used for sorting the results.
 * @param useCompression
 *            If true, the storage files will be compressed.
 */
public FileBasedSort(
		ObjectSerializationFactory serializationFactory, Comparator<T> comparator, boolean useCompression) {
	this.serializationFactory = serializationFactory;
	this.useCompression = useCompression;
	this.comparator = comparator;

	// Allocate the in-memory buffer up front at its maximum capacity.
	addBuffer = new ArrayList<T>(MAX_MEMORY_SORT_COUNT);

	// Chunked storage persists each sorted run of entities separately.
	chunkedEntityStore = new ChunkedObjectStore<T>(serializationFactory, "emta", "idx", useCompression);
}