/**
 * Creates a new big-array big list with given capacity.
 *
 * @param capacity the initial capacity of the array list (may be 0).
 * @throws IllegalArgumentException if {@code capacity} is negative.
 */
public IntBigArrayBigList(final long capacity) {
	if (capacity < 0) {
		throw new IllegalArgumentException("Initial capacity (" + capacity + ") is negative");
	}
	// Share the canonical empty big array instead of allocating a fresh one.
	a = capacity == 0 ? IntBigArrays.EMPTY_BIG_ARRAY : IntBigArrays.newBigArray(capacity);
}
/**
/**
 * Deserializes this big list: after default deserialization restores
 * {@code size}, reallocates the backing big array and reads the elements
 * back in order.
 *
 * @param s the object input stream.
 * @throws java.io.IOException if an I/O error occurs.
 * @throws ClassNotFoundException if a serialized class cannot be resolved.
 */
private void readObject(java.io.ObjectInputStream s) throws java.io.IOException, ClassNotFoundException {
	s.defaultReadObject();
	a = IntBigArrays.newBigArray(size);
	// Bug fix: the counter must be a long — `size` is a long, and an int
	// counter would overflow (wrapping negative) for lists with more than
	// Integer.MAX_VALUE elements, corrupting deserialization.
	for (long i = 0; i < size; i++) IntBigArrays.set(a, i, s.readInt());
}
}
/**
 * Turns a standard array into a big array.
 *
 * <p>
 * Note that the returned big array might contain as a segment the original
 * array.
 *
 * @param array an array.
 * @return a new big array with the same length and content of {@code array}.
 */
public static int[][] wrap(final int[] array) {
	final int length = array.length;
	if (length == 0) return EMPTY_BIG_ARRAY;
	// A single segment suffices: reuse the caller's array directly.
	if (length <= SEGMENT_SIZE) return new int[][] { array };
	final int[][] result = newBigArray(length);
	int segment = 0;
	for (final int[] target : result) {
		System.arraycopy(array, (int) start(segment++), target, 0, target.length);
	}
	return result;
}
/**
/** Loads elements from a file given by a {@link File} object, storing them in a new big array.
 *
 * <p>Note that the length of the returned big array will be computed
 * dividing the specified file size by the number of bytes used to
 * represent each element.
 *
 * @param file a file.
 * @return a big array filled with the content of the specified file.
 * @throws IOException if an I/O error occurs.
 */
public static int[][] loadIntsBig(final File file) throws IOException {
	final FileInputStream fis = new FileInputStream(file);
	try {
		// One element per Integer.SIZE / 8 = 4 bytes of file content.
		final long length = fis.getChannel().size() / (Integer.SIZE / 8);
		final int[][] array = IntBigArrays.newBigArray(length);
		final DataInputStream dis = new DataInputStream(new FastBufferedInputStream(fis));
		// Fill the big array one segment at a time.
		for(int i = 0; i < array.length; i++) {
			final int[] t = array[i];
			final int l = t.length;
			for(int d = 0; d < l; d++) t[d] = dis.readInt();
		}
		dis.close();
		return array;
	}
	finally {
		// Bug fix: the original leaked the file descriptor if a read threw;
		// closing here is idempotent when dis.close() already succeeded.
		fis.close();
	}
}
/** Loads elements from a file given by a filename, storing them in a new big array.
// NOTE(review): benchmark/driver fragment — the method body continues beyond this
// excerpt, so it is incomplete here. It allocates a big array of 2^arg[0] elements;
// x, y, z are presumably scratch values and `start` a timing origin — TODO confirm
// against the full source before editing.
public static void main(final String arg[]) { int[][] a = IntBigArrays.newBigArray(1L << Integer.parseInt(arg[0])); long x, y, z, start;
/** Utility method to load a compressed size file into a list.
 *
 * @param filename the filename containing the γ-coded sizes (see {@link BitStreamIndexWriter}).
 * @param N the number of documents.
 * @return a list of integers backed by an array.
 * @throws IOException if an I/O error occurs.
 */
public static IntBigList readSizes( final CharSequence filename, final long N ) throws IOException {
	final int[][] size = IntBigArrays.newBigArray( N );
	final InputBitStream in = new InputBitStream( filename.toString() );
	try {
		LOGGER.debug( "Loading sizes..." );
		// Each segment of the big array is filled with a run of γ-coded integers.
		for( int segment = 0; segment < size.length; segment++ ) in.readGammas( size[ segment ], size[ segment ].length );
		LOGGER.debug( "Completed." );
	}
	finally {
		// Bug fix: close the bit stream even if decoding throws
		// (the original leaked it on error).
		in.close();
	}
	return IntBigArrayBigList.wrap( size );
}
/**
 * Returns a copy of a portion of a big array.
 *
 * @param array a big array.
 * @param offset the first element to copy.
 * @param length the number of elements to copy.
 * @return a new big array containing {@code length} elements of {@code array}
 *         starting at {@code offset}.
 */
public static int[][] copy(final int[][] array, final long offset, final long length) {
	// Validate the requested range before allocating anything.
	ensureOffsetLength(array, offset, length);
	final int[][] duplicate = newBigArray(length);
	copy(array, offset, duplicate, 0, length);
	return duplicate;
}
/**
/** * Creates a new hash big set. * * <p> * The actual table size will be the least power of two greater than * {@code expected}/{@code f}. * * @param expected * the expected number of elements in the set. * @param f * the load factor. */ public IntOpenHashBigSet(final long expected, final float f) { if (f <= 0 || f > 1) throw new IllegalArgumentException("Load factor must be greater than 0 and smaller than or equal to 1"); if (n < 0) throw new IllegalArgumentException("The expected number of elements must be nonnegative"); this.f = f; minN = n = bigArraySize(expected, f); maxFill = maxFill(n, f); key = IntBigArrays.newBigArray(n); initMasks(); } /**
/**
 * Writes to {@code sizesOutputBitStream} the γ-coded document sizes of all
 * component indices, one index after another, optionally caching them in
 * the {@code size} big array when {@code needsSizes} is set.
 *
 * @param sizesOutputBitStream the bit stream the γ-coded sizes are written to.
 * @return the maximum document size seen across all indices.
 * @throws IOException if an I/O error occurs.
 */
protected int combineSizes( final OutputBitStream sizesOutputBitStream ) throws IOException {
	int maxDocSize = 0;
	// Bug fix: currDoc indexes a big array sized by numberOfDocuments (a long);
	// an int counter would overflow past Integer.MAX_VALUE documents.
	long currDoc = 0;
	if ( needsSizes ) size = IntBigArrays.newBigArray( numberOfDocuments );
	for( int i = 0; i < numIndices; i++ ) {
		final IntIterator sizes = sizes( i );
		int s = 0;
		long j = index[ i ].numberOfDocuments;
		while( j-- != 0 ) {
			maxDocSize = Math.max( maxDocSize, s = sizes.nextInt() );
			if ( needsSizes ) IntBigArrays.set( size, currDoc++, s );
			sizesOutputBitStream.writeGamma( s );
		}
		// Some size iterators hold resources (e.g. a file-backed stream).
		if ( sizes instanceof Closeable ) ((Closeable)sizes).close();
	}
	return maxDocSize;
}
/**
 * Writes to {@code sizesOutputBitStream} the γ-coded document sizes of all
 * component indices, one index after another, optionally caching them in
 * the {@code size} big array when {@code needsSizes} is set.
 *
 * @param sizesOutputBitStream the bit stream the γ-coded sizes are written to.
 * @return the maximum document size seen across all indices.
 * @throws IOException if an I/O error occurs.
 */
protected int combineSizes( final OutputBitStream sizesOutputBitStream ) throws IOException {
	int maxDocSize = 0;
	// Bug fix: currDoc indexes a big array sized by numberOfDocuments (a long);
	// an int counter would overflow past Integer.MAX_VALUE documents.
	long currDoc = 0;
	if ( needsSizes ) size = IntBigArrays.newBigArray( numberOfDocuments );
	for( int i = 0; i < numIndices; i++ ) {
		final IntIterator sizes = sizes( i );
		int s = 0;
		long j = index[ i ].numberOfDocuments;
		while( j-- != 0 ) {
			maxDocSize = Math.max( maxDocSize, s = sizes.nextInt() );
			if ( needsSizes ) IntBigArrays.set( size, currDoc++, s );
			sizesOutputBitStream.writeGamma( s );
		}
		// Some size iterators hold resources (e.g. a file-backed stream).
		if ( sizes instanceof Closeable ) ((Closeable)sizes).close();
	}
	return maxDocSize;
}
/** Utility method to load a compressed size file into a list.
 *
 * @param ioFactory the factory that will be used to perform I/O.
 * @param filename the file containing the γ-coded sizes (see {@link BitStreamIndexWriter}).
 * @param n the number of documents.
 * @return a list of integers backed by an array.
 * @throws IOException if an I/O error occurs.
 */
public static IntBigArrayBigList readSizes( final IOFactory ioFactory, final CharSequence filename, final long n ) throws IOException {
	final int[][] size = IntBigArrays.newBigArray( n );
	final InputBitStream in = new InputBitStream( ioFactory.getInputStream( filename.toString() ), false );
	try {
		LOGGER.debug( "Loading sizes..." );
		// Each segment of the big array is filled with a run of γ-coded integers.
		for( int segment = 0; segment < size.length; segment++ ) in.readGammas( size[ segment ], size[ segment ].length );
		LOGGER.debug( "Completed." );
	}
	finally {
		// Bug fix: close the bit stream even if decoding throws
		// (the original leaked the underlying stream on error).
		in.close();
	}
	return IntBigArrayBigList.wrap( size );
}
/**
 * Writes to {@code sizesOutputBitStream} the γ-coded size of each document,
 * merged across all component indices; at most one index may report a nonzero
 * size for a given document. Sizes are optionally cached in {@code size}.
 *
 * @param sizesOutputBitStream the bit stream the γ-coded sizes are written to.
 * @return the maximum document size seen.
 * @throws IOException if an I/O error occurs.
 * @throws IllegalArgumentException if a document has nonzero length in two indices.
 */
protected int combineSizes( final OutputBitStream sizesOutputBitStream ) throws IOException {
	int curSize, s, maxDocSize = 0;
	if ( needsSizes ) size = IntBigArrays.newBigArray( numberOfDocuments );
	final IntIterator[] sizes = new IntIterator[ numIndices ];
	for( int i = 0; i < numIndices; i++ ) sizes[ i ] = sizes( i );
	// Bug fix: d must be a long — numberOfDocuments is a long, and an int loop
	// variable would overflow past Integer.MAX_VALUE documents.
	for( long d = 0; d < numberOfDocuments; d++ ) {
		curSize = 0;
		for( int i = 0; i < numIndices; i++ ) {
			// Consume this index's size stream only while it still has documents.
			if ( d < index[ i ].numberOfDocuments && ( s = sizes[ i ].nextInt() ) != 0 ) {
				if ( curSize != 0 ) throw new IllegalArgumentException( "Document " + d + " has nonzero length in two indices" );
				curSize = s;
			}
		}
		if ( needsSizes ) IntBigArrays.set( size, d, curSize );
		if ( curSize > maxDocSize ) maxDocSize = curSize;
		sizesOutputBitStream.writeGamma( curSize );
	}
	for( int i = 0; i < numIndices; i++ ) if ( sizes[ i ] instanceof Closeable ) ((Closeable)sizes[ i ]).close();
	return maxDocSize;
}
/**
 * Writes to {@code sizesOutputBitStream} the γ-coded size of each document,
 * merged across all component indices; at most one index may report a nonzero
 * size for a given document. Sizes are optionally cached in {@code size}.
 *
 * @param sizesOutputBitStream the bit stream the γ-coded sizes are written to.
 * @return the maximum document size seen.
 * @throws IOException if an I/O error occurs.
 * @throws IllegalArgumentException if a document has nonzero length in two indices.
 */
protected int combineSizes( final OutputBitStream sizesOutputBitStream ) throws IOException {
	int curSize, s, maxDocSize = 0;
	if ( needsSizes ) size = IntBigArrays.newBigArray( numberOfDocuments );
	final IntIterator[] sizes = new IntIterator[ numIndices ];
	for( int i = 0; i < numIndices; i++ ) sizes[ i ] = sizes( i );
	// Bug fix: d must be a long — numberOfDocuments is a long, and an int loop
	// variable would overflow past Integer.MAX_VALUE documents.
	for( long d = 0; d < numberOfDocuments; d++ ) {
		curSize = 0;
		for( int i = 0; i < numIndices; i++ ) {
			// Consume this index's size stream only while it still has documents.
			if ( d < index[ i ].numberOfDocuments && ( s = sizes[ i ].nextInt() ) != 0 ) {
				if ( curSize != 0 ) throw new IllegalArgumentException( "Document " + d + " has nonzero length in two indices" );
				curSize = s;
			}
		}
		if ( needsSizes ) IntBigArrays.set( size, d, curSize );
		if ( curSize > maxDocSize ) maxDocSize = curSize;
		sizesOutputBitStream.writeGamma( curSize );
	}
	for( int i = 0; i < numIndices; i++ ) if ( sizes[ i ] instanceof Closeable ) ((Closeable)sizes[ i ]).close();
	return maxDocSize;
}
/**
 * Deserializes the set: after default deserialization restores size and load
 * factor, rebuilds the hash table and re-inserts every stored key.
 *
 * @param s the object input stream.
 * @throws java.io.IOException if an I/O error occurs.
 * @throws ClassNotFoundException if a serialized class cannot be resolved.
 */
private void readObject(java.io.ObjectInputStream s) throws java.io.IOException, ClassNotFoundException {
	s.defaultReadObject();
	n = bigArraySize(size, f);
	maxFill = maxFill(n, f);
	final int[][] key = this.key = IntBigArrays.newBigArray(n);
	initMasks();
	long h;
	int k;
	int base, displ;
	for (long i = size; i-- != 0;) {
		k = s.readInt();
		// Zero marks an empty slot in the table, so the zero key is recorded
		// out of band via the containsNull flag.
		if (((k) == (0))) containsNull = true;
		else {
			h = (it.unimi.dsi.fastutil.HashCommon.mix((long) ((k))));
			// Open addressing over a big array: `base` selects the segment and
			// `displ` the offset inside it; probe forward until an empty slot,
			// bumping `base` whenever `displ` wraps around a segment boundary.
			if (!((key[base = (int) ((h & mask) >>> BigArrays.SEGMENT_SHIFT)][displ = (int) (h & segmentMask)]) == (0))) while (!((key[base = (base + ((displ = (displ + 1) & segmentMask) == 0 ? 1 : 0)) & baseMask][displ]) == (0)));
			key[base][displ] = k;
		}
	}
	// ASSERTS-guarded consistency check of the rebuilt table.
	if (ASSERTS) checkTable();
}
private void checkTable() {
/**
 * Creates a new hash big map.
 *
 * <p>The actual table size will be the least power of two greater than
 * <code>expected</code>/<code>f</code>.
 *
 * @param expected the expected number of elements in the map.
 * @param f the load factor.
 * @throws IllegalArgumentException if {@code f} is not in (0,1] or {@code expected} is negative.
 */
public Long2IntOpenHashBigMap(final long expected, final float f) {
	if (f <= 0 || f > 1) throw new IllegalArgumentException("Load factor must be greater than 0 and smaller than or equal to 1");
	// Bug fix: the original tested the field n — still 0 at this point, so the
	// check was a no-op — instead of the argument; negative values of
	// `expected` were silently accepted.
	if (expected < 0) throw new IllegalArgumentException("The expected number of elements must be nonnegative");
	this.f = f;
	n = bigArraySize(expected, f);
	maxFill = maxFill(n, f);
	key = LongBigArrays.newBigArray(n);
	value = IntBigArrays.newBigArray(n);
	used = BooleanBigArrays.newBigArray(n);
	initMasks();
}
// NOTE(review): this fragment appears to be merge/edit residue — `size` is
// allocated twice and the local `sizes` is declared twice in the same scope,
// which does not compile. Reconcile against the intact combineSizes body
// before touching this code.
size = IntBigArrays.newBigArray( numberOfDocuments );
for( int i = 0; i < numIndices; i++ ) {
	final IntIterator sizes = sizes( i );
	size = IntBigArrays.newBigArray( numberOfDocuments );
	final IntIterator sizes = sizes( numIndices - 1 );
	int s = 0;
// NOTE(review): duplicate of the preceding residue fragment — `size` is
// allocated twice and the local `sizes` is declared twice in the same scope,
// which does not compile. Reconcile against the intact combineSizes body
// before touching this code.
size = IntBigArrays.newBigArray( numberOfDocuments );
for( int i = 0; i < numIndices; i++ ) {
	final IntIterator sizes = sizes( i );
	size = IntBigArrays.newBigArray( numberOfDocuments );
	final IntIterator sizes = sizes( numIndices - 1 );
	int s = 0;
// Reject a virtual-indexing configuration with a zero document gap.
if ( indexingIsVirtual && virtualDocumentGap == 0 ) throw new IllegalArgumentException( "Illegal virtual document gap: " + virtualDocumentGap );
// For virtual indexing, per-document sizes are accumulated in a big array.
if ( indexingIsVirtual ) currSize = IntBigArrays.newBigArray( numVirtualDocs );
// NOTE(review): evaluates to -1 when currSize is null — presumably a sentinel
// meaning "no document seen in this batch yet"; confirm against the callers.
maxDocInBatch = ( currSize != null ? IntBigArrays.length( currSize ) : 0 ) -1;
openSizeBitStream();
// Rehash support: allocate the resized key table (newN slots across segments).
final int newKey[][] = IntBigArrays.newBigArray(newN);
// newN is a power of two, so newN - 1 masks a hash down to a table index.
final long mask = newN - 1; // Note that this is used by the hashing macro
// Mask for offsets inside a single segment (segment length is a power of two).
final int newSegmentMask = newKey[0].length - 1;
// Rehash support: allocate the resized parallel tables (occupancy flags,
// keys and values), each with newN slots spread across segments.
final boolean newUsed[][] = BooleanBigArrays.newBigArray(newN);
final long newKey[][] = LongBigArrays.newBigArray(newN);
final int newValue[][] = IntBigArrays.newBigArray(newN);
// newN is a power of two, so newN - 1 masks a hash down to a table index.
final long newMask = newN - 1;
// Mask for offsets inside a single segment (segment length is a power of two).
final int newSegmentMask = newKey[0].length - 1;