AllocationMetricCollector(Method method, ThreadMXBean threadMXBean) {
    this.getThreadAllocatedBytes = method;
    this.threadMXBean = threadMXBean;
    previousResults = new Long2LongOpenHashMap();
    previousResults.defaultReturnValue(NO_DATA);
}
Long2LongMap newResults = new Long2LongOpenHashMap();
newResults.defaultReturnValue(NO_DATA);
for (int i = 0; i < allThreadIds.length; i++) {
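import it.unimi.dsi.fastutil.longs.Long2LongMap;
import it.unimi.dsi.fastutil.longs.Long2LongOpenHashMap;

// Minimal, hypothetical sketch of how previousResults and newResults might be
// combined into per-thread allocation deltas; the method name, parameters and
// delta logic are our assumptions, not the original loop body.
static final long NO_DATA = -1; // assumed value of the sentinel installed via defaultReturnValue above

static Long2LongMap allocationDeltas(long[] allThreadIds, long[] allocatedBytes, Long2LongMap previousResults) {
    Long2LongMap deltas = new Long2LongOpenHashMap();
    deltas.defaultReturnValue(NO_DATA);
    for (int i = 0; i < allThreadIds.length; i++) {
        long previous = previousResults.get(allThreadIds[i]); // NO_DATA when the thread has no prior sample
        if (previous != NO_DATA && allocatedBytes[i] != NO_DATA) {
            deltas.put(allThreadIds[i], allocatedBytes[i] - previous);
        }
    }
    return deltas;
}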
private void rebuildToCapacity(long newCapacity) {
    Long2LongOpenHashMap newNode2count = new Long2LongOpenHashMap(MAP_INITIAL_SIZE, MAP_LOAD_FACTOR);
    // Rebuild to newCapacity. This means that our current tree becomes a
    // leftmost subtree of the new tree.
    // E.g. when rebuilding a tree with logCapacity = 2
    // (i.e. storing values in 0..3) to logCapacity = 5 (i.e. 0..31):
    //   node 1     =>  8     (+= 7  = 2^0*(2^3-1))
    //   nodes 2..3 => 16..17 (+= 14 = 2^1*(2^3-1))
    //   nodes 4..7 => 32..35 (+= 28 = 2^2*(2^3-1))
    // This is easy to see if you draw it on paper.
    // Process the keys by "layers" in the original tree.
    long scaleR = newCapacity / capacity - 1;
    Long[] keys = node2count.keySet().toArray(new Long[node2count.size()]);
    Arrays.sort(keys);
    long scaleL = 1;
    for (long k : keys) {
        while (scaleL <= k / 2) {
            scaleL <<= 1;
        }
        newNode2count.put(k + scaleL * scaleR, node2count.get(k));
    }
    node2count = newNode2count;
    capacity = newCapacity;
    compressFully();
}
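// Standalone check of the renumbering formula documented above (our helper,
// not the original code). Rebuilding capacity 4 -> 32 gives
// scaleR = 32/4 - 1 = 7, so node 1 -> 1 + 1*7 = 8, node 3 -> 3 + 2*7 = 17,
// node 7 -> 7 + 4*7 = 35, matching the comment's table.
static long renumber(long k, long oldCapacity, long newCapacity) {
    long scaleR = newCapacity / oldCapacity - 1;
    long scaleL = Long.highestOneBit(k); // the "layer" of node k, same value the while loop tracks
    return k + scaleL * scaleR;
}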
/** Clears this store. After a call to this method, the store can be reused. */
public void clear() throws IOException {
    locked = false;
    if (value2FrequencyMap != null) value2FrequencyMap = new Long2LongOpenHashMap();
    reset(0);
}
public Watchdog( GoMintServer server ) {
    this.watchdogMap = new Long2LongOpenHashMap();
    this.removed = new Long2LongOpenHashMap();
    server.getExecutorService().scheduleAtFixedRate( this, 0, 10, TimeUnit.MILLISECONDS );
}
/** Creates a chunked hash store with given transformation strategy, hash width and progress logger.
 *
 * @param transform a transformation strategy for the elements.
 * @param tempDir a temporary directory for the store files, or {@code null} for the current directory.
 * @param hashWidthOrCountValues if positive, no associated data is saved in the store: {@link Chunk#data(long)} will return this many lower bits
 * of the first of the three hashes associated with the key; if zero, values are stored; if negative, values are stored and a map from values
 * to their frequency is computed.
 * @param pl a progress logger, or {@code null}.
 */
public ChunkedHashStore(final TransformationStrategy<? super T> transform, final File tempDir, final int hashWidthOrCountValues, final ProgressLogger pl) throws IOException {
    this.transform = transform;
    this.pl = pl;
    this.tempDir = tempDir;
    this.hashMask = hashWidthOrCountValues <= 0 ? 0 : -1L >>> Long.SIZE - hashWidthOrCountValues;
    if (hashWidthOrCountValues < 0) value2FrequencyMap = new Long2LongOpenHashMap();
    file = new File[DISK_CHUNKS];
    writableByteChannel = new WritableByteChannel[DISK_CHUNKS];
    byteBuffer = new ByteBuffer[DISK_CHUNKS];
    // Create disk chunks
    for (int i = 0; i < DISK_CHUNKS; i++) {
        byteBuffer[i] = ByteBuffer.allocateDirect(BUFFER_SIZE).order(ByteOrder.nativeOrder());
        writableByteChannel[i] = new FileOutputStream(file[i] = File.createTempFile(ChunkedHashStore.class.getSimpleName(), String.valueOf(i), tempDir)).getChannel();
        file[i].deleteOnExit();
    }
    count = new int[DISK_CHUNKS];
}
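// Side note on the mask arithmetic above (standalone illustration, our own
// helper name, not the library's): -1L >>> (Long.SIZE - w) produces a mask
// that keeps the low w bits of a hash.
static long lowBitsMask(int w) {
    return w <= 0 ? 0 : -1L >>> (Long.SIZE - w); // e.g. w = 8 -> 0xFF
}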
final Long2LongMap results = new Long2LongOpenHashMap();
final AtomicLong counter = new AtomicLong(0);
/** Creates a new subset lexical strategy.
 *
 * @param subset the subset of terms.
 */
public FrequencyLexicalStrategy( final LongSet subset ) {
    final long[] t = subset.toLongArray();
    Arrays.sort( t );
    localNumber = new Long2LongOpenHashMap();
    localNumber.defaultReturnValue( -1 );
    for( int i = 0; i < t.length; i++ ) localNumber.put( t[ i ], i );
}
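import it.unimi.dsi.fastutil.longs.Long2LongOpenHashMap;
import it.unimi.dsi.fastutil.longs.LongOpenHashSet;
import java.util.Arrays;

// Usage sketch (our own free-standing helper, not part of the class): the map
// assigns each term in the subset its rank in sorted order, and the default
// return value turns missing keys into a clean "not in subset" signal.
static Long2LongOpenHashMap rankMap(LongOpenHashSet subset) {
    final long[] t = subset.toLongArray();
    Arrays.sort(t);
    final Long2LongOpenHashMap localNumber = new Long2LongOpenHashMap();
    localNumber.defaultReturnValue(-1);
    for (int i = 0; i < t.length; i++) localNumber.put(t[i], i);
    return localNumber;
}
// rankMap(new LongOpenHashSet(new long[] {42, 7, 99})).get(42) returns 1
// (42 is second in sorted order); get(5) returns -1, i.e. not in the subset.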
public UnionFindMapStorage(GraphDatabaseService db) {
    this.db = db;
    this.rootMap = new Long2LongOpenHashMap();
    this.rankMap = new Long2IntOpenHashMap();
    this.nodes = new NodeCounter().getNodeCount(db);
}
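// Hedged sketch of how a map-backed union-find can use rootMap/rankMap; these
// method bodies are our illustration of the standard technique, not the
// storage class's actual code.
long find(long node) {
    while (rootMap.containsKey(node) && rootMap.get(node) != node) {
        node = rootMap.get(node); // follow parent links up to the root
    }
    return node;
}

void union(long a, long b) {
    long ra = find(a), rb = find(b);
    if (ra == rb) return;
    int rankA = rankMap.get(ra), rankB = rankMap.get(rb);
    if (rankA < rankB) rootMap.put(ra, rb);      // attach the shorter tree under the taller
    else if (rankA > rankB) rootMap.put(rb, ra);
    else { rootMap.put(rb, ra); rankMap.put(ra, rankA + 1); }
}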
public ConcurrentLockManager(final int lockManagerShrinkTimer, final int noArenas, final int tableSize) throws ACIDException {
    this.table = new ResourceGroupTable(tableSize);
    resArenaMgr = new ResourceArenaManager(noArenas, lockManagerShrinkTimer);
    reqArenaMgr = new RequestArenaManager(noArenas, lockManagerShrinkTimer);
    jobArenaMgr = new JobArenaManager(noArenas, lockManagerShrinkTimer);
    txnId2TxnSlotMap = Long2LongMaps.synchronize(new Long2LongOpenHashMap());
}
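import it.unimi.dsi.fastutil.longs.Long2LongMap;
import it.unimi.dsi.fastutil.longs.Long2LongMaps;
import it.unimi.dsi.fastutil.longs.Long2LongOpenHashMap;

// Long2LongOpenHashMap is not thread-safe on its own; Long2LongMaps.synchronize
// wraps each call in a lock on the wrapper, which is why the constructor above
// uses it for a map shared across transactions. Iteration and other compound
// operations still need an explicit lock. Standalone illustration with
// placeholder key/value:
static void synchronizedMapDemo() {
    Long2LongMap shared = Long2LongMaps.synchronize(new Long2LongOpenHashMap());
    shared.put(1L, 42L); // each individual call is synchronized
    synchronized (shared) { // hold the wrapper's monitor while iterating
        for (Long2LongMap.Entry e : shared.long2LongEntrySet()) {
            System.out.println(e.getLongKey() + " -> " + e.getLongValue());
        }
    }
}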
Batches() {
    this.instance_to_material = new Long2LongOpenHashMap(1024);
    this.material_to_instances = new Long2ReferenceOpenHashMap<>(1024);
    this.shader_to_materials = new Long2ReferenceOpenHashMap<>(1024);
    this.instance_materials = new Long2ReferenceOpenHashMap<>(1024);
    this.instance_shaders = new Long2ReferenceOpenHashMap<>(1024);
    this.instances = new Long2ReferenceOpenHashMap<>(1024);
}
Singles() {
    this.instances = new Long2ReferenceOpenHashMap<>(1024);
    this.instance_to_material = new Long2LongOpenHashMap(1024);
    this.material_to_instances = new Long2ReferenceOpenHashMap<>(1024);
    this.shader_to_materials = new Long2ReferenceOpenHashMap<>(1024);
    this.instance_materials = new Long2ReferenceOpenHashMap<>(1024);
    this.instance_shaders = new Long2ReferenceOpenHashMap<>(1024);
    this.instances_sorted = new ObjectArrayList<>();
}
private static Long2LongOpenHashMap mapRemoteVertices(
        IPartitionBuilder partitionBuilder, LongArrayList remoteVertices) throws IOException {
    // Map remote vertices to the id of the subgraph that owns them.
    Long2LongOpenHashMap remoteVerticesMappings = new Long2LongOpenHashMap(remoteVertices.size(), 1f);
    for (ISerializablePartition partition : partitionBuilder.getPartitions()) {
        for (long remoteVertexId : remoteVertices) {
            ISubgraph subgraph = partition.getSubgraphForVertex(remoteVertexId);
            if (subgraph != null && !subgraph.getVertex(remoteVertexId).isRemote()) {
                assert (!remoteVerticesMappings.containsKey(remoteVertexId));
                remoteVerticesMappings.put(remoteVertexId, subgraph.getId());
            }
        }
    }
    return remoteVerticesMappings;
}
private static List<TermCount> mergeTermCountLists(
        List<TermCount>[] termCountListBuf, String field, boolean isIntField, int k) {
    final List<TermCount> ret;
    if (isIntField) {
        final Long2LongMap counts = new Long2LongOpenHashMap(k * 2);
        for (final List<TermCount> list : termCountListBuf) {
            for (final TermCount termCount : list) {
final Long2LongOpenHashMap counts = new Long2LongOpenHashMap();
counts.defaultReturnValue(-1);
for(final LongIterator i = values.iterator(); i.hasNext();) {

escape = (1 << best) - 1;
System.arraycopy(keysArray, 0, remap = new long[escape], 0, remap.length);
final Long2LongOpenHashMap map = new Long2LongOpenHashMap();
map.defaultReturnValue(-1);
for(int i = 0; i < escape; i++) map.put(remap[i], i);
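// The second fragment above builds the inverse of remap: map.get(v) returns
// v's slot in remap, or -1 when v is not among the first `escape` entries and
// must therefore be escaped. Standalone illustration with made-up data:
static void remapDemo() {
    long[] remap = {100L, 200L, 300L};
    Long2LongOpenHashMap map = new Long2LongOpenHashMap();
    map.defaultReturnValue(-1);
    for (int i = 0; i < remap.length; i++) map.put(remap[i], i);
    // map.get(200L) == 1; map.get(999L) == -1, i.e. the value is escaped
}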
final Long2LongOpenHashMap counts = new Long2LongOpenHashMap();
int width = 0;
for(final LongIterator i = elements.iterator(); i.hasNext();) {
@Override
public void compute(String label, String type, int iterations) {
    Long2DoubleMap srcMap = new Long2DoubleOpenHashMap();
    Long2LongMap degreeMap = new Long2LongOpenHashMap();
    dstMap = new Long2DoubleOpenHashMap(nodes);
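import it.unimi.dsi.fastutil.longs.Long2DoubleMap;
import it.unimi.dsi.fastutil.longs.Long2DoubleOpenHashMap;
import it.unimi.dsi.fastutil.longs.Long2LongMap;

// Hedged sketch of how srcMap, dstMap and degreeMap typically interact in a
// PageRank-style iteration: each node sends score/outDegree to its
// out-neighbours. The edge representation and damping factor here are our
// assumptions, not part of the original compute() body.
static Long2DoubleMap propagate(long[][] edges, Long2DoubleMap src, Long2LongMap degree, double damping) {
    Long2DoubleMap dst = new Long2DoubleOpenHashMap(); // missing keys read as 0.0
    for (long[] e : edges) { // e[0] -> e[1]
        double share = src.get(e[0]) / degree.get(e[0]);
        dst.put(e[1], dst.get(e[1]) + damping * share);
    }
    return dst;
}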