int top = 0; IntIntHashMap entryVarMap = new IntIntHashMap(); for (int i = 0; i < argumentMapping.length; ++i) { Variable arg = argumentMapping[i]; if (arg != null) { entryVarMap.put(i, arg.getIndex()); Step step = stack[--top]; int node = step.node; IntIntMap varMap = new IntIntHashMap(step.varMap); BasicBlock block = program.basicBlockAt(node); result[node] = new IntIntHashMap(varMap); stack[top++] = new Step(successor, new IntIntHashMap(varMap));
/**
 * Returns true if the other container has the same size and every
 * key/value mapping it holds is also present (with an equal value) in this
 * container.
 */
protected boolean equalElements(IntIntHashMap other) {
    if (size() != other.size()) {
        return false;
    }

    for (IntIntCursor cursor : other) {
        if (!containsKey(cursor.key)) {
            return false;
        }
        if (get(cursor.key) != cursor.value) {
            return false;
        }
    }

    return true;
}
/**
 * Adds {@code incrementValue} to the value stored under {@code key}, or
 * stores {@code incrementValue} itself if no mapping for {@code key}
 * existed before.
 *
 * @param key The key whose value should be adjusted.
 * @param incrementValue The value to insert, or to add to the existing value.
 * @return The value associated with {@code key} after the change.
 */
@Override
public int addTo(int key, int incrementValue) {
    // The increment doubles as the initial value for absent keys.
    return putOrAdd(key, incrementValue, incrementValue);
}
@Override
public int removeAll(final int e) {
    // A map holds at most one mapping per key, so at most one entry can go.
    if (!owner.containsKey(e)) {
        return 0;
    }
    owner.remove(e);
    return 1;
}
};
/**
 * Emits all nodes of {@code fsa} in {@code linearized} order.
 *
 * <p>When {@code os == null} this is a measuring pass: nothing is written and
 * the {@code offsets} map is updated with each state's current byte offset.
 * When {@code os != null} the data is actually written, and each state's
 * offset is asserted to have already converged.
 *
 * @return The total number of emitted bytes when any offset changed during a
 *         measuring pass, or 0 when all offsets were already stable.
 */
private int emitNodes(FSA fsa, OutputStream os, IntArrayList linearized) throws IOException {
    int offset = 0;

    // Add epsilon state.
    offset += emitNodeData(os, 0);
    // The epsilon state carries a single '^' arc pointing at the root node
    // (target 0 when there is no root).
    if (fsa.getRootNode() != 0)
        offset += emitArc(os, BIT_LAST_ARC, (byte) '^', offsets.get(fsa.getRootNode()));
    else
        offset += emitArc(os, BIT_LAST_ARC, (byte) '^', 0);

    boolean offsetsChanged = false;
    final int max = linearized.size();
    for (IntCursor c : linearized) {
        final int state = c.value;
        // Successor in linearized order, used to detect fall-through arcs.
        final int nextState = c.index + 1 < max ? linearized.get(c.index + 1) : NO_STATE;

        if (os == null) {
            // Measuring pass: record whether this state's offset moved.
            offsetsChanged |= (offsets.get(state) != offset);
            offsets.put(state, offset);
        } else {
            // Emitting pass: offsets must have converged by now.
            assert offsets.get(state) == offset : state + " " + offsets.get(state) + " " + offset;
        }

        offset += emitNodeData(os, withNumbers ? numbers.get(state) : 0);
        offset += emitNodeArcs(fsa, os, state, nextState);
    }

    return offsetsChanged ? offset : 0;
}
/**
 * If no mapping for {@code key} exists, inserts {@code putValue}; otherwise
 * adds {@code incrementValue} to the value already stored.
 *
 * @param key The key of the value to adjust.
 * @param putValue The value stored when {@code key} is absent.
 * @param incrementValue The amount added to the existing value when
 *        {@code key} is present.
 * @return The value associated with {@code key} after the change.
 */
@Override
public int putOrAdd(int key, int putValue, int incrementValue) {
    assert assigned < mask + 1;

    final int newValue = containsKey(key) ? get(key) + incrementValue : putValue;
    put(key, newValue);
    return newValue;
}
@Override
public int test() {
    final IntIntHashMap map = new IntIntHashMap(m_keys.length / 2 + 1, m_fillFactor);
    int added = 0;
    int removed = 0;
    // Interleave two insertions with one removal until all keys are consumed.
    while (added < m_keys.length) {
        map.put(m_keys[added], m_keys[added]);
        ++added;
        map.put(m_keys[added], m_keys[added]);
        ++added;
        map.remove(m_keys[removed]);
        ++removed;
    }
    return map.size();
}
}
/**
 * Assigns documents to cluster labels: each cluster receives the document
 * set recorded for its label feature.
 */
void assignDocuments(LingoProcessingContext context) {
    final int [] clusterLabelFeatureIndex = context.clusterLabelFeatureIndex;
    final int [] labelsFeatureIndex = context.preprocessingContext.allLabels.featureIndex;
    final BitSet [] documentIndices = context.preprocessingContext.allLabels.documentIndices;

    // Inverse of labelsFeatureIndex: feature value -> position in the labels array.
    final IntIntHashMap featureToLabelIndex = new IntIntHashMap();
    for (int i = 0; i < labelsFeatureIndex.length; i++)
    {
        featureToLabelIndex.put(labelsFeatureIndex[i], i);
    }

    final BitSet [] clusterDocuments = new BitSet [clusterLabelFeatureIndex.length];
    for (int cluster = 0; cluster < clusterDocuments.length; cluster++)
    {
        final int labelIndex = featureToLabelIndex.get(clusterLabelFeatureIndex[cluster]);
        clusterDocuments[cluster] = documentIndices[labelIndex];
    }

    context.clusterDocuments = clusterDocuments;
}
@Override
public int test() {
    final IntIntHashMap map = new IntIntHashMap(m_keys.length, m_fillFactor);
    // First pass inserts every key, second pass overwrites each mapping.
    for (int pass = 0; pass < 2; ++pass) {
        for (int i = 0; i < m_keys.length; ++i) {
            map.put(m_keys[i], m_keys[i]);
        }
    }
    return map.size();
}
}
rcsStack[sp].tfByDocument = new IntIntHashMap(); rcsStack[sp].tfByDocument.put( documentIndexArray[suffixArray[i - 1]], 1); if (j == 0) rcsStack[sp].tfByDocument.putOrAdd(currentDocumentIndex, 1, 1); currentSuffixIndex + currentLcp - j, (j == 0 ? 2 : 1)); rcsStack[sp].tfByDocument = new IntIntHashMap(); rcsStack[sp].tfByDocument.put( documentIndexArray[suffixArray[i - 1]], 1); if (j == 0) rcsStack[sp].tfByDocument.putOrAdd(currentDocumentIndex, 1, 1); rcsStack[sp].tfByDocument.putOrAdd(currentDocumentIndex, 1, 1); if (rcsStack[sp].tfByDocument.size() >= dfThreshold)
/**
 * Adds {@code value} to this set view by storing it as a key.
 * NOTE(review): relies on put() returning 0 for previously absent keys; a key
 * explicitly mapped to 0 would be indistinguishable from an absent one here —
 * confirm the backing map never stores 0 values, or that this is acceptable.
 *
 * @return true when the previous value for {@code value} was 0.
 */
public boolean add(int value) {
    final int previous = super.put(value, defaultValue);
    return previous == 0;
}
@Override
public int test() {
    int checksum = 0;
    // XOR-fold the looked-up values so the result depends on every get().
    for (int i = 0; i < m_keys.length; ++i) {
        checksum ^= m_map.get(m_keys[i]);
    }
    return checksum;
}
}
/**
 * Merges one or more sparse arrays (flat {@code [key, value, key, value, ...]}
 * pairs), summing the values of keys that repeat across arrays.
 *
 * @param source The sparse arrays to merge.
 * @return The merged content, again as flat key/value pairs.
 */
public static int [] mergeSparseArrays(Iterable<int []> source)
{
    final IntIntHashMap merged = new IntIntHashMap();
    for (final int [] pairs : source)
    {
        for (int i = 0; i < pairs.length; i += 2)
        {
            merged.putOrAdd(pairs[i], pairs[i + 1], pairs[i + 1]);
        }
    }
    return hashToKeyValuePairs(merged);
}
@Override
public int[] split(int[] domain, int[] nodes) {
    final int[] copies = new int[nodes.length];
    final IntIntMap copyIndexByNode = new IntIntHashMap();

    // Duplicate every requested block; store indexes shifted by +1 so that the
    // map's default value (0) can mean "no copy exists for this block".
    for (int i = 0; i < nodes.length; ++i) {
        final BasicBlock original = program.basicBlockAt(nodes[i]);
        final BasicBlock duplicate = program.createBasicBlock();
        ProgramUtils.copyBasicBlock(original, duplicate);
        copies[i] = duplicate.getIndex();
        copyIndexByNode.put(nodes[i], copies[i] + 1);
    }

    // Redirect block references inside the copies and the domain blocks.
    final BasicBlockMapper mapper = new BasicBlockMapper((int block) -> {
        final int shifted = copyIndexByNode.get(block);
        return shifted == 0 ? block : shifted - 1;
    });
    for (final int copy : copies) {
        mapper.transform(program.basicBlockAt(copy));
    }
    for (final int domainNode : domain) {
        mapper.transform(program.basicBlockAt(domainNode));
    }

    return copies;
}
}
/**
 * Counts the final arcs reachable from {@code node}, recursing into
 * non-terminal arcs; results are memoized in {@code visitedNodes} so each
 * node is computed at most once.
 */
public int visitNode(int node) {
    // Return the cached count if this node was visited before.
    final int slot = visitedNodes.indexOf(node);
    if (slot >= 0) {
        return visitedNodes.indexGet(slot);
    }

    int count = 0;
    for (int arc = fsa.getFirstArc(node); arc != 0; arc = fsa.getNextArc(arc)) {
        if (fsa.isArcFinal(arc)) {
            count++;
        }
        if (!fsa.isArcTerminal(arc)) {
            count += visitNode(fsa.getEndNode(arc));
        }
    }

    visitedNodes.put(node, count);
    return count;
}
}
/**
 * <a href="http://trove4j.sourceforge.net">Trove</a>-inspired API method.
 * Stores {@code value} under {@code key} only when no mapping for
 * {@code key} exists yet. An equivalent of the following code:
 * <pre>
 * if (!map.containsKey(key)) map.put(value);
 * </pre>
 *
 * @param key The key of the value to check.
 * @param value The value stored when {@code key} is absent.
 * @return <code>true</code> when {@code key} was absent and {@code value}
 *         was placed in the map.
 */
public boolean putIfAbsent(int key, int value) {
    if (containsKey(key)) {
        return false;
    }
    put(key, value);
    return true;
}
@Override
public int size() {
    // NOTE(review): delegates straight to the superclass — presumably kept to
    // make the override explicit or as an extension point; confirm before removing.
    return super.size();
}
final int before = size(); if (other.size() >= size() && other instanceof IntLookupContainer) { if (hasEmptyKey) { if (!((existing = keys[slot]) == 0) && other.contains(existing)) { shiftConflictingKeys(slot); } else { slot++; this.remove( c.value); return before - size();