/**
 * Returns the type set associated with the given node, lazily creating
 * and caching an empty set on first access.
 */
IntHashSet getNodeTypes(int node) {
    IntHashSet typeSet = types[node];
    if (typeSet != null) {
        return typeSet;
    }
    typeSet = new IntHashSet();
    types[node] = typeSet;
    return typeSet;
}
/**
 * Delivers a single dependency type to the consumer, either inline (when the
 * recursion budget and the consumer's state allow it) or by queueing it for a
 * later batch pass.
 */
void schedulePropagation(Transition consumer, DependencyType type) {
    if (!consumer.destination.filter(type)) {
        return;
    }
    // Inline consumption is only safe while the recursion stack is shallow,
    // nothing is already pending, and the destination has not been propagated
    // to too many times.
    boolean inlineEligible = consumer.pendingTypes == null
            && propagationDepth < PROPAGATION_STACK_THRESHOLD
            && consumer.pointsToDomainOrigin()
            && consumer.destination.propagateCount < 20;
    if (inlineEligible) {
        propagationDepth++;
        consumer.consume(type);
        propagationDepth--;
        return;
    }
    // Otherwise accumulate the type into the consumer's pending set,
    // registering the consumer on first use.
    if (consumer.pendingTypes == null) {
        pendingTransitions.add(consumer);
        consumer.pendingTypes = new IntHashSet(50);
    }
    consumer.pendingTypes.add(type.index);
}
IntHashSet set = new IntHashSet(activeLandmarkIndices.length); set.addAll(activeLandmarkIndices); int existingLandmarkCounter = 0; final int COUNT = Math.min(activeLandmarkIndices.length - 2, 2); } else { activeLandmarkIndices[i] = list.get(i).getValue(); if (set.contains(activeLandmarkIndices[i])) existingLandmarkCounter++;
/**
 * Delivers a batch of dependency types to the consumer; delegates to the
 * single-type overload for one element, otherwise consumes inline when
 * allowed or queues every type index into the consumer's pending set.
 */
void schedulePropagation(Transition consumer, DependencyType[] types) {
    if (types.length == 0) {
        return;
    }
    if (types.length == 1) {
        schedulePropagation(consumer, types[0]);
        return;
    }
    // Same inline-consumption guard as the single-type overload.
    boolean inlineEligible = consumer.pendingTypes == null
            && propagationDepth < PROPAGATION_STACK_THRESHOLD
            && consumer.pointsToDomainOrigin()
            && consumer.destination.propagateCount < 20;
    if (inlineEligible) {
        propagationDepth++;
        consumer.consume(types);
        propagationDepth--;
        return;
    }
    if (consumer.pendingTypes == null) {
        pendingTransitions.add(consumer);
        consumer.pendingTypes = new IntHashSet(Math.max(50, types.length));
    }
    // Presize before the bulk insert to avoid repeated rehashing.
    consumer.pendingTypes.ensureCapacity(types.length + consumer.pendingTypes.size());
    for (DependencyType type : types) {
        consumer.pendingTypes.add(type.index);
    }
}
IntHashSet precisions = new IntHashSet(); while(parser.nextToken() != Token.END_ARRAY) { precisions.add(parsePrecision(parser)); precision = precisions.toArray(); } else { precision = new int[] { parsePrecision(parser) };
/**
 * Pops nodes off the traversal stack down to (and including) {@code nodeId},
 * records them as one connected component, and updates the running
 * min/max component-size statistics.
 */
private void relax(int nodeId) {
    IntHashSet component = new IntHashSet();
    int popped;
    do {
        popped = stack.pop();
        onStack.clear(popped);
        component.add(popped);
    } while (popped != nodeId);
    connectedComponents.add(component);
    int componentSize = component.size();
    minSetSize = Math.min(minSetSize, componentSize);
    maxSetSize = Math.max(maxSetSize, componentSize);
}
/**
 * Registers the named type for the node identified by (variable, degree);
 * flags both the node and the global state as changed when the type was
 * not already present.
 */
void addType(int variable, int degree, String typeName) {
    int node = nodeMapping[packNodeAndDegree(variable, degree)];
    boolean added = getNodeTypes(node).add(getTypeByName(typeName));
    if (added) {
        nodeChanged[node] = true;
        changed = true;
    }
}
/**
 * Micro-benchmark: fills a set with ten million keys, then copies it twice
 * (once into a presized target, once into a default-sized one) and prints
 * the elapsed seconds and the final copy's size.
 */
public static void main(String[] args) {
    final long start = System.currentTimeMillis();
    final IntHashSet a = new com.carrotsearch.hppc.IntHashSet();
    // Insert keys 9999999 down to 0, matching the original countdown order.
    for (int i = 9999999; i >= 0; i--) {
        a.add(i);
    }
    IntHashSet b = new com.carrotsearch.hppc.IntHashSet(a.size());
    b.addAll(a);
    b = new com.carrotsearch.hppc.IntHashSet();
    b.addAll(a);
    final long elapsed = System.currentTimeMillis() - start;
    System.out.println(elapsed / 1000.0);
    System.out.println(b.size());
}
}
final IntHashSet transferFieldIds = new IntHashSet(); && !((ValueVectorReadExpression) expr).hasReadPath() && !isAnyWildcard && !transferFieldIds.contains(((ValueVectorReadExpression) expr).getFieldId().getFieldIds()[0])) { memoryManager.addTransferField(vvIn, TypedFieldId.getPath(id, incomingBatch), vvOut.getField().getName()); transfers.add(tp); transferFieldIds.add(vectorRead.getFieldId().getFieldIds()[0]); } else if (expr instanceof DrillFuncHolderExpr && ((DrillFuncHolderExpr) expr).getHolder().isComplexWriterFuncHolder()) {
/**
 * Creates a set from a variable number of arguments or an array of
 * <code>int</code>. The elements are copied from the argument into the
 * set's internal buffer.
 */
public static IntHashSet from(int... elements) {
    final IntHashSet result = new IntHashSet(elements.length);
    result.addAll(elements);
    return result;
}
/** Tells whether the given edge is currently marked as blocked. */
boolean isBlocked(int edgeId) {
    return blockedEdges.contains(edgeId);
}
/**
 * Propagates type sets across recorded casts: for each cast whose source
 * node changed, copies every source type that is compatible with the cast's
 * target type into the destination node's type set.
 */
private void propagateAlongCasts() {
    for (ValueCast cast : casts) {
        int sourceNode = nodeMapping[packNodeAndDegree(cast.fromVariable, 0)];
        if (!formerNodeChanged[sourceNode] && !nodeChanged[sourceNode]) {
            // Nothing new arrived at the cast's source since the last pass.
            continue;
        }
        int targetNode = nodeMapping[packNodeAndDegree(cast.toVariable, 0)];
        IntHashSet targetTypes = getNodeTypes(targetNode);
        for (IntCursor cursor : types[sourceNode]) {
            int typeIndex = cursor.value;
            if (targetTypes.contains(typeIndex)) {
                continue;
            }
            String className = typeList.get(typeIndex);
            ValueType type;
            if (className.startsWith("[")) {
                type = ValueType.parseIfPossible(className);
                if (type == null) {
                    // Unparsable array descriptor: fall back to Object[].
                    type = ValueType.arrayOf(ValueType.object("java.lang.Object"));
                }
            } else {
                type = ValueType.object(className);
            }
            if (hierarchy.isSuperType(cast.targetType, type, false)) {
                changed = true;
                nodeChanged[targetNode] = true;
                targetTypes.add(typeIndex);
            }
        }
    }
}
/**
 * Adds all elements from the given list (vararg) to this set.
 *
 * @return Returns the number of elements actually added as a result of this
 *         call (not previously present in the set).
 */
public final int addAll(int... elements) {
    ensureCapacity(elements.length);
    int added = 0;
    for (int element : elements) {
        if (add(element)) {
            added++;
        }
    }
    return added;
}
IntegerArray orderedSuccessors = new IntegerArray(successors.length); if (terminalNodes.size() > 0) { IntSet loopNodes = IntHashSet.from(findNaturalLoop(node, terminalNodes.getAll())); for (int succ : successors) { if (loopNodes.contains(succ)) { IntSet outerSuccessors = new IntHashSet(successors.length); succList.clear(); for (IntCursor loopNode : loopNodes) {
/**
 * Propagates type sets along the DAG in reverse topological order: each
 * node absorbs the type sets of every predecessor that changed in the
 * current or previous pass, flagging the node (and the global state)
 * whenever new types actually arrive.
 *
 * Single pass over each node's incoming edges; the original scanned the
 * edge list twice (once to detect a changed predecessor, once to merge).
 * The node's type set is fetched lazily so it is still only created when
 * at least one changed predecessor exists, preserving the old behavior.
 */
private void propagateAlongDAG() {
    for (int i = propagationPath.length - 1; i >= 0; --i) {
        int node = propagationPath[i];
        IntHashSet nodeTypes = null; // created on first changed predecessor
        for (int predecessor : graph.incomingEdges(node)) {
            if (!formerNodeChanged[predecessor] && !nodeChanged[predecessor]) {
                continue;
            }
            if (nodeTypes == null) {
                nodeTypes = getNodeTypes(node);
            }
            // addAll returns the number of newly inserted types.
            if (nodeTypes.addAll(types[predecessor]) > 0) {
                nodeChanged[node] = true;
                changed = true;
            }
        }
    }
}
/**
 * {@inheritDoc}
 */
@Override
public boolean isEmpty() {
    return size() == 0;
}
/**
 * Adds all elements from the given {@link IntContainer} to this set.
 *
 * @return Returns the number of elements actually added as a result of this
 *         call (not previously present in the set).
 */
public int addAll(IntContainer container) {
    // Presize once for the whole batch to avoid incremental rehashing.
    final int incoming = container.size();
    ensureCapacity(incoming);
    return addAll((Iterable<? extends IntCursor>) container);
}
/**
 * This method is invoked when there is a new key to be inserted into
 * the buffer but there is not enough empty slots to do so.
 *
 * New buffers are allocated. If this succeeds, we know we can proceed
 * with rehashing so we assign the pending element to the previous buffer
 * (possibly violating the invariant of having at least one empty slot)
 * and rehash all keys, substituting new buffers at the end.
 */
protected void allocateThenInsertThenRehash(int slot, int pendingKey) {
    // Precondition: the set is exactly at its resize threshold, the target
    // slot is empty (0 is the empty-slot marker), and the pending key is not
    // the reserved marker itself.
    assert assigned == resizeAt && (( keys[slot]) == 0) && !((pendingKey) == 0);

    // Try to allocate new buffers first. If we OOM, we leave in a consistent state.
    final int[] prevKeys = this.keys;
    allocateBuffers(nextBufferSize(mask + 1, size(), loadFactor));
    assert this.keys.length > prevKeys.length;

    // We have succeeded at allocating new data so insert the pending key/value at
    // the free slot in the old arrays before rehashing.
    prevKeys[slot] = pendingKey;

    // Rehash old keys, including the pending key.
    rehash(prevKeys);
}