// Single-value predicate evaluation: a value matches iff it is present in the
// pre-computed set of matching values (built when the predicate was created).
@Override
public boolean applySV(int value) {
  return _matchingValues.contains(value);
}
}
/**
 * Returns a new column containing only the distinct values of this column.
 * The result is named "&lt;this column's name&gt; Unique values".
 */
@Override
public DateColumn unique() {
  IntSet distinct = new IntOpenHashSet(data.size());
  int rowCount = size();
  for (int row = 0; row < rowCount; row++) {
    distinct.add(data.getInt(row));
  }
  DateColumn result = emptyCopy(distinct.size());
  result.setName(name() + " Unique values");
  result.data = IntArrayList.wrap(distinct.toIntArray());
  return result;
}
// Freezes this stats collector: snapshots the raw doc-id set into a sorted
// array, records min/max, then folds the aggregated docs in so dictionary
// creation sees the combined value set.
@Override
public void seal() {
  sealed = true;
  sortedIntList = new int[rawIntSet.size()];
  rawIntSet.toArray(sortedIntList);
  Arrays.sort(sortedIntList);
  if (sortedIntList.length == 0) {
    // NOTE(review): this early return also skips the aggregated-doc merge
    // below — if rawIntSet can be empty while aggregatedIntSet is not,
    // those aggregated values never reach sortedIntList. Confirm intended.
    min = null;
    max = null;
    return;
  }
  // Update min/max based on raw docs.
  min = sortedIntList[0];
  max = sortedIntList[sortedIntList.length - 1];
  // Merge the raw and aggregated docs, so stats for dictionary creation are collected correctly.
  // (min/max deliberately remain raw-only; only the sorted list is rebuilt.)
  int numAggregated = aggregatedIntSet.size();
  if (numAggregated > 0) {
    rawIntSet.addAll(aggregatedIntSet);
    sortedIntList = new int[rawIntSet.size()];
    rawIntSet.toArray(sortedIntList);
    Arrays.sort(sortedIntList);
  }
}
}
/**
 * Serializes the set as a 4-byte element count followed by each element as a
 * 4-byte int, in the set's iteration order.
 */
@Override
public byte[] serialize(IntSet intSet) {
  int count = intSet.size();
  // One int for the size header plus one per element.
  ByteBuffer buffer = ByteBuffer.allocate(Integer.BYTES * (count + 1));
  buffer.putInt(count);
  for (IntIterator it = intSet.iterator(); it.hasNext(); ) {
    buffer.putInt(it.nextInt());
  }
  return buffer.array();
}
/**
 * Reads a set previously written by {@code serialize}: a 4-byte count
 * followed by that many 4-byte ints.
 */
@Override
public IntSet deserialize(ByteBuffer byteBuffer) {
  int count = byteBuffer.getInt();
  IntSet result = new IntOpenHashSet(count);
  int remaining = count;
  while (remaining-- > 0) {
    result.add(byteBuffer.getInt());
  }
  return result;
}
};
if (currentNode != x) { final IntSet succSet = new IntOpenHashSet(); succSet.clear(); for(int d = g.outdegree(pseudoInverse[x]); d-- != 0;) succSet.add(map[i.nextInt()]); if (map[p = pseudoInverse[mid]] == x) { final LazyIntIterator i = g.successors(p); for(int d = g.outdegree(p); d-- != 0;) if ((t = map[i.nextInt()]) != -1) succSet.add(t); while (mid < remappedNodes && map[p = pseudoInverse[mid]] == x) { final LazyIntIterator i = g.successors(p); for(int d = g.outdegree(p); d-- != 0;) if ((t = map[i.nextInt()]) != -1) succSet.add(t); mid++; outdegree = succSet.size(); currentNode = x; succ = succSet.toIntArray(); if (outdegree > 0) IntArrays.quickSort(succ, 0, outdegree);
private final IntSet downKeys = new IntSet(20); downKeys.add(keycode); if (downKeys.size >= 2){ onMultipleKeysDown(keycode); downKeys.remove(keycode); return true; if (downKeys.contains(Input.Keys.ALT_LEFT) || downKeys.contains(Input.Keys.ALT_RIGHT)){ if (downKeys.size == 2 && mostRecentKeycode == Input.Keys.F4){ Gdx.app.exit(); downKeys.clear(); Gdx.input.setInputProcessor(inputAdapter);
if (matchingDictIds.isEmpty()) { return null; int numMatchingDictIds = matchingDictIds.size(); int numChildren = starTreeNode.getNumChildren(); while (childrenIterator.hasNext()) { StarTreeNode childNode = childrenIterator.next(); if (matchingDictIds.contains(childNode.getDimensionValue())) { queue.add( new SearchEntry(childNode, newRemainingPredicateColumns, searchEntry._remainingGroupByColumns)); IntIterator iterator = matchingDictIds.iterator(); while (iterator.hasNext()) { int matchingDictId = iterator.nextInt();
/**
 * Returns the matching dictionary ids as an array, lazily materialized from
 * the underlying set on first call and cached thereafter.
 */
@Override
public int[] getMatchingDictIds() {
  int[] cached = _matchingDictIds;
  if (cached == null) {
    cached = _matchingDictIdSet.toIntArray();
    _matchingDictIds = cached;
  }
  return cached;
}
}
PredicateEvaluator firstPredicateEvaluator = predicateEvaluators.get(0); for (int matchingDictId : firstPredicateEvaluator.getMatchingDictIds()) { matchingDictIds.add(matchingDictId); for (int i = 1; i < numPredicateEvaluators; i++) { if (matchingDictIds.isEmpty()) { return matchingDictIds; IntIterator iterator = matchingDictIds.iterator(); while (iterator.hasNext()) { if (!predicateEvaluator.applySV(iterator.nextInt())) {
/**
 * Builds a new frame containing only the rows NOT listed in the given mapping,
 * preserving their original order.
 *
 * @param mapping row indices to exclude
 * @return a frame over the surviving rows
 */
default Frame removeRows(Mapping mapping) {
  IntSet remove = new IntOpenHashSet(mapping.toList());
  // Pre-size for the expected number of surviving rows. Math.max guards the
  // capacity hint; the original Math.min(0, ...) was a bug — it always
  // produced a hint <= 0 (and a negative value throws in the constructor).
  IntList map = new IntArrayList(Math.max(0, rowCount() - remove.size()));
  for (int i = 0; i < rowCount(); i++) {
    if (!remove.contains(i)) {
      map.add(i);
    }
  }
  return mapRows(Mapping.wrap(map));
}
// Exposes an iterator over the wrapped int set; iteration order and
// concurrent-modification behavior are those of the backing set.
public IntIterator iterator() {
  return _set.iterator();
}
for ( int i = 0; dBuffer.hasRemaining( ); i++ ) dBuffer.put( hSet.contains( i ) ? TRUE : FALSE ); for ( IntIterator it = dSet.iterator( ); it.hasNext( ); ) if ( !hSet.contains( index ) ) for ( IntIterator it = hSet.iterator( ); it.hasNext( ); ) dSet.clear( ); dSet.addAll( hSet ); hSetChanged = false;
/**
 * Returns a new column holding only the distinct values of this column,
 * named "&lt;this column's name&gt; Unique values".
 */
@Override
public TimeColumn unique() {
  IntSet distinct = new IntOpenHashSet(data);
  TimeColumn result = emptyCopy(distinct.size());
  result.setName(name() + " Unique values");
  result.data = IntArrayList.wrap(distinct.toIntArray());
  return result;
}
/**
 * Returns a new column containing the distinct values of this column,
 * named "&lt;this column's name&gt; Unique values".
 */
@Override
public DateColumn unique() {
  IntSet seen = new IntOpenHashSet(data.size());
  int rowCount = size();
  for (int row = 0; row < rowCount; row++) {
    seen.add(data.getInt(row));
  }
  return DateColumn.create(name() + " Unique values", IntArrayList.wrap(seen.toIntArray()));
}
/**
 * Applies the pending gradient updates for one partition (sparse or dense):
 * sorts the touched indices, updates diagonals then weights, clears the
 * applied gradients, and resets the touched-index set.
 */
protected void update(boolean sparse) {
  IntSet touched = sparse ? sparse_updated_indices : dense_updated_indices;
  int[] indices = touched.toIntArray();
  Arrays.sort(indices);

  MajorVector weights = weight_vector.getMajorVector(sparse);
  MajorVector diag = diagonals.getMajorVector(sparse);
  MajorVector grads = gradients.getMajorVector(sparse);

  updateDiagonals(diag, grads, indices);
  updateWeights(weights, grads, indices, sparse);
  clearGraidents(grads, indices); // (sic) spelling matches the helper defined elsewhere
  touched.clear();
}