/** @return the total number of elements across the int and long backing sets. */
public int size() {
  final int intCount = this.ints.size();
  final int longCount = this.longs.size();
  return intCount + longCount;
}
}
// NOTE(review): fragment (loop body's closing brace is outside this view) —
// fills longSet with random longs until it holds NUM_VALUES distinct values;
// duplicate nextLong() results are silently absorbed by the set.
while (longSet.size() < NUM_VALUES) { longSet.add(RANDOM.nextLong());
@Override public double getResults() { //return the unique number of recommended items return uniqueItems.size(); }
/** @return how many neighbor ids this set currently holds. */
@Override
public int size() {
  return neighbors.size();
}
/**
 * Constructs this object for running PageRank over a directed graph.
 *
 * @param graph the directed graph
 * @param nodes nodes in the graph
 * @param maxNodeId maximum node id
 * @param dampingFactor damping factor
 * @param maxIterations maximum number of iterations to run
 * @param tolerance L1 norm threshold for convergence
 */
public PageRank(OutIndexedDirectedGraph graph, LongOpenHashSet nodes, long maxNodeId,
    double dampingFactor, int maxIterations, double tolerance) {
  // Node ids above Integer.MAX_VALUE are rejected up front — presumably the
  // implementation indexes int-addressed arrays by node id (confirm).
  if (maxNodeId > Integer.MAX_VALUE) {
    throw new UnsupportedOperationException("maxNodeId exceeds Integer.MAX_VALUE!");
  }

  this.graph = graph;
  this.nodes = nodes;
  this.nodeCount = nodes.size();
  this.maxNodeId = maxNodeId;
  this.dampingFactor = dampingFactor;
  this.maxIterations = maxIterations;
  this.tolerance = tolerance;
}
/** Reports the element count, summed over the int and long backing sets. */
public int size() {
  return ints.size() + longs.size();
}
}
/**
 * Adds every element of {@code c} to this set, pre-sizing the backing table
 * so the bulk insert does not trigger repeated rehashes.
 */
@Override
public boolean addAll(Collection<? extends Long> c) {
  // The resulting collection will be at least c.size() big.
  if (f <= .5) {
    // Load factor at most 1/2: size directly for c.size() elements.
    ensureCapacity(c.size());
  } else {
    // Otherwise tentatively size for size() + c.size() elements.
    tryCapacity(size() + c.size());
  }
  return super.addAll(c);
}

@Override
/**
 * Computes the cardinality of the HLL.
 *
 * @return the cardinality of HLL. This will never be negative.
 */
public long cardinality() {
  final double estimate;
  switch (type) {
    case EMPTY:
      return 0; // by definition
    case EXPLICIT:
      // Explicit storage tracks exact values, so its size is the cardinality.
      return explicitStorage.size();
    case SPARSE:
      estimate = sparseProbabilisticAlgorithmCardinality();
      break;
    case FULL:
      estimate = fullProbabilisticAlgorithmCardinality();
      break;
    default:
      throw new RuntimeException("Unsupported HLL type " + type);
  }
  // Probabilistic estimates are rounded up to the nearest whole count.
  return (long) Math.ceil(estimate);
}
/**
 * Computes the cardinality of the HLL.
 *
 * @return the cardinality of HLL. This will never be negative.
 */
public long cardinality() {
  switch (type) {
    case EMPTY:
      return 0; // by definition
    case EXPLICIT:
      // Explicit storage holds exact values; its size is the exact cardinality.
      return explicitStorage.size();
    case SPARSE:
      // Probabilistic estimates are rounded up to a whole count.
      return (long) Math.ceil(sparseProbabilisticAlgorithmCardinality());
    case FULL:
      return (long) Math.ceil(fullProbabilisticAlgorithmCardinality());
    default:
      throw new RuntimeException("Unsupported HLL type " + type);
  }
}
/**
 * Serializes this neighbor set as an int count followed by the raw long ids.
 *
 * @param out the sink to write to
 * @throws IOException if a write fails
 */
@Override
public void write(DataOutput out) throws IOException {
  out.writeInt(neighbors.size());
  for (LongIterator it = neighbors.iterator(); it.hasNext(); ) {
    out.writeLong(it.nextLong());
  }
}
/**
 * Constructs the generator and, when a deleted-ids file is configured, loads
 * the set of deleted tweet ids from that bzip2-compressed file.
 *
 * Each line of the file is either a bare tweet id or "id&lt;TAB&gt;extra";
 * only the id before the first tab is kept.
 *
 * @param args index arguments; {@code args.tweetDeletedIdsFile} names the deletes file
 * @param counters shared counters passed through to the superclass
 * @throws IOException if the deletes file cannot be read
 */
public TweetGenerator(IndexCollection.Args args, IndexCollection.Counters counters) throws IOException {
  super(args, counters);
  if (!args.tweetDeletedIdsFile.isEmpty()) {
    deletes = new LongOpenHashSet();
    File deletesFile = new File(args.tweetDeletedIdsFile);
    if (!deletesFile.exists()) {
      System.err.println("Error: " + deletesFile + " does not exist!");
      System.exit(-1);
    }
    LOG.info("Reading deletes from " + deletesFile);
    // try-with-resources guarantees both streams are closed even if a read
    // throws (the original leaked them on any exception before close()).
    try (FileInputStream fin = new FileInputStream(deletesFile)) {
      // Skip the "B", "Z" magic bytes emitted by commandline bzip2 tools;
      // the original ignored read()'s return value, silently accepting a
      // short read and corrupting the decompressed stream.
      byte[] ignoreBytes = new byte[2];
      if (fin.read(ignoreBytes) != 2) {
        throw new IOException("Invalid bzip2 deletes file: " + deletesFile);
      }
      try (BufferedReader br =
          new BufferedReader(new InputStreamReader(new CBZip2InputStream(fin)))) {
        String s;
        while ((s = br.readLine()) != null) {
          // Keep only the id before the first tab, if any.
          int tab = s.indexOf('\t');
          deletes.add(Long.parseLong(tab >= 0 ? s.substring(0, tab) : s));
        }
      }
    }
    LOG.info("Read " + deletes.size() + " tweetids from deletes file.");
  }
}
// NOTE(review): fragment — reports how many distinct tweet ids were loaded.
LOG.info("Read " + tweetids.size() + " tweetids.");
/**
 * Type-specific bulk add: inserts every long in {@code c}, growing the
 * backing table up front to avoid repeated rehashes during the insert.
 */
@Override
public boolean addAll(LongCollection c) {
  // The resulting collection will hold at least c.size() elements.
  if (f <= .5) {
    // Load factor at most 1/2: size directly for c.size() elements.
    ensureCapacity(c.size());
  } else {
    // Otherwise tentatively size for size() + c.size() elements.
    tryCapacity(size() + c.size());
  }
  return super.addAll(c);
}

@Override
// NOTE(review): fragment — trailing arguments of a status/progress log call:
// timestamp, statuses processed, distinct tweets, distinct hashtags,
// throughput (statusCnt over `duration`), and JVM total/free memory.
new Date(), statusCnt, tweets.size(), hashtags.size(), (float) statusCnt / duration, Runtime.getRuntime().totalMemory(), Runtime.getRuntime().freeMemory()));
// NOTE(review): fragment — selects the metric denominator: realK for the
// preceding (unseen) precision branch, the user's transaction count for
// recall; any other metric type is a programming error.
divider = realK; }else if(type == Type.Recall){ divider = userTransactions.size(); }else{ throw new RuntimeException("Neither precision nor recall defined.");
// Merges another (presumably RAM-resident) index into this one: first applies
// any pending deletes to this index, then appends the other index's segments.
public void loadFromIndex(BaseSearchIndex<R> index) throws IOException {
  LongSet delDocs = null;
  // delete docs in disk index first
  // Snapshot-and-clear the pending delete set under the lock so concurrent
  // writers see a consistent view; the actual deletion runs outside the lock.
  synchronized (this) {
    if (_delDocs != null && _delDocs.size() > 0) {
      delDocs = _delDocs;
      clearDeletes();
    }
  }
  deleteDocs(delDocs);

  // open readOnly ram index reader
  ZoieMultiReader<R> reader = index.openIndexReader();
  if (reader == null) {
    // Nothing to merge.
    return;
  }

  // merge the readOnly ram index with the disk index
  IndexWriter writer = null;
  try {
    writer = openIndexWriter(null, null);
    writer.addIndexes(reader.directory());
    writer.maybeMerge();
  } finally {
    // NOTE(review): closeIndexWriter() presumably closes the writer returned
    // by openIndexWriter(null, null) — confirm. `reader` is not closed here;
    // ownership appears to remain with `index`.
    closeIndexWriter();
  }
}
// Folds the given index into this one: pending deletes are applied to this
// index first, then the other index's segments are appended via addIndexes.
public void loadFromIndex(BaseSearchIndex<R> index) throws IOException {
  LongSet delDocs = null;
  // delete docs in disk index first
  // The pending delete set is swapped out and cleared while holding the
  // monitor; deleteDocs itself runs unlocked on the snapshot.
  synchronized (this) {
    if (_delDocs != null && _delDocs.size() > 0) {
      delDocs = _delDocs;
      clearDeletes();
    }
  }
  deleteDocs(delDocs);

  // open readOnly ram index reader
  ZoieMultiReader<R> reader = index.openIndexReader();
  if (reader == null) {
    // No reader means nothing to merge.
    return;
  }

  // merge the readOnly ram index with the disk index
  IndexWriter writer = null;
  try {
    writer = openIndexWriter(null, null);
    writer.addIndexes(reader.directory());
    writer.maybeMerge();
  } finally {
    // NOTE(review): cleanup is delegated to closeIndexWriter(), which
    // presumably closes the writer opened above — confirm. The reader is
    // deliberately left open; its lifecycle seems owned by `index`.
    closeIndexWriter();
  }
}
// NOTE(review): fragment — EXPLICIT HLLs serialize each stored value as a
// full 64-bit word, so the serializer is sized by Long.SIZE and element count.
case EXPLICIT: { final IWordSerializer serializer = schemaVersion.getSerializer(type, Long.SIZE, explicitStorage.size());
// NOTE(review): fragment — for the EXPLICIT representation every element is
// written as a 64-bit word; word size Long.SIZE and word count size() the serializer.
case EXPLICIT: { final IWordSerializer serializer = schemaVersion.getSerializer(type, Long.SIZE, explicitStorage.size());
// NOTE(review): fragment — promotes the HLL out of EXPLICIT storage once the
// explicit set outgrows its threshold, moving to SPARSE unless SPARSE is disabled.
if(explicitStorage.size() > explicitThreshold) { if(!sparseOff) { initializeStorage(HLLType.SPARSE);