/**
 * Returns every distinct term recorded in this posting list.
 * @return an array of all terms; order is determined by the underlying map
 */
public String[] termSet() {
	final String[] target = new String[0];
	return occurrences.keys(target);
}
/**
 * Returns the keys of the backing map as an {@code Object} array.
 * @return all keys currently held by the backing map
 */
public Object[] toArray() {
	final Object[] allKeys = map.keys();
	return allKeys;
}
/**
 * Copies the keys of the backing map into the supplied array,
 * delegating directly to the backing map implementation.
 * @param array destination array (a new one may be allocated by the map if too small)
 * @return the array of keys
 */
public T[] keys(T[] array) {
	final T[] result = m_map.keys(array);
	return result;
}
/**
 * Returns the keys of the backing map as a {@link Set}.
 * The cast is unchecked because the backing map exposes its keys only as
 * {@code Object[]}; callers are expected to have populated it with {@code T}s.
 * @return a new mutable {@link HashSet} containing every key
 */
@SuppressWarnings("unchecked")
public Set<T> keySet() {
	final Object[] okeys = m_map.keys();
	// Presize to avoid rehashing while copying the keys across
	final HashSet<T> keyset = new HashSet<T>(okeys.length);
	for (Object o : okeys) {
		keyset.add((T) o);
	}
	return keyset;
}
/** {@inheritDoc} */
@Override
public String[] getReverseKeys() {
	final int keyCount = key2forwardOffset.size();
	final String[] destination = new String[keyCount];
	return key2forwardOffset.keys(destination);
}
/** {@inheritDoc} */
@Override
public String[] getReverseKeys() {
	final int keyCount = key2forwardOffset.size();
	final String[] destination = new String[keyCount];
	return key2forwardOffset.keys(destination);
}
/** {@inheritDoc} */
@Override
public String[] getMetaKeys() {
	final String[] rtr = metaMap.keys(new String[metaMap.size()]);
	// Order keys by their mapped index. Integer.compare is used instead of
	// subtraction, which can overflow for extreme int values.
	Arrays.sort(rtr, (o1, o2) -> Integer.compare(metaMap.get(o1), metaMap.get(o2)));
	return rtr;
}
/** {@inheritDoc} */
@Override
public String[] getMetaKeys() {
	final String[] rtr = metaMap.keys(new String[metaMap.size()]);
	// Order keys by their mapped index. Integer.compare is used instead of
	// subtraction, which can overflow for extreme int values.
	Arrays.sort(rtr, (o1, o2) -> Integer.compare(metaMap.get(o1), metaMap.get(o2)));
	return rtr;
}
final String[] terms = cache_termids.keys(new String[termCount]); Arrays.sort(terms, new Comparator<String>(){ public int compare(String o1, String o2) {
metaKeys = keys.keys(new String[keys.size()]);
metaKeys = keys.keys(new String[keys.size()]);
/**
 * Writes every term held by this structure to the given lexicon output
 * stream as a sequence of entries, in lexicographical order of term.
 * A single {@link BasicLexiconEntry} instance is reused for all terms.
 * @param lexiconStream the lexicon output stream to store to
 * @param termCodes provides the term id for each term
 * @throws IOException if the underlying stream cannot be written
 */
public void storeToStream(LexiconOutputStream<String> lexiconStream, TermCodes termCodes) throws IOException {
	final String[] sortedTerms = tfs.keys(new String[0]);
	Arrays.sort(sortedTerms);
	final BasicLexiconEntry entry = new BasicLexiconEntry();
	for (String term : sortedTerms) {
		entry.setTermId(termCodes.getCode(term));
		entry.setStatistics(nts.get(term), tfs.get(term));
		entry.setMaxFrequencyInDocuments(maxtfs.get(term));
		lexiconStream.writeNextEntry(term, entry);
	}
}
Object[] keys = phrases[ti].keys(); int[] values = phrases[ti].getValues(); double counts[] = new double[keys.length];
Object[] keys = phrases[ti].keys(); int[] values = phrases[ti].getValues(); double counts[] = new double[keys.length];
Object[] keys = phrases[ti].keys(); int[] values = phrases[ti].getValues(); double counts[] = new double[keys.length];
Object[] keys = phrases[ti].keys(); int[] values = phrases[ti].getValues(); double counts[] = new double[keys.length];
Object[] keys = phrases[ti].keys(); int[] values = phrases[ti].getValues(); double counts[] = new double[keys.length];
Object[] keys = phrases[ti].keys(); int[] values = phrases[ti].getValues(); double counts[] = new double[keys.length];
/**
 * Writes every term held by this structure to the given lexicon output
 * stream as a sequence of field-aware entries, in lexicographical order
 * of term. A fresh {@link FieldLexiconEntry} is built per term, carrying
 * the per-field term frequencies.
 * @param lexiconStream the lexicon output stream to store to
 * @param termCodes provides the term id for each term
 * @throws IOException if the underlying stream cannot be written
 */
@Override
public void storeToStream(LexiconOutputStream<String> lexiconStream, TermCodes termCodes) throws IOException {
	final String[] sortedTerms = tfs.keys(new String[0]);
	Arrays.sort(sortedTerms);
	for (String term : sortedTerms) {
		final FieldLexiconEntry entry = new FieldLexiconEntry(getFieldFrequency(term));
		entry.setTermId(termCodes.getCode(term));
		entry.setStatistics(nts.get(term), tfs.get(term));
		entry.setMaxFrequencyInDocuments(maxtfs.get(term));
		// Gather the term's frequency in each field before writing the entry
		final int[] fieldFrequencies = new int[fieldCount];
		for (int f = 0; f < fieldCount; f++) {
			fieldFrequencies[f] = field_tfs[f].get(term);
		}
		entry.setFieldFrequencies(fieldFrequencies);
		lexiconStream.writeNextEntry(term, entry);
	}
}
for(Object o : terms.keys())