// Candidate unit ids; a Trove primitive int set avoids boxing each id.
TIntHashSet candidateUnitSet = new TIntHashSet();
// NOTE(review): duplicate declaration of candidateUnitSet — looks like a
// copy/paste artifact; a second declaration of the same name in the same
// scope will not compile. Confirm which occurrence should remain.
TIntHashSet candidateUnitSet = new TIntHashSet();
/**
 * Shared initialisation used by the constructors.
 * @param capacity expected number of parameter indices.
 */
private void initCommon(int capacity) {
    // Fix: 'capacity' was accepted but never used; size the set with it so it
    // does not need to rehash while growing to the expected size.
    m_paramIndices = new TIntHashSet(capacity);
    m_onceThrough = false;
}
/**
 * Shared initialisation used by the constructors.
 * @param capacity expected number of parameter indices.
 */
private void initCommon(int capacity) {
    // Fix: 'capacity' was accepted but never used; size the set with it so it
    // does not need to rehash while growing to the expected size.
    m_paramIndices = new TIntHashSet(capacity);
    m_onceThrough = false;
}
/**
 * Shared initialisation used by the constructors.
 * @param capacity expected number of parameter indices.
 * @param batchsize number of saved function-value slots to pre-allocate.
 */
private void initCommon(int capacity, int batchsize) {
    // Fix: 'capacity' was accepted but never used; size the set with it so it
    // does not need to rehash while growing to the expected size.
    m_paramIndices = new TIntHashSet(capacity);
    m_onceThrough = false;
    m_savedFunctionValues = new LDouble[batchsize];
    for (int i = 0; i < batchsize; i++) {
        m_savedFunctionValues[i] = new LDouble();
    }
}
/**
 * Shared initialisation used by the constructors.
 * @param capacity expected number of parameter indices.
 * @param batchsize number of saved function-value slots to pre-allocate.
 */
private void initCommon(int capacity, int batchsize) {
    // Fix: 'capacity' was accepted but never used; size the set with it so it
    // does not need to rehash while growing to the expected size.
    m_paramIndices = new TIntHashSet(capacity);
    m_onceThrough = false;
    m_savedFunctionValues = new LDouble[batchsize];
    for (int i = 0; i < batchsize; i++) {
        m_savedFunctionValues[i] = new LDouble();
    }
}
/**
 * Constructs a QrelsHashSet for a single query, with empty judgement
 * containers ready to be populated.
 * @param _queryid String the query identifier.
 */
public QrelsHashSet(String _queryid){
    queryid = _queryid;
    relGrade = new TIntHashSet();
    nonRelDocnos = new THashSet<String>();
    relGradeDocnosMap = new TIntObjectHashMap<THashSet<String>>();
}
/**
 * Creates an instance of the class with a given query identifier.
 * Initialises empty containers for non-relevant docnos, relevance grades,
 * and the grade-to-docnos mapping.
 * @param _queryid String the query identifier.
 */
public QrelsHashSet(String _queryid){
    this.queryid = _queryid;
    nonRelDocnos = new THashSet<String>();
    relGrade = new TIntHashSet();
    relGradeDocnosMap = new TIntObjectHashMap<THashSet<String>>();
}
/**
 * Computes the union of two int arrays: every distinct value that occurs
 * in either input, returned in ascending order. Neither input is modified.
 * @param arr1 first input array.
 * @param arr2 second input array.
 * @return sorted array of the distinct values from both inputs.
 */
public static int[] union(int[] arr1, int[] arr2) {
    TIntHashSet merged = new TIntHashSet();
    merged.addAll(arr1);
    merged.addAll(arr2);
    int[] result = merged.toArray();
    Arrays.sort(result);
    return result;
}
/**
 * Unions two int arrays. The result contains each distinct value from
 * either argument exactly once, sorted ascending; the arguments are left
 * untouched.
 * @param arr1 first input array.
 * @param arr2 second input array.
 * @return sorted distinct union of the two arrays.
 */
public static int[] union(int[] arr1, int[] arr2) {
    // A primitive set deduplicates without boxing; sort afterwards since
    // TIntHashSet has no ordering of its own.
    TIntHashSet distinctValues = new TIntHashSet();
    distinctValues.addAll(arr1);
    distinctValues.addAll(arr2);
    int[] sortedUnion = distinctValues.toArray();
    Arrays.sort(sortedUnion);
    return sortedUnion;
}
/**
 * Selects, for one document, the category ids whose classification
 * probability ranks in the top {@code topK}.
 * @param docId the document whose category scores are examined.
 * @param topK number of highest-probability categories to keep.
 * @return set of the top-K category ids for this document.
 */
private TIntHashSet filterByTopProbabilities(int docId, int topK) {
    // Sized so that testSize entries fit under the 0.75 load factor
    // without rehashing.
    TIntDoubleHashMap topProbRank = new TIntDoubleHashMap((int) (testSize + testSize * 0.25), (float) 0.75);
    Set<Entry<Short, ClassifierRangeWithScore>> entries = classification.getDocumentScoresAsSet(docId);
    Iterator<Entry<Short, ClassifierRangeWithScore>> iterator = entries.iterator();
    while (iterator.hasNext()) {
        Entry<Short, ClassifierRangeWithScore> next = iterator.next();
        // NOTE(review): keys are Short but categoriesFilter is a primitive int
        // set — relies on unboxing plus widening; confirm ids never collide.
        if (categoriesFilter.contains(next.getKey())) {
            ClassifierRangeWithScore value = next.getValue();
            // Distance of the score from the decision border is converted to a
            // probability by the (externally defined) probability() helper.
            topProbRank.put(next.getKey(), probability(Math.abs(value.score - value.border), next.getKey()));
        }
    }
    Ranker r = new Ranker();
    // presumably Ranker.get orders category ids by descending probability and
    // toNativeArray(0, topK) keeps the first topK — TODO confirm.
    return new TIntHashSet(r.get(topProbRank).toNativeArray(0, topK));
}
/**
 * Creates a static ranker over the given classification score database.
 * @param trainSize number of training documents.
 * @param classification per-document classification scores.
 * @param categoriesFilter category ids to consider; when null or empty,
 *        every category id is accepted.
 */
public StaticRank(int trainSize, ClassificationScoreDB classification, TIntHashSet categoriesFilter) {
    this.trainSize = trainSize;
    this.classification = classification;
    this.testSize = classification.getDocumentCount();
    if (categoriesFilter == null || categoriesFilter.isEmpty()) {
        // No explicit filter: build one that accepts all categories. Sized so
        // testSize entries fit under the 0.75 load factor without rehashing.
        this.categoriesFilter = new TIntHashSet((int) (testSize + testSize * 0.25), (float) 0.75);
        // NOTE(review): assumes document 0's score set lists every category id
        // 0..n-1 — TODO confirm all documents share the same category count.
        for (short i = 0; i < classification.getDocumentScoresAsSet(0).size(); i++) {
            this.categoriesFilter.add(i);
        }
    } else {
        this.categoriesFilter = categoriesFilter;
    }
}
/** Insert a term into this document, occurs at given block id, and in the given fields */
public void insert(String t, int[] fieldIds, int blockId) {
    super.insert(t, fieldIds);
    TIntHashSet blocks = term_blocks.get(t);
    if (blocks == null) {
        blocks = new TIntHashSet(/* TODO: choose an initial capacity */);
        term_blocks.put(t, blocks);
    }
    blocks.add(blockId);
    // NOTE(review): blockCount grows even when blockId was already recorded
    // for this term — matches the original behaviour.
    blockCount++;
}
/** Insert a term into this document tf times, occurs at given block id, and in the given fields */
public void insert(int tf, String t, int[] fieldIds, int blockId) {
    super.insert(tf, t, fieldIds);
    TIntHashSet blocks = term_blocks.get(t);
    if (blocks == null) {
        blocks = new TIntHashSet(/* TODO: choose an initial capacity */);
        term_blocks.put(t, blocks);
    }
    blocks.add(blockId);
    // NOTE(review): blockCount grows even when blockId was already recorded
    // for this term — matches the original behaviour.
    blockCount++;
}
/** Insert a term into this document, occurs at given block id, and in the given field */
public void insert(String t, int fieldId, int blockId) {
    super.insert(t, fieldId);
    TIntHashSet blocks = term_blocks.get(t);
    if (blocks == null) {
        blocks = new TIntHashSet(/* TODO: choose an initial capacity */);
        term_blocks.put(t, blocks);
    }
    blocks.add(blockId);
    // NOTE(review): blockCount grows even when blockId was already recorded
    // for this term — matches the original behaviour.
    blockCount++;
}
/** Insert a term into this document, occurs at given block id */
public void insert(String t, int blockId) {
    insert(t);
    TIntHashSet blocks = term_blocks.get(t);
    if (blocks == null) {
        blocks = new TIntHashSet(/* TODO: choose an initial capacity */);
        term_blocks.put(t, blocks);
    }
    blocks.add(blockId);
    // NOTE(review): blockCount grows even when blockId was already recorded
    // for this term — matches the original behaviour.
    blockCount++;
}
/**
 * Runs recovery on every handler and collects each handler's recoverable
 * phase numbers.
 * @param handlers the recovery handlers to interrogate.
 * @return one phase set per handler, or null when every handler reported
 *         no phases (i.e. a fresh store with nothing to recover).
 * @throws SimpleXAResourceException if one handler has phases while another
 *         reports none, so no common phase can exist.
 */
private TIntHashSet[] recoverRecoveryHandlers(SimpleXARecoveryHandler[] handlers) throws SimpleXAResourceException {
    TIntHashSet[] phaseSets = new TIntHashSet[handlers.length];
    boolean allEmpty = true;
    for (int i = 0; i < handlers.length; i++) {
        phaseSets[i] = new TIntHashSet(handlers[i].recover());
        // NOTE(review): this only detects an empty set seen AFTER a non-empty
        // one; an empty set that precedes the first non-empty set is never
        // re-checked — confirm the asymmetry is intended.
        if (!allEmpty && phaseSets[i].isEmpty()) {
            throw new SimpleXAResourceException("Unable to find common phase in pre-existing database");
        } else if (!phaseSets[i].isEmpty()) {
            allEmpty = false;
        }
    }
    // null signals "no pre-existing phases anywhere".
    return !allEmpty ? phaseSets : null;
}
/**
 * Returns the intersection of two int arrays: every element of arr2 that
 * also occurs in arr1, in ascending order (duplicates in arr2 are kept,
 * matching the original behaviour).
 * Fix: the previous version called Arrays.sort(arr2) directly, silently
 * reordering the caller's array; a private copy is sorted instead.
 * @param arr1 array providing the membership set.
 * @param arr2 array whose matching elements are returned; not modified.
 * @return sorted elements of arr2 that are present in arr1.
 */
public static int[] intersection(int[] arr1, int[] arr2) {
    TIntHashSet set = new TIntHashSet();
    set.addAll(arr1);
    // Sort a clone so the caller's arr2 is left untouched.
    int[] sortedCopy = arr2.clone();
    Arrays.sort(sortedCopy);
    TIntArrayList list = new TIntArrayList();
    for (int i : sortedCopy) {
        if (set.contains(i)) {
            list.add(i);
        }
    }
    return list.toNativeArray();
}
public int[] recover() throws SimpleXAResourceException { int[] nodePhases = persistentNodePool.recover(); int[] stringPhases = persistentStringPool.recover(); // If both are empty then return empty. if (nodePhases.length == 0 && stringPhases.length == 0) { return nodePhases; } // Else if the intersection is empty *error*, TIntHashSet phaseSet = new TIntHashSet(nodePhases); phaseSet.retainAll(stringPhases); if (phaseSet.isEmpty()) { throw new SimpleXAResourceException("No matching phases between Node and String Pools."); } //else return the intersection. return phaseSet.toArray(); }
/**
 * Records the term in the field document posting list, tagged with the id of
 * each field the current position lies in; a null term is ignored.
 * @param term the term to record, possibly null.
 */
public void processTerm(String term) {
    if (term == null) {
        return;
    }
    TIntHashSet matchedFieldIds = new TIntHashSet(0);
    for (String docField : docFields) {
        matchedFieldIds.add(fieldIDs.get(docField));
    }
    // Fall back to the catch-all "ELSE" field when no field matched.
    if (fieldIDs.containsKey("ELSE") && matchedFieldIds.size() == 0) {
        matchedFieldIds.add(fieldIDs.get("ELSE"));
    }
    ((FieldDocumentPostingList) fdpl).insert(term, matchedFieldIds.toArray());
}