/**
 * Returns the human-readable (external) value of this field for the given document,
 * resolved through the field-cache ord/lookup arrays and the field type's conversion.
 */
public String strVal(int doc) {
  return ft.indexedToReadable(lookup[order[doc]]);
}
/**
 * Appends the external (readable) form of an indexed value to {@code out}.
 * If {@code ft} is null the raw value is written unchanged. If the conversion
 * throws, a best-effort marker of the form {@code EXCEPTION(val=...)} is
 * written instead of propagating the failure.
 *
 * @param val   the indexed value to render
 * @param ft    field type used for conversion; may be null
 * @param out   destination for the rendered text
 * @param flags currently unused by this method
 * @throws IOException if writing to {@code out} fails
 */
static void writeFieldVal(String val, FieldType ft, Appendable out, int flags) throws IOException {
  if (ft == null) {
    out.append(val);
    return;
  }
  try {
    out.append(ft.indexedToReadable(val));
  } catch (Exception e) {
    // Deliberate best-effort: render a marker rather than abort the whole output.
    out.append("EXCEPTION(val=");
    out.append(val);
    out.append(")");
  }
}
/** @see #toString(Query,IndexSchema) */
/**
 * Increments the per-facet count for the term of {@code docID} within the stats
 * bucket {@code statsTermNum}.
 *
 * @return true if the document's term falls inside [startTermIndex, startTermIndex + nTerms)
 *         and was counted; false otherwise
 */
public boolean facetTermNum(int docID, int statsTermNum) {
  int term = termNum[docID];
  int arrIdx = term - startTermIndex;
  // Guard clause: ignore terms outside the window being faceted.
  if (arrIdx < 0 || arrIdx >= nTerms) {
    return false;
  }
  String key = ft.indexedToReadable(terms[term]);
  HashMap<String, Integer> statsTermCounts = facetStatsTerms.get(statsTermNum);
  Integer previous = statsTermCounts.get(key);
  statsTermCounts.put(key, previous == null ? 1 : previous + 1);
  return true;
}
/**
 * Feeds the collapsed document's field values into each registered aggregate function.
 * For every aggregate field, the value is pulled from the field cache, converted to
 * its readable form via the field's type, and collected under the given group.
 */
public void documentCollapsed(int docId, CollapseGroup collapseGroup, CollapseContext collapseContext) {
  // Iterate entries directly instead of keySet() + get(): avoids a second map
  // lookup per field, and the field name is fetched once instead of twice.
  for (java.util.Map.Entry<AggregateField, AggregateFunction> entry : functions.entrySet()) {
    String fieldName = entry.getKey().getFieldName();
    FieldCache.StringIndex stringIndex = fieldCaches.get(fieldName);
    String fieldCacheValue = stringIndex.lookup[stringIndex.order[docId]];
    String value = fieldTypes.get(fieldName).indexedToReadable(fieldCacheValue);
    entry.getValue().collect(collapseGroup, value);
  }
}
/**
 * Accumulates the value {@code v} into the per-term stats bucket for the term
 * of {@code docID}, creating the bucket on first use.
 *
 * @return true if a non-null value was accumulated for an in-range term;
 *         false if the term is out of range or the value was missing
 */
public boolean facet(int docID, Double v) {
  int term = termNum[docID];
  int arrIdx = term - startTermIndex;
  // Guard clause: term outside the faceted window.
  if (arrIdx < 0 || arrIdx >= nTerms) {
    return false;
  }
  String key = ft.indexedToReadable(terms[term]);
  StatsValues stats = facetStatsValues.get(key);
  if (stats == null) {
    stats = new StatsValues();
    facetStatsValues.put(key, stats);
  }
  // A null value is tracked as "missing" and reported as not accumulated.
  if (v == null) {
    stats.missing++;
    return false;
  }
  stats.accumulate(v);
  return true;
}
/**
 * Deletes all documents whose unique key field matches {@code indexedId}.
 * Closes the writer and (re)opens the searcher first so the delete is applied
 * through the reader.
 *
 * @param indexedId the unique key in its indexed form
 * @return the number of documents deleted
 * @throws IOException if the underlying index operation fails
 * @throws SolrException (BAD_REQUEST) if the schema has no unique key field
 */
protected int deleteInIndex(String indexedId) throws IOException {
  if (idField == null) {
    throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
        "Operation requires schema to have a unique key field");
  }
  closeWriter();
  openSearcher();
  IndexReader ir = searcher.getReader();
  // Fix: the original declared a TermDocs local that was never assigned and
  // wrapped the body in a try/finally that "closed" the always-null reference
  // inside an empty catch. That dead resource handling is removed.
  Term term = new Term(idField.getName(), indexedId);
  int num = ir.deleteDocuments(term);
  if (core.log.isTraceEnabled()) {
    core.log.trace(core.getLogId() + "deleted " + num + " docs matching id "
        + idFieldType.indexedToReadable(indexedId));
  }
  return num;
}
/**
 * Adds the readable collapse-field value to the result entry of every document
 * in {@code docs} that is the head of a collapse group. Documents with no
 * associated group are skipped.
 */
public void getResult(NamedList result, DocList docs, CollapseContext collapseContext) {
  Map<Integer, CollapseGroup> headGroups = collapseContext.getDocumentHeadCollapseGroupAssociation();
  DocIterator it = docs.iterator();
  while (it.hasNext()) {
    int id = it.nextDoc();
    CollapseGroup group = headGroups.get(id);
    if (group == null) {
      continue; // not a collapse-group head
    }
    NamedList groupResult = getCollapseGroupResult(id, result);
    groupResult.add("fieldValue", collapseFieldType.indexedToReadable(group.getKey()));
  }
}
/**
 * Returns a printable form of this command's document id, trying in order:
 * the indexed id (converted to readable form), the stored document's unique
 * key, then the input document's unique key field. Falls back to "(null)".
 *
 * @param schema schema used to locate the unique key field
 * @return a human-readable id, or the literal string "(null)" if none is available
 */
public String getPrintableId(IndexSchema schema) {
  SchemaField sf = schema.getUniqueKeyField();
  if (indexedId != null) {
    // Fix: reuse the already-fetched unique key field instead of a redundant
    // second schema.getUniqueKeyField() call.
    return sf.getType().indexedToReadable(indexedId);
  }
  if (doc != null) {
    return schema.printableUniqueKey(doc);
  }
  if (solrDoc != null) {
    SolrInputField field = solrDoc.getField(sf.getName());
    if (field != null) {
      return field.getFirstValue().toString();
    }
  }
  return "(null)";
}
/** * This is a destructive call... the queue is empty at the end */ public NamedList<Integer> toNamedList( IndexSchema schema ) { // reverse the list.. List<TermInfo> aslist = new LinkedList<TermInfo>(); while( size() > 0 ) { aslist.add( 0, (TermInfo)pop() ); } NamedList<Integer> list = new NamedList<Integer>(); for (TermInfo i : aslist) { String txt = i.term.text(); SchemaField ft = schema.getFieldOrNull( i.term.field() ); if( ft != null ) { txt = ft.getType().indexedToReadable( txt ); } list.add( txt, i.docFreq ); } return list; } }
// NOTE(review): mid-method fragment — the enclosing faceting loop(s) are not visible here.
// Pattern: decrement `off` to skip the requested offset of qualifying entries, then
// decrement `lim` to cap the number of entries emitted; each emitted term is converted
// from its indexed form to its readable form before being added to the response.
// Presumably `c` is the count for term i and `mincount` the facet.mincount filter — TODO confirm.
if (--off>=0) continue; if (--lim<0) break; res.add(ft.indexedToReadable(p.key), p.val); if (c<mincount || --off>=0) continue; if (--lim<0) break; res.add(ft.indexedToReadable(terms[startTermIndex+i]), c);
// NOTE(review): mid-method fragment — enclosing loops not visible. Same offset/limit
// paging idiom as elsewhere in this file: `off` skips leading qualifying entries,
// `lim` caps emitted entries, and terms are rendered via ft.indexedToReadable before
// being added to the response list.
if (c >= mincount && --off<0) { if (--lim<0) break; res.add(ft.indexedToReadable(t.text()), c); if (--off>=0) continue; if (--lim<0) break; res.add(ft.indexedToReadable(p.key), p.val);
// NOTE(review): fragment of a token-serialization method (analysis response building).
// The token's term text is converted to readable form; the comparison below detects
// whether the conversion actually changed the text (the branch body is outside this view).
NamedList<Object> tokenNamedList = new SimpleOrderedMap<Object>(); String text = fieldType.indexedToReadable(token.term()); tokenNamedList.add("text", text); if (!text.equals(token.term())) {
// NOTE(review): fragment of a stats-accumulation loop. A non-null raw indexed value is
// converted to readable form and parsed as a double before being folded into the
// aggregate stats. Assumes the field's readable form is numeric — TODO confirm; a
// non-numeric field would throw NumberFormatException here.
Double v = null; if( raw != null ) { v = Double.parseDouble( all.ft.indexedToReadable(raw) ); allstats.accumulate( v );
// NOTE(review): mid-method fragment — two emission sites from different branches of an
// unseen faceting loop. Both resolve a term's text via a term enumerator, convert it to
// readable form, and add (label, count) to the response, honoring the `lim` cap.
String label = ft.indexedToReadable(getTermText(te, tnum)); res.add(label, c); if (--lim<0) break; String label = ft.indexedToReadable(getTermText(te, i)); res.add(label, c);
// NOTE(review): fragment of a stats loop over term counts. When `doNegative` is set the
// count is derived by subtracting from the per-term maximum (inverted-count optimization
// — presumably for sets matching most documents; TODO confirm). Zero-count terms are
// skipped; otherwise the term text is parsed as a double and accumulated `c` times.
int c = doNegative ? maxTermCounts[i] - counts[i] : counts[i]; if (c == 0) continue; Double value = Double.parseDouble(ft.indexedToReadable(getTermText(te, i))); allstats.accumulate(value, c);
// NOTE(review): fragment of a terms-component loop. Terms within the [freqmin, freqmax]
// docFreq window are labeled either with the raw indexed text or its readable form
// (controlled by the `raw` flag); when sorting by frequency they are queued as
// (label, docFreq) pairs instead of being emitted directly.
if (docFreq >= freqmin && docFreq <= freqmax) { String label = raw ? indexedText : ft.indexedToReadable(indexedText); if (sort) { queue.add(new CountPair<String, Integer>(label, docFreq));