private void addPatterns(String id, Map<Integer, Set<E>> p, boolean commit) {
  try {
    setIndexWriter();
    Document doc = new Document();
    doc.add(new StringField("sentid", id, Field.Store.YES));
    doc.add(new Field("patterns", getBytes(p), LuceneFieldType.NOT_INDEXED));
    indexWriter.addDocument(doc);
    if (commit) {
      indexWriter.commit();
      //closeIndexWriter();
    }
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
}
private static void writeSomething( IndexReference indexReference ) throws IOException
{
    IndexWriter writer = indexReference.getWriter();
    writer.addDocument( new Document() );
    writer.commit();
}
document.add( new StoredField( TX_STATE_KEY, TX_STATE_VALUE ) );
cachedDocuments.put( id, document );
add = true;
document.add( new StringField( ORPHANS_KEY, ORPHANS_VALUE, Store.NO ) );
addOrphan( null );
document.add( new StringField( ORPHANS_KEY, key, Store.NO ) );
addOrphan( key );
writer.addDocument( document );
@Override
protected void add(List<CoreLabel> tokens, String sentid, boolean addProcessedText) {
  try {
    setIndexWriter();
    Document doc = new Document();
    for (CoreLabel l : tokens) {
      for (Map.Entry<String, String> en : transformCoreLabeltoString.apply(l).entrySet()) {
        doc.add(new StringField(en.getKey(), en.getValue(), Field.Store.YES)); //, ANALYZED));
      }
      if (addProcessedText) {
        String ptxt = l.get(PatternsAnnotations.ProcessedTextAnnotation.class);
        if (!stopWords.contains(ptxt.toLowerCase()))
          doc.add(new StringField(Token.getKeyForClass(PatternsAnnotations.ProcessedTextAnnotation.class),
              ptxt, Field.Store.YES)); //, ANALYZED));
      }
    }
    doc.add(new StringField("sentid", sentid, Field.Store.YES));
    if (tokens != null && saveTokens)
      doc.add(new Field("tokens", getProtoBufAnnotation(tokens), LuceneFieldType.NOT_INDEXED));
    indexWriter.addDocument(doc);
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
}
fa.setFoldingEnabled(RuntimeEnvironment.getInstance().isFoldingEnabled());
Document doc = new Document();
try (Writer xrefOut = newXrefWriter(fa, path)) {
    analyzerGuru.populateDocument(doc, file, path, fa, xrefOut);
    writer.addDocument(doc);
} catch (Throwable t) {
    cleanupResources(doc);
private void indexRecord(TxnHeader header, Record record, AtomicInteger count,
        AtomicLong from, AtomicLong to) throws IOException {
    if ( record instanceof CreateTxn ) {
        CreateTxn createTxn = (CreateTxn)record;
        EntryTypes type = createTxn.getEphemeral()
            ? EntryTypes.CREATE_EPHEMERAL : EntryTypes.CREATE_PERSISTENT;
        Document document = makeDocument(header, type, count, from, to);
        addPath(document, createTxn.getPath());
        addData(document, createTxn.getData());
        writer.addDocument(document);
    } else if ( record instanceof DeleteTxn ) {
        DeleteTxn deleteTxn = (DeleteTxn)record;
        Document document = makeDocument(header, EntryTypes.DELETE, count, from, to);
        addPath(document, deleteTxn.getPath());
        writer.addDocument(document);
    } else if ( record instanceof SetDataTxn ) {
        SetDataTxn setDataTxn = (SetDataTxn)record;
        NumericField versionField = new NumericField(FieldNames.VERSION, Field.Store.YES, true);
        versionField.setIntValue(setDataTxn.getVersion());
        Document document = makeDocument(header, EntryTypes.SET_DATA, count, from, to);
        addPath(document, setDataTxn.getPath());
        addData(document, setDataTxn.getData());
        document.add(versionField);
        writer.addDocument(document);
    }
}
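The NumericField in the SetDataTxn branch is the pre-4.0 Lucene API. On current Lucene releases the same indexed-and-stored integer version would typically be written as a point field plus a stored field; a minimal sketch, assuming Lucene 6+ and the same FieldNames.VERSION constant:

    document.add(new IntPoint(FieldNames.VERSION, setDataTxn.getVersion()));    // indexed for exact/range queries
    document.add(new StoredField(FieldNames.VERSION, setDataTxn.getVersion())); // stored so the value can be retrieved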
private void addCluster(IndexWriter iwriter, StopCluster stopCluster) throws IOException {
    Document doc = new Document();
    doc.add(new TextField("name", stopCluster.name, Field.Store.YES));
    doc.add(new DoubleField("lat", stopCluster.lat, Field.Store.YES));
    doc.add(new DoubleField("lon", stopCluster.lon, Field.Store.YES));
    doc.add(new StringField("id", stopCluster.id, Field.Store.YES));
    doc.add(new StringField("category", Category.CLUSTER.name(), Field.Store.YES));
    iwriter.addDocument(doc);
}
@Test
void createWritablePartition() throws Exception
{
    try ( AbstractIndexPartition indexPartition = new WritableIndexPartitionFactory( IndexWriterConfigs::standard )
            .createPartition( testDirectory.directory(), directory ) )
    {
        try ( IndexWriter indexWriter = indexPartition.getIndexWriter() )
        {
            indexWriter.addDocument( new Document() );
            indexWriter.commit();
            indexPartition.maybeRefreshBlocking();
            try ( PartitionSearcher searcher = indexPartition.acquireSearcher() )
            {
                assertEquals( 1, searcher.getIndexSearcher().getIndexReader().numDocs(),
                        "We should be able to see newly added document" );
            }
        }
    }
}
fields = globalDocumentBuilder.createDescriptorFields(image);
for (Field field : fields) {
    doc.add(field);
}
writer.addDocument(doc);
private static void insertRandomDocuments( IndexWriter writer ) throws IOException
{
    Document doc = new Document();
    doc.add( new StringField( "a", "b", Field.Store.YES ) );
    doc.add( new StringField( "c", "d", Field.Store.NO ) );
    writer.addDocument( doc );
    writer.commit();
}
public void index(Node node, Reader indexStream) {
    try {
        Document doc = new Document();
        LARQ.store(doc, node);
        LARQ.index(doc, indexStream);
        getIndexWriter().addDocument(doc);
    } catch (IOException ex) {
        throw new ARQLuceneException("index", ex);
    }
}
/**
 * Writes a document to contain the serialized version of {@code settings},
 * with a {@link QueryBuilder#OBJUID} value set to
 * {@link #INDEX_ANALYSIS_SETTINGS_OBJUID}. An existing version of the
 * document is first deleted.
 * @param writer a defined, target instance
 * @param settings a defined instance
 * @throws IOException if an I/O error occurs while writing to the Lucene index
 */
public void write(IndexWriter writer, IndexAnalysisSettings settings)
        throws IOException {
    byte[] objser = settings.serialize();
    writer.deleteDocuments(new Term(QueryBuilder.OBJUID,
            INDEX_ANALYSIS_SETTINGS_OBJUID));
    Document doc = new Document();
    StringField uidfield = new StringField(QueryBuilder.OBJUID,
            INDEX_ANALYSIS_SETTINGS_OBJUID, Field.Store.NO);
    doc.add(uidfield);
    doc.add(new StoredField(QueryBuilder.OBJSER, objser));
    doc.add(new StoredField(QueryBuilder.OBJVER, INDEX_ANALYSIS_SETTINGS_OBJVER));
    writer.addDocument(doc);
}
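For context, a settings document written this way would be read back by querying on the same OBJUID term. A minimal sketch of the reverse lookup, assuming Lucene 8.x and an open DirectoryReader over the same index; the variable names here are illustrative, not OpenGrok's API:

    IndexSearcher searcher = new IndexSearcher(reader); // 'reader' assumed to be an open DirectoryReader
    TopDocs hits = searcher.search(
            new TermQuery(new Term(QueryBuilder.OBJUID, INDEX_ANALYSIS_SETTINGS_OBJUID)), 1);
    if (hits.totalHits.value > 0) {
        Document found = searcher.doc(hits.scoreDocs[0].doc);
        byte[] objser = found.getBinaryValue(QueryBuilder.OBJSER).bytes;
        // deserializing objser would rebuild the IndexAnalysisSettings instance
    }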
public void index(Node node, String indexStr) {
    try {
        Document doc = new Document();
        LARQ.store(doc, node);
        LARQ.index(doc, indexStr);
        getIndexWriter().addDocument(doc);
    } catch (IOException ex) {
        throw new ARQLuceneException("index", ex);
    }
}
public void put(Object key, OIdentifiable value, Document doc) {
    if (deleted.remove(value.getIdentity().toString())) {
        doc.add(OLuceneIndexType.createField(TMP, value.getIdentity().toString(), Field.Store.YES));
        updated.add(value.getIdentity().toString());
    }
    try {
        writer.addDocument(doc);
    } catch (IOException e) {
        throw OException.wrapException(
            new OLuceneIndexException("unable to add document to changes index"), e);
    }
}
Document doc = new Document();
doc.add(field);
if (writer.getConfig().getOpenMode() == OpenMode.CREATE) {
    writer.addDocument(doc);
} else {
    writer.updateDocument(new Term("content", strs[i]), doc);
}
public void index(RDFNode rdfNode, Reader indexStream) {
    try {
        Document doc = new Document();
        LARQ.store(doc, rdfNode.asNode());
        LARQ.index(doc, indexStream);
        getIndexWriter().addDocument(doc);
    } catch (IOException ex) {
        throw new ARQLuceneException("index", ex);
    }
}
Document doc = new Document();
doc.add(new Field(LUCENE_FIELD_QUERY, query, Field.Store.YES, Field.Index.NOT_ANALYZED));
long time = System.currentTimeMillis();
doc.add(new Field(LUCENE_FIELD_MODIFIED,
        DateTools.timeToString(time, DateTools.Resolution.SECOND),
        Field.Store.YES, Field.Index.NOT_ANALYZED));
columnName = LUCENE_FIELD_COLUMN_PREFIX + columnName;
doc.add(new Field(columnName, data, Field.Store.NO, Field.Index.ANALYZED));
buff.appendExceptFirst(" ");
try {
    indexAccess.writer.addDocument(doc);
    if (commitIndex) {
        commitIndex();
    }
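The four-argument Field constructor with Field.Index flags is the Lucene 3.x API. On Lucene 4 and later the same three fields map onto the dedicated field types (NOT_ANALYZED becomes StringField, ANALYZED becomes TextField); a minimal sketch under that assumption:

    doc.add(new StringField(LUCENE_FIELD_QUERY, query, Field.Store.YES));          // untokenized key
    doc.add(new StringField(LUCENE_FIELD_MODIFIED,
            DateTools.timeToString(time, DateTools.Resolution.SECOND), Field.Store.YES));
    doc.add(new TextField(columnName, data, Field.Store.NO));                      // analyzed full text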
public void index(RDFNode rdfNode, String indexStr) {
    try {
        Document doc = new Document();
        LARQ.store(doc, rdfNode.asNode());
        LARQ.index(doc, indexStr);
        getIndexWriter().addDocument(doc);
    } catch (IOException ex) {
        throw new ARQLuceneException("index", ex);
    }
}
private void addCorner(IndexWriter iwriter, StreetVertex sv) throws IOException {
    String mainStreet = null;
    String crossStreet = null;
    // TODO score based on OSM street type, using intersection nodes instead of vertices.
    for (StreetEdge pse : Iterables.filter(sv.getOutgoing(), StreetEdge.class)) {
        if (mainStreet == null) mainStreet = pse.getName();
        else crossStreet = pse.getName();
    }
    if (mainStreet == null || crossStreet == null) return;
    if (mainStreet.equals(crossStreet)) return;
    Document doc = new Document();
    doc.add(new TextField("name", mainStreet + " & " + crossStreet, Field.Store.YES));
    doc.add(new DoubleField("lat", sv.getLat(), Field.Store.YES));
    doc.add(new DoubleField("lon", sv.getLon(), Field.Store.YES));
    doc.add(new StringField("category", Category.CORNER.name(), Field.Store.YES));
    iwriter.addDocument(doc);
}
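Common to all of the snippets above is the same basic sequence: build a Document, add typed fields, hand it to an IndexWriter, and commit. A minimal, self-contained sketch of that sequence against an in-memory directory, assuming Lucene 8+; the class and field names here are illustrative:

    import org.apache.lucene.analysis.standard.StandardAnalyzer;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.Field;
    import org.apache.lucene.document.StringField;
    import org.apache.lucene.document.TextField;
    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.TermQuery;
    import org.apache.lucene.search.TopDocs;
    import org.apache.lucene.store.ByteBuffersDirectory;
    import org.apache.lucene.store.Directory;

    public class AddDocumentSketch {
        public static void main(String[] args) throws Exception {
            try (Directory dir = new ByteBuffersDirectory();
                 IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
                Document doc = new Document();
                doc.add(new StringField("id", "1", Field.Store.YES));                 // exact-match key, not tokenized
                doc.add(new TextField("name", "Main St & 1st Ave", Field.Store.YES)); // analyzed full text
                writer.addDocument(doc);
                writer.commit(); // make the document visible to subsequently opened readers

                try (DirectoryReader reader = DirectoryReader.open(dir)) {
                    IndexSearcher searcher = new IndexSearcher(reader);
                    TopDocs hits = searcher.search(new TermQuery(new Term("id", "1")), 1);
                    System.out.println(hits.totalHits); // expect 1 hit
                }
            }
        }
    }

StringField vs TextField is the recurring choice in the snippets above: StringField indexes the value as a single untokenized term (ids, categories, keys), while TextField runs it through the analyzer for full-text search.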