/**
 * Reads a single field from the data stream and reconstructs the
 * corresponding Lucene {@link Field}: one flags byte (indexed / tokenized /
 * stored bits as written by {@code IndexDataWriter}), then the field name,
 * then the field value.
 *
 * @return the reconstructed field
 * @throws IOException if reading from the underlying stream fails
 */
private Field readField()
    throws IOException
{
    final int flags = dis.read();

    final boolean indexed = ( flags & IndexDataWriter.F_INDEXED ) > 0;
    final boolean tokenized = ( flags & IndexDataWriter.F_TOKENIZED ) > 0;
    final boolean stored = ( flags & IndexDataWriter.F_STORED ) > 0;

    Index indexMode;
    if ( !indexed )
    {
        indexMode = Index.NO;
    }
    else if ( tokenized )
    {
        indexMode = Index.ANALYZED;
    }
    else
    {
        indexMode = Index.NOT_ANALYZED;
    }

    Store storeMode = stored ? Store.YES : Store.NO;

    // name uses standard modified-UTF; value goes through the custom
    // readUTF helper
    String fieldName = dis.readUTF();
    String fieldValue = readUTF( dis );

    return new Field( fieldName, fieldValue, storeMode, indexMode );
}
/**
 * Unpack index data from the given stream into the supplied Lucene directory,
 * creating a fresh index. The writer is always closed, even on failure.
 *
 * @param is an input stream to unpack index data from
 * @param d the Lucene directory that receives the unpacked index
 * @param context the indexing context used to update unpacked documents
 * @return the read result holding the document count and index timestamp
 * @throws IOException if reading the stream or writing the index fails
 */
public static IndexDataReadResult unpackIndexData( final InputStream is, final Directory d,
                                                   final IndexingContext context )
    throws IOException
{
    // 'true' -> create a brand-new index in the target directory
    NexusIndexWriter w = new NexusIndexWriter( d, new NexusAnalyzer(), true );
    try
    {
        IndexDataReader dr = new IndexDataReader( is );
        return dr.readIndex( w, context );
    }
    finally
    {
        // ensure the writer is released even if readIndex throws
        IndexUtils.close( w );
    }
}
/**
 * Reads the packed index transfer format from the underlying stream and
 * feeds every document into the given writer, updating each one via the
 * supplied indexing context.
 *
 * @param w the index writer receiving the unpacked documents
 * @param context the indexing context used to update each document
 * @return the read result holding the document count and index timestamp
 * @throws IOException if reading the stream or writing the index fails
 */
public IndexDataReadResult readIndex( IndexWriter w, IndexingContext context )
    throws IOException
{
    final long timestamp = readHeader();

    Date date = null;
    if ( timestamp != -1 )
    {
        date = new Date( timestamp );
        IndexUtils.updateTimestamp( w.getDirectory(), date );
    }

    int documentCount = 0;
    for ( Document doc = readDocument(); doc != null; doc = readDocument() )
    {
        w.addDocument( IndexUtils.updateDocument( doc, context, false ) );
        documentCount++;
    }

    // flush, collapse the index to a single segment, then flush again
    w.commit();
    w.forceMerge( 1 );
    w.commit();

    IndexDataReadResult result = new IndexDataReadResult();
    result.setDocumentCount( documentCount );
    result.setTimestamp( date );
    return result;
}
while ( ( doc = readDocument() ) != null )
/**
 * Unpack index data from the given stream into the supplied Lucene directory,
 * creating a fresh index. The writer is always closed, even on failure.
 *
 * @param is an input stream to unpack index data from
 * @param d the Lucene directory that receives the unpacked index
 * @param context the indexing context used to update unpacked documents
 * @return the read result holding the document count and index timestamp
 * @throws IOException if reading the stream or writing the index fails
 */
public static IndexDataReadResult unpackIndexData( final InputStream is, final Directory d,
                                                   final IndexingContext context )
    throws IOException
{
    // 'true' -> create a brand-new index in the target directory
    NexusIndexWriter w = new NexusIndexWriter( d, new NexusAnalyzer(), true );
    try
    {
        IndexDataReader dr = new IndexDataReader( is );
        return dr.readIndex( w, context );
    }
    finally
    {
        // ensure the writer is released even if readIndex throws
        IndexUtils.close( w );
    }
}
public IndexDataReadResult readIndex( IndexWriter w, IndexingContext context ) throws IOException long timestamp = readHeader(); Set<String> allGroups = new LinkedHashSet<>(); while ( ( doc = readDocument() ) != null )
while ( ( doc = readDocument() ) != null )
doc.add( readField() );
/**
 * Unpack index data from the given stream into the supplied Lucene directory,
 * creating a fresh index, and return only the timestamp of the unpacked
 * index. The writer is always closed, even on failure.
 *
 * @param is an input stream to unpack index data from
 * @param d the Lucene directory that receives the unpacked index
 * @param context the indexing context used to update unpacked documents
 * @return the timestamp of the unpacked index, or {@code null} if the
 *         transfer header carried no timestamp
 * @throws IOException if reading the stream or writing the index fails
 */
public static Date unpackIndexData( final InputStream is, final Directory d,
                                    final IndexingContext context )
    throws IOException
{
    // 'true' -> create a brand-new index in the target directory
    NexusIndexWriter w = new NexusIndexWriter( d, new NexusAnalyzer(), true );
    try
    {
        IndexDataReader dr = new IndexDataReader( is );
        IndexDataReadResult result = dr.readIndex( w, context );
        return result.getTimestamp();
    }
    finally
    {
        // ensure the writer is released even if readIndex throws
        IndexUtils.close( w );
    }
}
public IndexDataReadResult readIndex( IndexWriter w, IndexingContext context ) throws IOException long timestamp = readHeader(); Set<String> allGroups = new LinkedHashSet<>(); while ( ( doc = readDocument() ) != null )
/**
 * Reads one field from the data stream — a flags byte, the field name, then
 * the field value — and reconstructs the corresponding Lucene {@link Field}.
 *
 * @return the reconstructed field
 * @throws IOException if reading from the underlying stream fails
 */
private Field readField()
    throws IOException
{
    // flags byte encodes the indexed / tokenized / stored bits written by
    // IndexDataWriter
    int flags = dis.read();
    Index index = Index.NO;
    if ( ( flags & IndexDataWriter.F_INDEXED ) > 0 )
    {
        boolean isTokenized = ( flags & IndexDataWriter.F_TOKENIZED ) > 0;
        index = isTokenized ? Index.ANALYZED : Index.NOT_ANALYZED;
    }
    Store store = Store.NO;
    if ( ( flags & IndexDataWriter.F_STORED ) > 0 )
    {
        store = Store.YES;
    }
    // name uses standard modified-UTF; value goes through the custom readUTF
    // helper — presumably to support longer strings, TODO confirm against
    // IndexDataWriter's write side
    String name = dis.readUTF();
    String value = readUTF( dis );
    return new Field( name, value, store, index );
}
while ( ( doc = readDocument() ) != null )
doc.add( readField() );
/**
 * Deserializes the next field from the data stream: a flags byte followed by
 * the field name and the field value, yielding a Lucene {@link Field}
 * configured from the flag bits.
 *
 * @return the deserialized field
 * @throws IOException if reading from the underlying stream fails
 */
private Field readField()
    throws IOException
{
    final int flagBits = dis.read();

    Index indexing = Index.NO;
    if ( ( flagBits & IndexDataWriter.F_INDEXED ) != 0 )
    {
        indexing = ( ( flagBits & IndexDataWriter.F_TOKENIZED ) != 0 )
            ? Index.ANALYZED
            : Index.NOT_ANALYZED;
    }

    final Store storage = ( ( flagBits & IndexDataWriter.F_STORED ) != 0 ) ? Store.YES : Store.NO;

    // arguments evaluate left to right: name is read before value, matching
    // the write order
    return new Field( dis.readUTF(), readUTF( dis ), storage, indexing );
}