/**
 * Create an initial Lucene index for the data already present in the
 * database.
 * <p>
 * This method is called on Spring's startup, once the application context
 * is fully ready.
 *
 * @param event the event signalling that the application has started
 */
@Override
public void onApplicationEvent(final ApplicationReadyEvent event) {
    try {
        FullTextEntityManager fullTextEntityManager = Search.getFullTextEntityManager(entityManager);
        // Blocks until the whole database has been indexed.
        fullTextEntityManager.createIndexer().startAndWait();
    } catch (InterruptedException e) {
        // Restore the interrupt flag so code further up the stack can observe it.
        Thread.currentThread().interrupt();
        // Fixed "serach" typo; NOTE(review): consider routing this through a logger.
        System.out.println("An error occurred trying to build the search index: " + e);
    }
}
/**
 * @see ContextDAO#updateSearchIndexAsync()
 */
@Override
public Future<?> updateSearchIndexAsync() {
    try {
        log.info("Started asynchronously updating the search index...");
        // Kick off the mass indexer without waiting for it to finish;
        // the returned Future lets callers track completion.
        FullTextSession fullTextSession = Search.getFullTextSession(sessionFactory.getCurrentSession());
        return fullTextSession.createIndexer().start();
    } catch (Exception e) {
        throw new RuntimeException("Failed to start asynchronous search index update", e);
    }
}
/**
 * Rebuilds the full-text indexes for the data reachable through the given
 * session factory, using the Hibernate Search MassIndexer.
 *
 * @param sessionFactory     factory used to open the dedicated indexing session
 * @param nbOfThreadLoad     number of threads used to load the root entities
 * @param nbOfThreadFetching number of threads used for subsequent fetching
 * @throws InterruptedException if the calling thread is interrupted while
 *         waiting for indexing to complete
 */
private static void populateIndexes(final SessionFactoryImplementor sessionFactory,
        final int nbOfThreadLoad, final int nbOfThreadFetching) throws InterruptedException {
    FullTextSession fullTextSession = null;
    try {
        final Session session = sessionFactory.openSession();
        fullTextSession = Search.getFullTextSession(session);
        final Transaction tx = fullTextSession.beginTransaction();
        final MassIndexer indexer = fullTextSession.createIndexer();
        indexer.batchSizeToLoadObjects(25).cacheMode(CacheMode.NORMAL).optimizeOnFinish(true);
        // startAndWait() blocks until the whole index has been rebuilt.
        indexer.threadsToLoadObjects(nbOfThreadLoad)
                .threadsForSubsequentFetching(nbOfThreadFetching)
                .startAndWait();
        // Removed a stray second createIndexer() call here: it built an
        // unused MassIndexer and did nothing.
        tx.commit();
    } finally {
        // Closing the FullTextSession also releases the wrapped Session.
        if (fullTextSession != null) {
            fullTextSession.close();
        }
    }
}
/**
 * Rebuilds the full-text index for the entity identified by the given name,
 * blocking until indexing completes.
 *
 * @param entity logical entity name resolved to a class via {@code getEntityClass}
 * @throws SearchException if the indexing thread is interrupted before completion
 */
@Override
public void index(String entity) {
    Class<?> clazz = getEntityClass( entity );
    try ( Session session = hibernateSessionFactory.openSession() ) {
        FullTextSession fulltextSession = Search.getFullTextSession( session );
        fulltextSession.createIndexer( clazz )
                .batchSizeToLoadObjects( batchSize )
                .cacheMode( CacheMode.NORMAL )
                .threadsToLoadObjects( numberOfObjectLoadingThreads )
                .startAndWait();
    }
    catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        // Preserve the cause so the interruption remains diagnosable upstream.
        throw new SearchException( "Unable to complete indexing", e );
    }
}
@POST @Path("/reindex") public Response reindex(@QueryParam("limit") Long limit) { FullTextEntityManager fullTextEm = Search.getFullTextEntityManager( em ); MassIndexer indexer = fullTextEm.createIndexer( Page.class, User.class ) .purgeAllOnStart( true ) .typesToIndexInParallel( 2 ) .batchSizeToLoadObjects( 25 ) .idFetchSize( 150 ) .threadsToLoadObjects( 10 ) .cacheMode( CacheMode.IGNORE ); // Cache is likely to do more harm than good in our case (very few relations) if ( limit != null ) { indexer.limitIndexedObjectsTo( limit ); } indexer.start(); return Response.accepted().build(); }
/**
 * Asynchronously rebuilds the whole search index for all indexed domain classes.
 *
 * @param batchSize number of objects loaded per batch; non-positive values
 *                  fall back to {@code DEFAULT_BATCH_INDEX_SIZE}
 */
@Async
public void reindexAll(int batchSize) {
    LOGGER.info("Start creating search index.");
    if (batchSize <= 0) {
        batchSize = DEFAULT_BATCH_INDEX_SIZE;
    }
    try {
        FullTextEntityManager fullTextEntityManager = Search.getFullTextEntityManager(entityManager);
        fullTextEntityManager.createIndexer(INDEX_DOMAIN_CLASSES.toArray(new Class[0]))
                .typesToIndexInParallel(INDEX_DOMAIN_CLASSES.size())
                .batchSizeToLoadObjects(batchSize)
                .threadsToLoadObjects(5)
                .idFetchSize(150)
                .startAndWait();
        LOGGER.info("Successfully created search index.");
    } catch (InterruptedException e) {
        // Restore the interrupt flag so the async executor can observe it;
        // previously the interruption was silently swallowed.
        Thread.currentThread().interrupt();
        LOGGER.warn("An error occurred trying to build the search index: {}", e.toString());
    }
}
}
/**
 * Regenerates all the indexed class indexes
 *
 * @param async true if the reindexing will be done as a background thread
 * @param entityManager the entity manager
 */
public static void reindexAll(boolean async, EntityManager entityManager) {
    FullTextEntityManager txtentityManager = Search.getFullTextEntityManager(entityManager);
    MassIndexer massIndexer = txtentityManager.createIndexer();
    massIndexer.purgeAllOnStart(true);
    try {
        if (!async) {
            // Block until the whole index has been rebuilt.
            massIndexer.startAndWait();
        } else {
            massIndexer.start();
        }
    } catch (InterruptedException e) {
        // Restore the interrupt flag (previously swallowed) and keep the
        // stack trace in the log record instead of only the message.
        Thread.currentThread().interrupt();
        log.error("mass reindexing interrupted: " + e.getMessage(), e);
    } finally {
        txtentityManager.flushToIndexes();
    }
}
}
/**
 * Rebuilds the index for the given entity type under the given tenant and
 * asserts that at least one document was written for that tenant.
 *
 * @param entityType the indexed entity class to rebuild
 * @param tenantId   tenant identifier used to open the session and filter documents
 */
private void rebuildIndexWithMassIndexer(Class<?> entityType, String tenantId) throws Exception {
    FullTextSession session = Search.getFullTextSession( openSessionWithTenantId( tenantId ) );
    try {
        session.createIndexer( entityType ).purgeAllOnStart( true ).startAndWait();
    }
    finally {
        // Close even when indexing fails so the session never leaks.
        session.close();
    }
    String indexName = getExtendedSearchIntegrator().getIndexBindings().get( entityType )
            .getIndexManagerSelector().all().iterator().next().getIndexName();
    assertThat( getNumberOfDocumentsInIndexByQuery( indexName,
            DocumentBuilderIndexedEntity.TENANT_ID_FIELDNAME, tenantId ) ).isGreaterThan( 0 );
}
/**
 * Verifies MassIndexer progress reporting: zero documents on an empty
 * database, then exactly one after a single entity is persisted.
 */
@Test
public void testBatchIndexing() throws Exception {
    SessionFactory sessionFactory = (SessionFactory) bundleContext.getService( serviceReference );
    FullTextSession fullTextSession = Search.getFullTextSession( sessionFactory.openSession() );

    // First run: the database is empty, so no indexing progress is expected.
    AssertingMassIndexerProgressMonitor emptyRunMonitor = new AssertingMassIndexerProgressMonitor( 0 );
    fullTextSession.createIndexer( Muppet.class ).progressMonitor( emptyRunMonitor ).startAndWait();
    emptyRunMonitor.assertExpectedProgressMade();

    // Second run: one entity persisted, so exactly one document must be indexed.
    persistElmo( fullTextSession );
    AssertingMassIndexerProgressMonitor singleEntityMonitor = new AssertingMassIndexerProgressMonitor( 1 );
    fullTextSession.createIndexer( Muppet.class ).progressMonitor( singleEntityMonitor ).startAndWait();
    singleEntityMonitor.assertExpectedProgressMade();
}
/**
 * Checks that running the MassIndexer with {@code idFetchSize(Integer.MIN_VALUE)}
 * reports no error on MySQL.
 */
@Test
@RequiresDialect(comment = "MySQL definitely should accept Integer.MIN_VALUE",
        strictMatching = false, value = org.hibernate.dialect.MySQLDialect.class)
public void testSetFetchSizeOnMySQL() throws InterruptedException {
    SearchIntegrator searchIntegrator = getExtendedSearchIntegrator();
    MockErrorHandler mockErrorHandler = MassIndexerErrorReportingTest.getErrorHandler( searchIntegrator );
    FullTextSession fullTextSession = MassIndexerErrorReportingTest.prepareSomeData( this );

    fullTextSession.createIndexer( Book.class ).idFetchSize( Integer.MIN_VALUE ).startAndWait();
    getSession().close();

    // No error should have been recorded by the error handler.
    Assert.assertEquals( null, mockErrorHandler.getErrorMessage() );
}
.optimizeAfterPurge( true ) .optimizeOnFinish( true ) .startAndWait();
private void indexClass(Class<?> classToBeIndexed) { StopWatch stopWatch = new StopWatch(); stopWatch.start(); try { getFullTextEntityManager(entityManager) // .createIndexer(classToBeIndexed) // .batchSizeToLoadObjects(batchSizeToLoadObjects) // .threadsToLoadObjects(threadsToLoadObjects) // .threadsForSubsequentFetching(threadsForSubsequentFetching) // .startAndWait(); } catch (InterruptedException e) { log.warn("Interrupted while indexing " + classToBeIndexed.getSimpleName(), e); Thread.currentThread().interrupt(); } finally { stopWatch.stop(); log.info("Indexed {} in {}", classToBeIndexed.getSimpleName(), stopWatch.toString()); } } }
fullTextEntityManager.flushToIndexes(); fullTextEntityManager.createIndexer(reindexMap.get(key)) .batchSizeToLoadObjects(100).cacheMode(CacheMode.NORMAL) .threadsToLoadObjects(4).startAndWait();
/**
 * Regenerates all the indexed class indexes
 *
 * @param async true if the reindexing will be done as a background thread
 * @param sess the hibernate session
 */
public static void reindexAll(boolean async, Session sess) {
    FullTextSession txtSession = Search.getFullTextSession(sess);
    MassIndexer massIndexer = txtSession.createIndexer();
    massIndexer.purgeAllOnStart(true);
    try {
        if (!async) {
            // Block until the whole index has been rebuilt.
            massIndexer.startAndWait();
        } else {
            massIndexer.start();
        }
    } catch (InterruptedException e) {
        // Restore the interrupt flag (previously swallowed) and keep the
        // stack trace in the log record instead of only the message.
        Thread.currentThread().interrupt();
        log.error("mass reindexing interrupted: " + e.getMessage(), e);
    } finally {
        txtSession.flushToIndexes();
    }
}
}
/**
 * Runs the MassIndexer for the given entity type, then asserts that at least
 * one document was written to its index.
 *
 * @param entityType the indexed entity class to rebuild
 */
private void startAndWaitMassIndexing(Class<?> entityType) throws InterruptedException, IOException {
    FullTextSession session = Search.getFullTextSession( openSession() );
    try {
        session.createIndexer( entityType ).purgeAllOnStart( true ).startAndWait();
        final int numDocs;
        try ( IndexReader indexReader = session.getSearchFactory().getIndexReaderAccessor().open( entityType ) ) {
            numDocs = indexReader.numDocs();
        }
        assertThat( numDocs ).isGreaterThan( 0 );
    }
    finally {
        // Previously the session leaked when indexing or the assertion failed.
        session.close();
    }
}
/**
 * Runs a blocking mass-indexing pass over all Foo instances, reporting
 * progress to the supplied monitor.
 *
 * @param monitor receives progress callbacks from the MassIndexer
 */
private void massIndexFooInstances(MassIndexerProgressMonitor monitor) throws InterruptedException {
    FullTextSession fullTextSession = Search.getFullTextSession( openSession() );
    fullTextSession.createIndexer( Foo.class )
            .progressMonitor( monitor )
            .startAndWait();
    fullTextSession.close();
}
/**
 * Checks that a negative fetch size makes identifier fetching fail on H2 and
 * that the failure surfaces through the configured error handler.
 */
@Test
@RequiresDialect(comment = "H2 does not accept negative fetch sizes",
        strictMatching = true, value = org.hibernate.dialect.H2Dialect.class)
public void testSetFetchSizeOnH2Fails() throws InterruptedException {
    SearchIntegrator searchIntegrator = getExtendedSearchIntegrator();
    MockErrorHandler mockErrorHandler = MassIndexerErrorReportingTest.getErrorHandler( searchIntegrator );
    FullTextSession fullTextSession = MassIndexerErrorReportingTest.prepareSomeData( this );

    fullTextSession.createIndexer( Book.class ).idFetchSize( -1 ).startAndWait();
    getSession().close();

    Assert.assertEquals(
            "HSEARCH000211: An exception occurred while the MassIndexer was fetching the primary identifiers list",
            mockErrorHandler.getErrorMessage() );
    Assert.assertTrue(
            mockErrorHandler.getLastException() instanceof org.hibernate.exception.GenericJDBCException );
}
/** * @param clazz */ private long reindexMassIndexer(final Class<?> clazz) { final Session session = sessionFactory.getCurrentSession(); final Criteria criteria = createCriteria(session, clazz, null, true); final Long number = (Long) criteria.uniqueResult(); // Get number of objects to re-index (select count(*) from). log.info("Starting (mass) re-indexing of " + number + " entries of type " + clazz.getName() + "..."); final FullTextSession fullTextSession = Search.getFullTextSession(session); try { fullTextSession.createIndexer(clazz)// .batchSizeToLoadObjects(25) // //.cacheMode(CacheMode.NORMAL) // .threadsToLoadObjects(5) // //.threadsForIndexWriter(1) // .threadsForSubsequentFetching(20) // .startAndWait(); } catch (final InterruptedException ex) { log.error("Exception encountered while reindexing: " + ex.getMessage(), ex); } final SearchFactory searchFactory = fullTextSession.getSearchFactory(); searchFactory.optimize(clazz); log.info("Re-indexing of " + number + " objects of type " + clazz.getName() + " done."); return number; }
/**
 * Rebuilds the whole search index, blocking until it completes.
 *
 * @return {@code true} when indexing finished successfully, {@code false} otherwise
 * @throws InterruptedException declared for API compatibility; interruption is
 *         handled internally and reported via the {@code false} return value
 */
public boolean rebuildIndex() throws InterruptedException {
    FullTextEntityManager fullTextEntityManager = Search
            .getFullTextEntityManager( entityManager );
    try {
        fullTextEntityManager
                .createIndexer()
                .batchSizeToLoadObjects( 30 )
                .threadsToLoadObjects( 4 )
                .cacheMode( CacheMode.NORMAL )
                .startAndWait();
    } catch (InterruptedException e) {
        // Restore the interrupt flag instead of silently swallowing it.
        Thread.currentThread().interrupt();
        return false;
    } catch (Exception e) {
        // Keep the boolean contract: report failure rather than propagating.
        return false;
    }
    return true;
}
/**
 * @see ContextDAO#updateSearchIndex()
 */
@Override
public void updateSearchIndex() {
    try {
        log.info("Updating the search index... It may take a few minutes.");
        // Rebuild the whole index synchronously.
        Search.getFullTextSession(sessionFactory.getCurrentSession()).createIndexer().startAndWait();

        // Record the index version as a global property so future startups
        // can tell the index is current.
        GlobalProperty gp = Context.getAdministrationService().getGlobalPropertyObject(
                OpenmrsConstants.GP_SEARCH_INDEX_VERSION);
        if (gp == null) {
            gp = new GlobalProperty(OpenmrsConstants.GP_SEARCH_INDEX_VERSION);
        }
        gp.setPropertyValue(OpenmrsConstants.SEARCH_INDEX_VERSION.toString());
        Context.getAdministrationService().saveGlobalProperty(gp);

        log.info("Finished updating the search index");
    } catch (Exception e) {
        throw new RuntimeException("Failed to update the search index", e);
    }
}