/**
 * Asynchronously rebuilds the full-text search index for all configured domain classes.
 *
 * @param batchSize number of objects loaded per batch; non-positive values fall back
 *                  to {@code DEFAULT_BATCH_INDEX_SIZE}
 */
@Async
public void reindexAll(int batchSize) {
    LOGGER.info("Start creating search index.");
    if (batchSize <= 0) {
        batchSize = DEFAULT_BATCH_INDEX_SIZE;
    }
    try {
        FullTextEntityManager fullTextEntityManager = Search.getFullTextEntityManager(entityManager);
        fullTextEntityManager.createIndexer(INDEX_DOMAIN_CLASSES.toArray(new Class[0]))
                .typesToIndexInParallel(INDEX_DOMAIN_CLASSES.size())
                .batchSizeToLoadObjects(batchSize)
                .threadsToLoadObjects(5)
                .idFetchSize(150)
                .startAndWait(); // blocks this async worker until indexing completes
        LOGGER.info("Successfully created search index.");
    } catch (InterruptedException e) {
        // Fix: restore the interrupt flag so the async executor can observe the interruption;
        // swallowing it silently hides the cancellation request.
        Thread.currentThread().interrupt();
        LOGGER.warn("An error occurred trying to build the search index: {}", e.toString());
    }
}
}
// Apply any index work queued in the current transaction before rebuilding.
fullTextEntityManager.flushToIndexes();
// Mass-index the entity type registered under 'key' in the reindex map.
// NOTE(review): startAndWait() blocks and declares InterruptedException —
// presumably handled or propagated by the enclosing method; confirm, and
// ensure the interrupt flag is restored there.
fullTextEntityManager.createIndexer(reindexMap.get(key))
        .batchSizeToLoadObjects(100).cacheMode(CacheMode.NORMAL)
        .threadsToLoadObjects(4).startAndWait();
/**
 * Rebuilds the full-text index for the entity type named by {@code entity}.
 *
 * @param entity logical entity name, resolved to a class via {@code getEntityClass}
 * @throws SearchException if indexing is interrupted before completion
 */
@Override
public void index(String entity) {
    Class<?> clazz = getEntityClass( entity );
    try ( Session session = hibernateSessionFactory.openSession() ) {
        FullTextSession fulltextSession = Search.getFullTextSession( session );
        fulltextSession.createIndexer( clazz )
                .batchSizeToLoadObjects( batchSize )
                .cacheMode( CacheMode.NORMAL )
                .threadsToLoadObjects( numberOfObjectLoadingThreads )
                .startAndWait();
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        // Fix: propagate the original cause instead of discarding it, so the
        // stack trace of the interruption is preserved for diagnosis.
        throw new SearchException( "Unable to complete indexing", e );
    }
}
private void indexClass(Class<?> classToBeIndexed) { StopWatch stopWatch = new StopWatch(); stopWatch.start(); try { getFullTextEntityManager(entityManager) // .createIndexer(classToBeIndexed) // .batchSizeToLoadObjects(batchSizeToLoadObjects) // .threadsToLoadObjects(threadsToLoadObjects) // .threadsForSubsequentFetching(threadsForSubsequentFetching) // .startAndWait(); } catch (InterruptedException e) { log.warn("Interrupted while indexing " + classToBeIndexed.getSimpleName(), e); Thread.currentThread().interrupt(); } finally { stopWatch.stop(); log.info("Indexed {} in {}", classToBeIndexed.getSimpleName(), stopWatch.toString()); } } }
/** * @param clazz */ private long reindexMassIndexer(final Class<?> clazz) { final Session session = sessionFactory.getCurrentSession(); final Criteria criteria = createCriteria(session, clazz, null, true); final Long number = (Long) criteria.uniqueResult(); // Get number of objects to re-index (select count(*) from). log.info("Starting (mass) re-indexing of " + number + " entries of type " + clazz.getName() + "..."); final FullTextSession fullTextSession = Search.getFullTextSession(session); try { fullTextSession.createIndexer(clazz)// .batchSizeToLoadObjects(25) // //.cacheMode(CacheMode.NORMAL) // .threadsToLoadObjects(5) // //.threadsForIndexWriter(1) // .threadsForSubsequentFetching(20) // .startAndWait(); } catch (final InterruptedException ex) { log.error("Exception encountered while reindexing: " + ex.getMessage(), ex); } final SearchFactory searchFactory = fullTextSession.getSearchFactory(); searchFactory.optimize(clazz); log.info("Re-indexing of " + number + " objects of type " + clazz.getName() + " done."); return number; }
@POST @Path("/reindex") public Response reindex(@QueryParam("limit") Long limit) { FullTextEntityManager fullTextEm = Search.getFullTextEntityManager( em ); MassIndexer indexer = fullTextEm.createIndexer( Page.class, User.class ) .purgeAllOnStart( true ) .typesToIndexInParallel( 2 ) .batchSizeToLoadObjects( 25 ) .idFetchSize( 150 ) .threadsToLoadObjects( 10 ) .cacheMode( CacheMode.IGNORE ); // Cache is likely to do more harm than good in our case (very few relations) if ( limit != null ) { indexer.limitIndexedObjectsTo( limit ); } indexer.start(); return Response.accepted().build(); }
/**
 * Populates the full-text indexes for all indexed entities via the mass indexer.
 *
 * @param sessionFactory     factory used to open the indexing session
 * @param nbOfThreadLoad     number of threads used to load entities
 * @param nbOfThreadFetching number of threads used for subsequent fetching
 * @throws InterruptedException if the thread is interrupted while waiting for indexing
 */
private static void populateIndexes(final SessionFactoryImplementor sessionFactory, final int nbOfThreadLoad,
        final int nbOfThreadFetching) throws InterruptedException {
    FullTextSession fullTextSession = null;
    try {
        final Session session = sessionFactory.openSession();
        fullTextSession = Search.getFullTextSession(session);
        final Transaction tx = fullTextSession.beginTransaction();
        final MassIndexer indexer = fullTextSession.createIndexer();
        indexer.batchSizeToLoadObjects(25).cacheMode(CacheMode.NORMAL).optimizeOnFinish(true);
        indexer.threadsToLoadObjects(nbOfThreadLoad).threadsForSubsequentFetching(nbOfThreadFetching).startAndWait();
        // Fix: removed a stray second createIndexer() call that built an indexer
        // which was never configured or started — pure dead code.
        tx.commit();
    } finally {
        // Always release the session, even when indexing fails or is interrupted.
        if (fullTextSession != null) {
            fullTextSession.close();
        }
    }
}
/**
 * Synchronously rebuilds the full-text index for all indexed entities.
 * Annotated NEVER because the mass indexer manages its own transactions and
 * must not run inside a container-managed one.
 *
 * @throws RuntimeException wrapping an InterruptedException if indexing is interrupted
 */
@TransactionAttribute(TransactionAttributeType.NEVER)
public void indexConcerts() {
    try {
        Search.getFullTextEntityManager( entityManager )
                .createIndexer()
                .batchSizeToLoadObjects( 1 )
                .threadsToLoadObjects( 1 )
                .transactionTimeout( 10 )
                .cacheMode( CacheMode.IGNORE )
                .startAndWait();
    } catch (InterruptedException e) {
        // Fix: re-set the interrupt flag before translating to an unchecked
        // exception, so the interruption remains observable upstream.
        Thread.currentThread().interrupt();
        throw new RuntimeException( e );
    }
}
}
/**
 * Rebuilds the full-text index for all indexed entities.
 *
 * @return {@code true} when indexing completed, {@code false} when it failed
 *         or was interrupted
 */
public boolean rebuildIndex() throws InterruptedException {
    FullTextEntityManager fullTextEntityManager = Search
            .getFullTextEntityManager( entityManager );
    try {
        fullTextEntityManager
                .createIndexer()
                .batchSizeToLoadObjects( 30 )
                .threadsToLoadObjects( 4 )
                .cacheMode( CacheMode.NORMAL )
                .startAndWait();
    } catch (InterruptedException e) {
        // Fix: restore the interrupt flag instead of silently absorbing it in
        // the broad catch below; the boolean contract (return false) is kept.
        Thread.currentThread().interrupt();
        return false;
    } catch (Exception e) {
        // NOTE(review): failures are mapped to false with no logging — consider
        // logging the cause so index-build failures are diagnosable.
        return false;
    }
    return true;
}
@Test public void writeThenPurge() throws Exception { flush(); List<Level1> all = getAll(); assertEquals( "Wrong total number of entries", 3, all.size() ); // Expect 0 failure in the backend threads logged.expectLevelMissing( Level.ERROR ); Transaction tx = fullTextSession.beginTransaction(); fullTextSession.index( fullTextSession.get( Level1.class, 1L ) ); tx.commit(); tx = fullTextSession.beginTransaction(); fullTextSession.purgeAll( Level1.class ); tx.commit(); flush(); all = getAll(); assertEquals( "Wrong total number of entries. Index should be empty after purge.", 0, all.size() ); tx = fullTextSession.beginTransaction(); fullTextSession.createIndexer() .batchSizeToLoadObjects( 25 ) .threadsToLoadObjects( 1 ) .optimizeOnFinish( true ) .startAndWait(); tx.commit(); flush(); all = getAll(); assertEquals( "Wrong total number of entries.", 3, all.size() ); }
// Regression test for HSEARCH-2761: multiple purgeAll calls in one transaction,
// in a specific type order, must not cause backend errors and must leave the
// index fully rebuildable by the mass indexer.
@Test
@TestForIssue(jiraKey = "HSEARCH-2761")
public void multiplePurges() throws Exception {
    flush();
    List<Level1> all = getAll();
    assertEquals( "Wrong total number of entries", 3, all.size() );
    // Expect 0 failure in the backend threads
    logged.expectLevelMissing( Level.ERROR );
    Transaction tx = fullTextSession.beginTransaction();
    // Order is significant to reproduce the issue, see HSEARCH-2761
    fullTextSession.purgeAll( Level2.class );
    fullTextSession.purgeAll( Level3.class );
    fullTextSession.purgeAll( Level1.class );
    tx.commit();
    flush();
    all = getAll();
    assertEquals( "Wrong total number of entries. Index should be empty after purge.", 0, all.size() );
    // Rebuild everything with the mass indexer and verify full repopulation.
    tx = fullTextSession.beginTransaction();
    fullTextSession.createIndexer()
            .batchSizeToLoadObjects( 25 )
            .threadsToLoadObjects( 1 )
            .optimizeOnFinish( true )
            .startAndWait();
    tx.commit();
    flush();
    all = getAll();
    assertEquals( "Wrong total number of entries.", 3, all.size() );
}
/**
 * Verifies that entities passing the indexing interceptor (status PUBLISHED)
 * are restored by the mass indexer after a full purge of all three types.
 */
@SuppressWarnings("unchecked")
@Test
public void testInterceptorWithMassIndexer() throws Exception {
    setAllBlogEntriesToStatus( BlogStatus.PUBLISHED );
    List<Blog> entries = getBlogEntries();
    assertEquals( "Wrong total number of entries", 3, entries.size() );
    for ( Blog blog : entries ) {
        assertTrue( blog.getStatus().equals( BlogStatus.PUBLISHED ) );
    }

    // Purge every indexed type, then confirm the index is empty.
    Transaction transaction = fullTextSession.beginTransaction();
    fullTextSession.purgeAll( Blog.class );
    fullTextSession.purgeAll( Article.class );
    fullTextSession.purgeAll( TotalArticle.class );
    transaction.commit();

    entries = fullTextSession.createFullTextQuery( new MatchAllDocsQuery() ).list();
    assertEquals( "Wrong total number of entries. Index should be empty after purge.", 0, entries.size() );

    // Mass-index and verify the interceptor admits all published entries again.
    transaction = fullTextSession.beginTransaction();
    fullTextSession.createIndexer()
            .batchSizeToLoadObjects( 25 )
            .threadsToLoadObjects( 1 )
            .threadsForSubsequentFetching( 2 )
            .optimizeOnFinish( true )
            .startAndWait();
    transaction.commit();

    entries = getBlogEntries();
    assertEquals( "Wrong total number of entries.", 3, entries.size() );
}
/**
 * Mass-indexes every mapped entity type and asserts the progress monitor
 * is flagged finished only after indexing completes.
 *
 * @throws InterruptedException if the indexing wait is interrupted
 */
private void reindexAll() throws InterruptedException {
    FullTextSession session = builder.openFullTextSession();
    SilentProgressMonitor monitor = new SilentProgressMonitor();
    Assert.assertFalse( monitor.finished );
    try {
        // Object.class as target — presumably selects every indexed type; confirm
        // against the MassIndexer contract for supertypes.
        session.createIndexer( Object.class )
                .threadsForSubsequentFetching( 8 )
                .threadsToLoadObjects( 4 )
                .batchSizeToLoadObjects( 30 )
                .progressMonitor( monitor )
                .startAndWait();
    } finally {
        session.close();
    }
    Assert.assertTrue( monitor.finished );
}