/**
 * @see org.openmrs.api.db.ContextDAO#updateSearchIndexForObject(java.lang.Object)
 */
@Override
@Transactional
public void updateSearchIndexForObject(Object object) {
	// Re-index just this one object, then apply the queued index work
	// immediately instead of waiting for the transaction boundary.
	FullTextSession fullTextSession = Search.getFullTextSession(sessionFactory.getCurrentSession());
	fullTextSession.index(object);
	fullTextSession.flushToIndexes();
}
// Every 1000 indexed entities, apply the queued index work and clear the
// persistence context to keep memory bounded during bulk indexing.
// Fix: the original called flushToIndexes() twice in a row — the second
// call is a no-op because the work queue is already empty after the first.
if (index % 1000 == 0) {
	session.flushToIndexes();
	session.clear();
/**
 * Flushes the pending full-text index work queue; intended to run after a
 * reindex() or reindexAll() operation so the indexes reflect all queued changes.
 */
public void flushSearchIndexes() {
	final FullTextSession fullTextSession = Search.getFullTextSession(sessionFactory.getCurrentSession());
	fullTextSession.flushToIndexes();
}
}
@Override
void go(FullTextSession fullTextSession) {
	// Remove the document identified by (type, id) from the index, then
	// force the queued deletion to be applied immediately.
	fullTextSession.purge(type, id);
	fullTextSession.flushToIndexes();
}
@Override
void go(FullTextSession fullTextSession) {
	// Queue index work for the captured object, then apply it immediately.
	fullTextSession.index(obj);
	fullTextSession.flushToIndexes();
}
/**
 * Flushes any queued Hibernate Search index work for the current session
 * when the LUCENE_FLUSH_ALWAYS flag is enabled, logging the elapsed time.
 */
protected void flushSearchSession() {
	long begin = System.currentTimeMillis();
	// Test the boolean flag directly; comparing "== true" is redundant.
	if (LUCENE_FLUSH_ALWAYS) {
		Search.getFullTextSession(getSession()).flushToIndexes();
	}
	long end = System.currentTimeMillis();
	log.info("BaseDao.flushSearchSession took: " + (end - begin) + " ms.");
}
/**
 * Regenerates the index for a given class.
 *
 * @param clazz the class to reindex
 * @param sess the hibernate session
 */
public static void reindex(Class<?> clazz, Session sess) {
	FullTextSession txtSession = Search.getFullTextSession(sess);
	MassIndexer massIndexer = txtSession.createIndexer(clazz);
	try {
		massIndexer.startAndWait();
	}
	catch (InterruptedException e) {
		// Restore the interrupt status so callers can still observe the
		// interruption, and keep the full exception in the log.
		Thread.currentThread().interrupt();
		log.error("mass reindexing interrupted: " + e.getMessage(), e);
	}
	finally {
		txtSession.flushToIndexes();
	}
}
/**
 * Regenerates all the indexed class indexes.
 *
 * @param async true if the reindexing will be done as a background thread
 * @param sess the hibernate session
 */
public static void reindexAll(boolean async, Session sess) {
	FullTextSession txtSession = Search.getFullTextSession(sess);
	MassIndexer massIndexer = txtSession.createIndexer();
	// Drop all existing documents before rebuilding, so stale entries
	// for deleted entities cannot survive the reindex.
	massIndexer.purgeAllOnStart(true);
	try {
		if (!async) {
			massIndexer.startAndWait();
		}
		else {
			massIndexer.start();
		}
	}
	catch (InterruptedException e) {
		// Restore the interrupt status so callers can still observe the
		// interruption, and keep the full exception in the log.
		Thread.currentThread().interrupt();
		log.error("mass reindexing interrupted: " + e.getMessage(), e);
	}
	finally {
		txtSession.flushToIndexes();
	}
}
}
/**
 * Removes every index document of the given entity type for the given tenant,
 * applying the deletion to the index before the session is released.
 *
 * @param tenantId the tenant whose documents are purged
 * @param entityType the indexed entity type to purge
 * @throws IOException declared for callers; not thrown by the visible code
 */
private void purgeAll(String tenantId, Class<?> entityType) throws IOException {
	FullTextSession session = Search.getFullTextSession( openSessionWithTenantId( tenantId ) );
	try {
		session.purgeAll( entityType );
		session.flushToIndexes();
	}
	finally {
		// Close the session even when purge/flush throws, to avoid leaking it.
		session.close();
	}
}
// Bulk-index all Item rows through a dedicated session.
// NOTE(review): this session and its transaction are never closed / rolled
// back on failure in this fragment — presumably handled by enclosing code; confirm.
FullTextSession session = Search.getFullTextSession(getSessionFactory().openSession());
// Manual flush mode + no cache interaction: pure read-and-index pass.
session.setFlushMode(FlushMode.MANUAL);
session.setCacheMode(CacheMode.IGNORE);
Transaction transaction = session.beginTransaction();
// Scroll forward-only so the full result set is never held in memory at once.
ScrollableResults items = session.createQuery("from Item i ")
.scroll(ScrollMode.FORWARD_ONLY);
while(items.next()) {
Object nextToIndex = items.get(0);
session.index(nextToIndex);
// Flushing index work and clearing the persistence context after EVERY
// element keeps memory flat but forgoes batching (often done every N items).
session.flushToIndexes();
session.clear();
}
transaction.commit();
/**
 * Purges all index documents of the given entity type for the given tenant,
 * then asserts that no document with that tenant id remains in the index.
 *
 * @param entityType the indexed entity type to purge
 * @param tenantId the tenant whose documents are purged and verified
 * @throws IOException declared for callers; not thrown by the visible code
 */
private void purgeAll(Class<?> entityType, String tenantId) throws IOException {
	FullTextSession session = Search.getFullTextSession( openSessionWithTenantId( tenantId ) );
	try {
		session.purgeAll( entityType );
		session.flushToIndexes();
	}
	finally {
		// Release the session even if purge/flush fails; the original leaked it on error.
		session.close();
	}
	String indexName = getExtendedSearchIntegrator().getIndexBindings().get( entityType )
			.getIndexManagerSelector().all().iterator().next().getIndexName();
	assertThat( getNumberOfDocumentsInIndexByQuery( indexName,
			DocumentBuilderIndexedEntity.TENANT_ID_FIELDNAME, tenantId ) ).isEqualTo( 0 );
}
session.save( current );
// Every 10000 lines, apply queued index work and end the unit of work.
// NOTE(review): committing and closing the session inside the loop means a
// fresh session/transaction must be opened elsewhere before the next batch —
// confirm against the surrounding loop, which is not visible here.
if ( ( line_number % 10000 ) == 0 ) {
fullTextSession.flushToIndexes();
session.getTransaction().commit();
session.close();
@Override public String process() throws Exception { boolean purgeAllOnStart = SerializationUtil.parseBooleanParameterOptional( PURGE_ALL_ON_START, serializedPurgeAllOnStart, Defaults.PURGE_ALL_ON_START ); boolean optimizeAfterPurge = SerializationUtil.parseBooleanParameterOptional( OPTIMIZE_AFTER_PURGE, serializedOptimizeAfterPurge, Defaults.OPTIMIZE_AFTER_PURGE ); if ( purgeAllOnStart ) { JobContextData jobData = (JobContextData) jobContext.getTransientUserData(); EntityManagerFactory emf = jobData.getEntityManagerFactory(); try ( Session session = PersistenceUtil.openSession( emf, tenantId ) ) { FullTextSession fts = Search.getFullTextSession( session ); jobData.getEntityTypes().forEach( clz -> fts.purgeAll( clz ) ); // This is necessary because the batchlet is not executed inside a transaction fts.flushToIndexes(); if ( optimizeAfterPurge ) { log.startOptimization(); fts.getSearchFactory().optimize(); } } } return null; } }
public Object doInJpa(EntityManager em) throws PersistenceException { FullTextSession fullTextSession = getFullTextSession(getHibernateSession(em)); fullTextSession.purgeAll(Webbis.class); // Do not update the second level cache. It will just slow things down. fullTextSession.setCacheMode(CacheMode.GET); // Read 5000 entries at a time. final int BATCH_SIZE = 5000; // Due to a bug in Hibernate (HHH-1283) a join does not work here. // See http://opensource.atlassian.com/projects/hibernate/browse/HHH-1283 ScrollableResults results = fullTextSession.createQuery("from Webbis w").scroll(); int index = 0; while (results.next()) { index++; fullTextSession.index(results.get(0)); // index each element if (index % BATCH_SIZE == 0) { fullTextSession.flushToIndexes(); // apply changes to indexes fullTextSession.clear(); // clear since the queue is processed } } return null; } });
// Persist the entity and push the SQL to the database...
session.persist( k );
session.flush();
// ...then force the resulting full-text index work to be applied now,
// rather than waiting for the transaction to complete.
Search.getFullTextSession( session ).flushToIndexes();
// Persist an entity with a misconfigured bridge: flushing the index work is
// expected to throw, so reaching fail() means the bad bridge was accepted.
s.persist( incorrect );
s.flush();
s.flushToIndexes();
fail( "Incorrect bridge should fail" );
/**
 * Purges all index documents of the given entity type and asserts that the
 * index is empty afterwards.
 *
 * @param entityType the indexed entity type to purge and verify
 * @throws IOException if opening or closing the index reader fails
 */
private void purgeAll(Class<?> entityType) throws IOException {
	FullTextSession session = Search.getFullTextSession( openSession() );
	final int numDocs;
	try {
		session.purgeAll( entityType );
		session.flushToIndexes();
		try ( IndexReader indexReader = session.getSearchFactory().getIndexReaderAccessor().open( entityType ) ) {
			numDocs = indexReader.numDocs();
		}
	}
	finally {
		// Release the session even if the purge or reader access throws;
		// the original leaked it on error. Closed before the assert, as before.
		session.close();
	}
	assertThat( numDocs ).isEqualTo( 0 );
}
// Persist an entity with a misconfigured bridge: flushing the index work is
// expected to throw, so reaching fail() means the bad bridge was accepted.
s.persist( incorrect );
s.flush();
s.flushToIndexes();
fail( "Incorrect bridge should fail" );
/**
 * Deletes the rows created during the test run (ids below the initial
 * offset) and purges the corresponding Book documents from the index.
 */
private void performCleanUp(TestContext ctx) {
	log( "starting clean up phase" );
	try ( Session s = ctx.sessionFactory.openSession() ) {
		final SessionImplementor session = (SessionImplementor) s;
		FullTextSession fulltextSession = Search.getFullTextSession( s );
		beginTransaction( session );
		// Delete the join table first so foreign-key constraints on book/author
		// are not violated — presumably book_author references both; confirm schema.
		s.createNativeQuery( "delete from book_author where book_id < :id" ).setParameter( "id", ctx.initialOffset ).executeUpdate();
		s.createNativeQuery( "delete from book where id < :id" ).setParameter( "id", ctx.initialOffset ).executeUpdate();
		s.createNativeQuery( "delete from author where id < :id" ).setParameter( "id", ctx.initialOffset ).executeUpdate();
		// Native SQL deletes bypass the indexing event listeners, so the index
		// must be purged explicitly and the purge flushed before commit.
		fulltextSession.purgeAll( Book.class );
		fulltextSession.flushToIndexes();
		commitTransaction( session );
	}
}
s.index( o );
// Every 5 indexed objects, apply the queued index work and clear the
// persistence context to keep memory bounded during bulk indexing.
if ( index % 5 == 0 ) {
s.flushToIndexes();
s.clear();