/**
 * Executes the given more-like-this query against the Coffee index and
 * returns, for each hit, the projected [id, score] array.
 */
private List<Object[]> doQuery(Query mltQuery) {
	List<EntityInfo> entityInfos = helper.hsQuery( mltQuery, Coffee.class )
			.projection( ProjectionConstants.ID, ProjectionConstants.SCORE )
			.queryEntityInfos();
	return entityInfos.stream()
			.map( entityInfo -> entityInfo.getProjection() )
			.collect( Collectors.toList() );
}
public ListAssert asResultProjectionsAsLists() { HSQuery hsQuery = getHSQuery(); List<EntityInfo> results = hsQuery.queryEntityInfos(); List<List<Object>> projections = results.stream() .map( EntityInfo::getProjection ) .map( Arrays::asList ) // Take advantage of List.equals when calling ListAssert.containsExactly, for instance .collect( Collectors.toList() ); return Assertions.assertThat( projections ) .as( "Projections of results of query " + toString( hsQuery ) ); }
@Override
public List<E> list() throws SearchException {
	// Start the timeout clock before triggering the actual search,
	// then let the loader turn the raw entity infos into entities.
	hSearchQuery.getTimeoutManager().start();
	return (List<E>) getResultLoader().load( hSearchQuery.queryEntityInfos() );
}
/**
 * Returns an assertion on the identifiers of the query results.
 */
public ListAssert asResultIds() {
	HSQuery hsQuery = getHSQuery();
	List<Serializable> resultIds = hsQuery.queryEntityInfos().stream()
			.map( entityInfo -> entityInfo.getId() )
			.collect( Collectors.toList() );
	return Assertions.assertThat( resultIds )
			.as( "IDs of results of query " + toString( hsQuery ) );
}
// Asserts that the given query matches exactly the expected number of Book documents.
private static void verifyMatches(SearchIntegrator searchIntegrator, int expectedMatches, Query query) {
	int actualMatches = searchIntegrator.createHSQuery( query, Book.class )
			.queryEntityInfos()
			.size();
	Assert.assertEquals( expectedMatches, actualMatches );
}
/**
 * Returns an assertion on the raw {@code EntityInfo} results of the query.
 */
public ListAssert asResults() {
	HSQuery hsQuery = getHSQuery();
	return Assertions.assertThat( hsQuery.queryEntityInfos() )
			.as( "Results of query " + toString( hsQuery ) );
}
@Override
public List<E> list() throws SearchException {
	// Fail fast if the cache is not available for this partition.
	partitionHandlingSupport.checkCacheAvailable();
	HSQuery query = queryDefinition.getHsQuery();
	// Start the timeout clock before triggering the actual search,
	// then let the loader turn the raw entity infos into entities.
	query.getTimeoutManager().start();
	return (List<E>) getResultLoader( query ).load( query.queryEntityInfos() );
}
@Test
@TestForIssue(jiraKey = "HSEARCH-1987")
public void testNumericMappingOfEmbeddedFields() {
	// Project the raw Lucene document of the single indexed ScoreBoard.
	List<EntityInfo> entityInfos = helper.hsQuery( ScoreBoard.class )
			.projection( ProjectionConstants.DOCUMENT )
			.queryEntityInfos();
	assertEquals( 1, entityInfos.size() );
	Document document = (Document) entityInfos.get( 0 ).getProjection()[0];

	// Embedded fields must have been indexed as numeric fields.
	IndexableField idField = document.getField( "score_id" );
	assertThat( idField.numericValue() ).isEqualTo( 1 );

	IndexableField betaField = document.getField( "score_beta" );
	assertThat( betaField.numericValue() ).isEqualTo( 100 );
}
protected List doHibernateSearchList() { hSearchQuery.getTimeoutManager().start(); final List<EntityInfo> entityInfos = hSearchQuery.queryEntityInfos(); Loader loader = getLoader(); List list = loader.load( entityInfos ); //no need to timeoutManager.isTimedOut from this point, we don't do anything intensive if ( resultTransformer == null || loader instanceof ProjectionLoader ) { //stay consistent with transformTuple which can only be executed during a projection //nothing to do } else { list = resultTransformer.transformList( list ); } hSearchQuery.getTimeoutManager().stop(); return list; }
@Test
@TestForIssue(jiraKey = "HSEARCH-1987")
public void testOneOfSeveralFieldsIsNumeric() {
	// Project the raw Lucene document of the single indexed TouristAttraction.
	List<EntityInfo> entityInfos = helper.hsQuery( TouristAttraction.class )
			.projection( ProjectionConstants.DOCUMENT )
			.queryEntityInfos();
	assertEquals( 1, entityInfos.size() );
	Document document = (Document) entityInfos.get( 0 ).getProjection()[0];

	// One field mapped numerically...
	IndexableField numericField = document.getField( "scoreNumeric" );
	assertThat( numericField.numericValue() ).isEqualTo( 23 );

	// ...while the other stays a plain string field.
	IndexableField stringField = document.getField( "scoreString" );
	assertThat( stringField.numericValue() ).isNull();
	assertThat( stringField.stringValue() ).isEqualTo( "23" );
}
// Parses the keyword against the given default field and runs the resulting
// Lucene query against the Book index.
private List<EntityInfo> search(String field, String keyword) throws ParseException {
	ExtendedSearchIntegrator integrator = sfHolder.getSearchFactory();
	Query luceneQuery = new QueryParser( field, TestConstants.standardAnalyzer ).parse( keyword );
	return integrator.createHSQuery( luceneQuery, Book.class ).queryEntityInfos();
}
@Override public ResultIterator<E> iterator(FetchOptions fetchOptions) throws SearchException { if (fetchOptions.getFetchMode() == FetchOptions.FetchMode.EAGER) { hSearchQuery.getTimeoutManager().start(); List<EntityInfo> entityInfos = hSearchQuery.queryEntityInfos(); return filterNulls(new EagerIterator<>(entityInfos, getResultLoader(), fetchOptions.getFetchSize())); } else if (fetchOptions.getFetchMode() == FetchOptions.FetchMode.LAZY) { DocumentExtractor extractor = hSearchQuery.queryDocumentExtractor(); //triggers actual Lucene search return filterNulls(new LazyIterator<>(extractor, getResultLoader(), fetchOptions.getFetchSize())); } else { throw new IllegalArgumentException("Unknown FetchMode " + fetchOptions.getFetchMode()); } }
@Test
public void searchBeforeMaxResultWindow() throws Exception {
	generateData( 0, DEFAULT_MAX_RESULT_WINDOW + 10 );

	// Paginate so that the page ends exactly at the max result window boundary.
	Query query = builder().all().createQuery();
	int firstResult = DEFAULT_MAX_RESULT_WINDOW - 5;
	List<EntityInfo> results = getQuery( query )
			.firstResult( firstResult ).maxResults( 5 )
			.queryEntityInfos();
	assertEquals( 5, results.size() );
	assertEquals( firstResult, results.get( 0 ).getId() );
}
@Override public ResultIterator<E> iterator(FetchOptions fetchOptions) throws SearchException { partitionHandlingSupport.checkCacheAvailable(); HSQuery hSearchQuery = queryDefinition.getHsQuery(); if (fetchOptions.getFetchMode() == FetchOptions.FetchMode.EAGER) { hSearchQuery.getTimeoutManager().start(); List<EntityInfo> entityInfos = hSearchQuery.queryEntityInfos(); return filterNulls(new EagerIterator<>(entityInfos, getResultLoader(hSearchQuery), fetchOptions.getFetchSize())); } else if (fetchOptions.getFetchMode() == FetchOptions.FetchMode.LAZY) { DocumentExtractor extractor = hSearchQuery.queryDocumentExtractor(); //triggers actual Lucene search return filterNulls(new LazyIterator<>(extractor, getResultLoader(hSearchQuery), fetchOptions.getFetchSize())); } else { throw new IllegalArgumentException("Unknown FetchMode " + fetchOptions.getFetchMode()); } }
@Test
public void multipleResults_singleClass() throws Exception {
	EntityWithMissingIdWhenRetrievedFromIndex first = new EntityWithMissingIdWhenRetrievedFromIndex();
	first.id = "1";
	EntityWithMissingIdWhenRetrievedFromIndex second = new EntityWithMissingIdWhenRetrievedFromIndex();
	second.id = "2";
	helper.add( first, second );

	// Reading the ID back must fail, since the index does not store it.
	thrown.expect( SearchException.class );
	thrown.expectMessage( "HSEARCH000338" );
	thrown.expectMessage( "Incomplete entity information" );
	thrown.expectMessage( "'" + EntityWithMissingIdWhenRetrievedFromIndex.class.getName() + "'" );

	sfHolder.getSearchFactory()
			.createHSQuery( new MatchAllDocsQuery(), EntityWithMissingIdWhenRetrievedFromIndex.class )
			.queryEntityInfos()
			.iterator().next().getId();
}
@Test(expected = SearchException.class)
public void searchBeyondMaxResultWindow() throws Exception {
	generateData( 0, DEFAULT_MAX_RESULT_WINDOW + 10 );

	// Requesting a page starting past the max result window must be rejected.
	getQuery( builder().all().createQuery() )
			.firstResult( DEFAULT_MAX_RESULT_WINDOW + 1 ).maxResults( 5 )
			.queryEntityInfos();
}
@Test(expected = ClassCastException.class)
public void singleField_numericFieldBridge_missingValue_use_nonRaw() throws Exception {
	// A non-raw (unbridged) replacement value for a field-bridged numeric
	// field must fail with a ClassCastException at query time.
	Sort sortWithNonRawReplacement = builder().sort()
			.byField( "fieldBridgedNumericField" )
			.onMissingValue().use( new WrappedDoubleValue( 1.5d ) )
			.createSort();
	sfHolder.getSearchFactory().createHSQuery( new MatchAllDocsQuery(), IndexedEntry.class )
			.sort( sortWithNonRawReplacement )
			.queryEntityInfos();
}
@Test public void serializeDeserializeLuceneHSQuery() throws ClassNotFoundException, IOException { final ExtendedSearchIntegrator integrator = sfHolder.getSearchFactory(); Book book = new Book(); book.title = "Java Serialization"; book.text = "The black art of object serialization is full of pitfalls even for experienced developers"; helper.add( book ); QueryBuilder queryBuilder = integrator.buildQueryBuilder().forEntity( Book.class ).get(); Query luceneQuery = queryBuilder.keyword().onField( "text" ).matching( "art" ).createQuery(); HSQuery hsQuery = integrator.createHSQuery( luceneQuery, Book.class ); //Lucene Queries are not serializable: who's using LuceneHSQuery will need to //encode the query separately and set it again. hsQuery.luceneQuery( null ); HSQuery clonedQuery = SerializationTestHelper.duplicateBySerialization( hsQuery ); clonedQuery.afterDeserialise( integrator ); clonedQuery.luceneQuery( luceneQuery ); List<EntityInfo> result = clonedQuery.queryEntityInfos(); Assert.assertEquals( 1, result.size() ); }
@Test(expected = ClassCastException.class)
public void singleField_numericFieldBridge_nonMetadataProviding_missingValue_use_nonRaw() throws Exception {
	Query matchAll = builder().all().createQuery();
	// A non-raw (unbridged) replacement value for a field-bridged numeric
	// field must fail with a ClassCastException at query time, even when the
	// bridge does not provide metadata and the sort type is given explicitly.
	Sort sortWithNonRawReplacement = builder().sort()
			.byField( "nonMetadataProvidingFieldBridgedNumericField", SortField.Type.DOUBLE )
			.onMissingValue().use( new WrappedDoubleValue( 1.5d ) )
			.createSort();
	sfHolder.getSearchFactory().createHSQuery( matchAll, IndexedEntry.class )
			.sort( sortWithNonRawReplacement )
			.queryEntityInfos();
}
@Test(expected = SearchException.class)
@TestForIssue(jiraKey = "HSEARCH-2678")
public void testOverridingSeveralAnalyzers() {
	// Overriding the analyzer of more than one field at query-build time is
	// not supported and must be rejected when the query is executed.
	QueryBuilder queryBuilder = sfHolder.getSearchFactory()
			.buildQueryBuilder()
			.forEntity( Book.class )
			.overridesForField( "author", "titleAnalyzer" )
			.overridesForField( "title", "authorAnalyzer" )
			.get();
	Query query = queryBuilder.simpleQueryString()
			.onFields( "title", "author" )
			.withAndAsDefaultOperator()
			.matching( "Molière" )
			.createQuery();

	HSQuery hsQuery = sfHolder.getSearchFactory().createHSQuery( query, Book.class );
	hsQuery.sort( new Sort( new SortField( "title_sort", SortField.Type.STRING ) ) );
	hsQuery.queryEntityInfos();
}