/**
 * Loads one page of results positioned at the given Solr cursor mark.
 *
 * @param cursorMark cursor mark identifying the page to fetch; may be null
 */
private void load(@Nullable String cursorMark) {
    // Copy first so the shared reference query is never mutated.
    final SolrQuery pagedQuery = referenceQuery.getCopy();
    pagedQuery.set(CursorMarkParams.CURSOR_MARK_PARAM, cursorMark);
    process(doLoad(pagedQuery));
}
public SolrSearchResult search(SolrQuery originalQuery) throws IntactSolrException { SolrQuery query = originalQuery.getCopy(); String[] fields = (String[]) ArrayUtils.add(FieldNames.DATA_FIELDS, "pkey"); if(query.getFields()!=null){ fields = (String[]) ArrayUtils.add(fields, query.getFields().split(",")); } query.setFields(fields); // if using a wildcard query we convert to lower case // as of http://mail-archives.apache.org/mod_mbox/lucene-solr-user/200903.mbox/%3CFD3AFB65-AEC1-40B2-A0A4-7E14A519AB05@ehatchersolutions.com%3E if (query.getQuery().contains("*")) { String[] tokens = query.getQuery().split(" "); StringBuilder sb = new StringBuilder(query.getQuery().length()); for (String token : tokens) { if (token.contains("*")) { sb.append(token.toLowerCase()); } else { sb.append(token); } sb.append(" "); } query.setQuery(sb.toString().trim()); } QueryResponse queryResponse = executeQuery(query); return new SolrSearchResult(solrServer, queryResponse); }
private void streamNextDocumentsFromSolr() { SolrQuery solrQuery = query.getCopy(); solrQuery.setRows(nrOfTimeSeriesPerBatch); solrQuery.setStart(currentDocumentCount); solrStreamingHandler.init(nrOfTimeSeriesPerBatch, currentDocumentCount); try { //Steam from solr connection.queryAndStreamResponse(solrQuery, solrStreamingHandler); //Convert the returning solr documents using the converter convertStream(); } catch (SolrServerException | IOException e) { LOGGER.warn("Exception while streaming the data points from Solr", e); } }
private void streamNextDocumentsFromSolr() { SolrQuery solrQuery = query.getCopy(); solrQuery.setRows(nrOfTimeSeriesPerBatch); solrQuery.setStart(currentDocumentCount); solrStreamingHandler.init(nrOfTimeSeriesPerBatch, currentDocumentCount); try { //Steam from solr connection.queryAndStreamResponse(solrQuery, solrStreamingHandler); //Convert the returning solr documents using the converter convertStream(); } catch (SolrServerException | IOException e) { LOGGER.warn("Exception while streaming the data points from Solr", e); } }
/**
 * Returns the object ids for the result window starting at {@code from},
 * at most {@code count} entries, by paging the stored Solr query.
 *
 * @param from  zero-based offset of the first result
 * @param count maximum number of ids to return
 * @return the ids of the matching objects, read from the "id" field
 * @throws SwordServerException wrapping any Solr or IO failure
 */
@Override
public List<MCRObjectID> get(int from, int count) throws SwordServerException {
    // Page a copy of the query; the stored query stays untouched.
    final SolrQuery windowQuery = this.solrQuery.getCopy();
    windowQuery.setStart(from);
    windowQuery.setRows(count);
    try {
        final QueryResponse response = MCRSolrClientFactory.getMainSolrClient().query(windowQuery);
        return response.getResults()
            .stream()
            .map(doc -> MCRObjectID.getInstance((String) doc.getFieldValue("id")))
            .collect(Collectors.toList());
    } catch (SolrServerException | IOException e) {
        throw new SwordServerException("Error while getting id list with MCRSword2SolrObjectIDSupplier and Query: "
            + this.solrQuery, e);
    }
}
}
/**
 * Counts interactor ids per interactor type via Solr facets.
 * For each MI identifier a facet field is registered on a copy of the query;
 * the resulting facet counts are collected into a multimap keyed by MI identifier.
 *
 * @param originalQuery     base query; it is copied and never modified
 * @param interactorTypeMis MI identifiers of the interactor types to facet on
 * @return multimap from MI identifier to (interactor id, count) pairs
 * @throws IntactSolrException if the Solr query fails
 */
public Multimap<String,InteractorIdCount> searchInteractors(SolrQuery originalQuery, String[] interactorTypeMis) throws IntactSolrException {
    final SolrQuery facetQuery = originalQuery.getCopy();
    // Facet counts only — no documents needed.
    facetQuery.setRows(0);
    facetQuery.setFacet(true);
    facetQuery.setFacetMinCount(1);
    facetQuery.setFacetLimit(Integer.MAX_VALUE);
    facetQuery.setFacetSort(FacetParams.FACET_SORT_COUNT);

    // Remember which facet field belongs to which MI identifier.
    final Map<String,String> fieldToMi = new HashMap<String,String>(interactorTypeMis.length);
    for (final String mi : interactorTypeMis) {
        final String fieldName = createFieldName(mi);
        facetQuery.addFacetField(fieldName);
        fieldToMi.put(fieldName, mi);
    }

    final QueryResponse response = executeQuery(facetQuery);

    final Multimap<String,InteractorIdCount> countsByType = HashMultimap.create();
    for (final Map.Entry<String,String> entry : fieldToMi.entrySet()) {
        final FacetField facetField = response.getFacetField(entry.getKey());
        if (facetField == null || facetField.getValues() == null) {
            continue; // field absent or empty: nothing to record for this type
        }
        for (final FacetField.Count count : facetField.getValues()) {
            countsByType.put(entry.getValue(), new InteractorIdCount(count.getName(), count.getCount()));
        }
    }
    return countsByType;
}
@Override public long getCount() throws SwordServerException { try { // make a copy to prevent multi threading issues final SolrQuery queryCopy = this.solrQuery.getCopy(); // only need the numFound queryCopy.setStart(0); queryCopy.setRows(0); final QueryResponse queryResponse = MCRSolrClientFactory.getMainSolrClient().query(queryCopy); return queryResponse.getResults().getNumFound(); } catch (SolrServerException | IOException e) { throw new SwordServerException( "Error while getting count with MCRSword2SolrObjectIDSupplier and Query: " + this.solrQuery, e); } }
private void initialStream(SolrQuery query, SolrClient connection) { try { //Make a copy of the query SolrQuery solrQuery = query.getCopy(); //We override the number of rows solrQuery.setRows(nrOfTimeSeriesPerBatch); //And the start solrQuery.setStart(currentDocumentCount); //init the streaming handler with solrStreamingHandler.init(nrOfTimeSeriesPerBatch, currentDocumentCount); QueryResponse response = connection.queryAndStreamResponse(solrQuery, solrStreamingHandler); //Set the global values nrOfAvailableTimeSeries = response.getResults().getNumFound(); queryStart = (long) response.getResponseHeader().get(ChronixSolrStorageConstants.QUERY_START_LONG); queryEnd = (long) response.getResponseHeader().get(ChronixSolrStorageConstants.QUERY_END_LONG); //Data is filled. We need not stream until it is read out needStream = false; } catch (SolrServerException | IOException e) { LOGGER.error("SolrServerException occurred while querying server.", e); } }
int chunkResults = 0; SolrQuery query = originalQuery.getCopy(); query.setRows(0);
private void initialStream(SolrQuery query, SolrClient connection) { try { //Make a copy of the query SolrQuery solrQuery = query.getCopy(); //We override the number of rows solrQuery.setRows(nrOfTimeSeriesPerBatch); //And the start solrQuery.setStart(currentDocumentCount); //init the streaming handler with solrStreamingHandler.init(nrOfTimeSeriesPerBatch, currentDocumentCount); QueryResponse response = connection.queryAndStreamResponse(solrQuery, solrStreamingHandler); //Set the global values nrOfAvailableTimeSeries = response.getResults().getNumFound(); queryStart = (long) response.getResponseHeader().get(ChronixSolrStorageConstants.QUERY_START_LONG); queryEnd = (long) response.getResponseHeader().get(ChronixSolrStorageConstants.QUERY_END_LONG); //Data is filled. We need not stream until it is read out needStream = false; } catch (SolrServerException | IOException e) { LOGGER.error("SolrServerException occurred while querying server.", e); } }
public Multimap<String,InteractorIdCount> searchInteractors(SolrQuery originalQuery, IntactFacetField[] intactFacetFields) throws IntactSolrException { SolrQuery query = originalQuery.getCopy(); query.setRows(0);