// Listener hook: when issues are removed from the DB for a project, delete the
// corresponding documents from the Elasticsearch index so search stays consistent.
@Override public void onIssuesRemoval(String projectUuid, List<String> issueKeys) { issueIndexer.deleteByKeys(projectUuid, issueKeys); }
// Post-save hook: commits the open DB transaction and pushes the saved issues to the
// Elasticsearch index in one step (delegates to the resilient commit-and-index path).
private void doAfterSave(DbSession dbSession, Collection<IssueDto> issues) { indexer.commitAndIndexIssues(dbSession, issues); }
/**
 * Streams every document from {@code issues} into Elasticsearch through a bulk
 * indexer sized by {@code size}; progress and failures are reported to {@code listener}.
 */
private void doIndex(Iterator<IssueDoc> issues, Size size, IndexingListener listener) {
  BulkIndexer bulkIndexer = createBulkIndexer(size, listener);
  bulkIndexer.start();
  for (Iterator<IssueDoc> it = issues; it.hasNext(); ) {
    bulkIndexer.add(newIndexRequest(it.next()));
  }
  bulkIndexer.stop();
}
// Full reindex helper: rebuilds all issue index types as done at server startup.
private void indexIssues() { issueIndexer.indexOnStartup(issueIndexer.getIndexTypes()); }
/** * Commits the DB transaction and adds the issues to Elasticsearch index. * <p> * If indexing fails, then the recovery daemon will retry later and this * method successfully returns. Meanwhile these issues will be "eventually * consistent" when requesting the index. */ public void commitAndIndexIssues(DbSession dbSession, Collection<IssueDto> issues) { ListMultimap<String, EsQueueDto> itemsByIssueKey = ArrayListMultimap.create(); issues.stream() .map(issue -> createQueueDto(issue.getKey(), ID_TYPE_ISSUE_KEY, issue.getProjectUuid())) // a mutable ListMultimap is needed for doIndexIssueItems, so MoreCollectors.index() is // not used .forEach(i -> itemsByIssueKey.put(i.getDocId(), i)); dbClient.esQueueDao().insert(dbSession, itemsByIssueKey.values()); dbSession.commit(); doIndexIssueItems(dbSession, itemsByIssueKey); }
private IndexingResult doIndexProjectItems(DbSession dbSession, ListMultimap<String, EsQueueDto> itemsByProjectUuid) { if (itemsByProjectUuid.isEmpty()) { return new IndexingResult(); } // one project, referenced by es_queue.doc_id = many issues IndexingListener listener = new OneToManyResilientIndexingListener(dbClient, dbSession, itemsByProjectUuid.values()); BulkIndexer bulkIndexer = createBulkIndexer(Size.REGULAR, listener); bulkIndexer.start(); for (String projectUuid : itemsByProjectUuid.keySet()) { // TODO support loading of multiple projects in a single SQL request try (IssueIterator issues = issueIteratorFactory.createForProject(projectUuid)) { if (issues.hasNext()) { do { IssueDoc doc = issues.next(); bulkIndexer.add(newIndexRequest(doc)); } while (issues.hasNext()); } else { // project does not exist or has no issues. In both case // all the documents related to this project are deleted. addProjectDeletionToBulkIndexer(bulkIndexer, projectUuid); } } } return bulkIndexer.stop(); }
// Convenience helper: indexes a single issue document by wrapping it in a one-element iterator.
private void indexIssue(IssueDoc issue) { issueIndexer.index(Iterators.singletonIterator(issue)); }
// Recovery entry point: partitions the queue items by doc_id_type — single issue keys
// vs whole-project uuids — and dispatches each group to its dedicated indexing path.
// Items with an unknown doc_id_type are only logged; they stay in the queue and need a
// manual fix (as stated in the message), so they will be retried/reported again.
@Override public IndexingResult index(DbSession dbSession, Collection<EsQueueDto> items) { ListMultimap<String, EsQueueDto> itemsByIssueKey = ArrayListMultimap.create(); ListMultimap<String, EsQueueDto> itemsByProjectKey = ArrayListMultimap.create(); items.forEach(i -> { if (ID_TYPE_ISSUE_KEY.equals(i.getDocIdType())) { itemsByIssueKey.put(i.getDocId(), i); } else if (ID_TYPE_PROJECT_UUID.equals(i.getDocIdType())) { itemsByProjectKey.put(i.getDocId(), i); } else { LOGGER.error("Unsupported es_queue.doc_id_type for issues. Manual fix is required: " + i); } }); IndexingResult result = new IndexingResult(); result.add(doIndexIssueItems(dbSession, itemsByIssueKey)); result.add(doIndexProjectItems(dbSession, itemsByProjectKey)); return result; }
// Testing seam: indexes the given documents with a LARGE bulk size and fail-fast
// error handling (no resilience/recovery), which is why it is only @VisibleForTesting.
@VisibleForTesting protected void index(Iterator<IssueDoc> issues) { doIndex(issues, Size.LARGE, IndexingListener.FAIL_ON_ERROR); }
// Test helper: enqueues recovery items for the project with the given cause, commits,
// then runs the indexer over those items and returns the indexing result.
private IndexingResult indexProject(String projectUuid, ProjectIndexer.Cause cause) { Collection<EsQueueDto> items = underTest.prepareForRecovery(db.getSession(), asList(projectUuid), cause); db.commit(); return underTest.index(db.getSession(), items); }
/**
 * This is a technical constraint, to ensure that the indexers can be called in any order
 * during startup: an issue document must be indexable even when its parent (project)
 * document does not exist yet in the index.
 */
@Test public void parent_child_relationship_does_not_require_ordering_of_index_requests() { IssueDoc issueDoc = new IssueDoc(); issueDoc.setKey("key"); issueDoc.setProjectUuid("parent-does-not-exist"); new IssueIndexer(es.client(), db.getDbClient(), new IssueIteratorFactory(db.getDbClient())) .index(asList(issueDoc).iterator()); assertThat(es.countDocuments(INDEX_TYPE_ISSUE)).isEqualTo(1L); }
/**
 * Deletes the issue documents with the given keys from the Elasticsearch index.
 * The project uuid is passed as the routing value of each deletion request.
 * A no-op when {@code issueKeys} is empty; indexing errors fail fast.
 */
public void deleteByKeys(String projectUuid, Collection<String> issueKeys) {
  if (issueKeys.isEmpty()) {
    return;
  }
  BulkIndexer bulkIndexer = createBulkIndexer(Size.REGULAR, IndexingListener.FAIL_ON_ERROR);
  bulkIndexer.start();
  for (String issueKey : issueKeys) {
    bulkIndexer.addDeletion(INDEX_TYPE_ISSUE, issueKey, projectUuid);
  }
  bulkIndexer.stop();
}
// Persists es_queue items describing what must be (re)indexed for the given projects,
// so that the recovery daemon can replay the work if indexing fails later.
// The empty cases below deliberately fall through to a single "return emptyList()".
@Override public Collection<EsQueueDto> prepareForRecovery(DbSession dbSession, Collection<String> projectUuids, ProjectIndexer.Cause cause) { switch (cause) { case PROJECT_CREATION: // nothing to do, issues do not exist at project creation case MEASURE_CHANGE: case PROJECT_KEY_UPDATE: case PROJECT_TAGS_UPDATE: case PERMISSION_CHANGE: // nothing to do. Measures, permissions, project key and tags are not used in type issues/issue return emptyList(); case PROJECT_DELETION: // one queue item per project, keyed by the project uuid itself List<EsQueueDto> items = projectUuids.stream() .map(projectUuid -> createQueueDto(projectUuid, ID_TYPE_PROJECT_UUID, projectUuid)) .collect(MoreCollectors.toArrayList(projectUuids.size())); return dbClient.esQueueDao().insert(dbSession, items); default: // defensive case throw new IllegalStateException("Unsupported cause: " + cause); } }
// Verifies the indexer declares exactly one index type: the issue index.
@Test public void test_getIndexTypes() { assertThat(underTest.getIndexTypes()).containsExactly(INDEX_TYPE_ISSUE); }
// Indexes, project by project, every issue of the projects referenced by the queue
// items (es_queue.doc_id = project uuid). A project with no remaining issues has all
// of its documents removed from the index instead. Returns the bulk-indexing result.
private IndexingResult doIndexProjectItems(DbSession dbSession, ListMultimap<String, EsQueueDto> itemsByProjectUuid) { if (itemsByProjectUuid.isEmpty()) { return new IndexingResult(); } // one project, referenced by es_queue.doc_id = many issues IndexingListener listener = new OneToManyResilientIndexingListener(dbClient, dbSession, itemsByProjectUuid.values()); BulkIndexer bulkIndexer = createBulkIndexer(Size.REGULAR, listener); bulkIndexer.start(); for (String projectUuid : itemsByProjectUuid.keySet()) { // TODO support loading of multiple projects in a single SQL request try (IssueIterator issues = issueIteratorFactory.createForProject(projectUuid)) { if (issues.hasNext()) { do { IssueDoc doc = issues.next(); bulkIndexer.add(newIndexRequest(doc)); } while (issues.hasNext()); } else { // project does not exist or has no issues. In both case // all the documents related to this project are deleted. addProjectDeletionToBulkIndexer(bulkIndexer, projectUuid); } } } return bulkIndexer.stop(); }
// Full reindex helper: rebuilds every issue index type as at server startup.
private void indexIssues() { issueIndexer.indexOnStartup(issueIndexer.getIndexTypes()); }
// Test helper: indexes the given issue docs directly, then grants "anyone" read
// permission on each doc's project so the indexed issues are visible in queries.
// NOTE(review): the trailing brace below closes the enclosing test class.
private void indexIssues(IssueDoc... issues) { issueIndexer.index(asList(issues).iterator()); authorizationIndexer.allow(stream(issues).map(issue -> new IndexPermissions(issue.projectUuid(), PROJECT).allowAnyone()).collect(toList())); } }
/** * Commits the DB transaction and adds the issues to Elasticsearch index. * <p> * If indexing fails, then the recovery daemon will retry later and this * method successfully returns. Meanwhile these issues will be "eventually * consistent" when requesting the index. */ public void commitAndIndexIssues(DbSession dbSession, Collection<IssueDto> issues) { ListMultimap<String, EsQueueDto> itemsByIssueKey = ArrayListMultimap.create(); issues.stream() .map(issue -> createQueueDto(issue.getKey(), ID_TYPE_ISSUE_KEY, issue.getProjectUuid())) // a mutable ListMultimap is needed for doIndexIssueItems, so MoreCollectors.index() is // not used .forEach(i -> itemsByIssueKey.put(i.getDocId(), i)); // queue items are persisted before the commit so a crash can still be recovered dbClient.esQueueDao().insert(dbSession, itemsByIssueKey.values()); dbSession.commit(); doIndexIssueItems(dbSession, itemsByIssueKey); }
/**
 * Recovery entry point: partitions the queue items by their doc_id_type — single
 * issue keys vs whole-project uuids — and dispatches each group to the matching
 * indexing routine. Items with an unsupported doc_id_type are logged and skipped.
 *
 * @return the combined result of both indexing runs
 */
@Override
public IndexingResult index(DbSession dbSession, Collection<EsQueueDto> items) {
  ListMultimap<String, EsQueueDto> itemsByIssueKey = ArrayListMultimap.create();
  ListMultimap<String, EsQueueDto> itemsByProjectKey = ArrayListMultimap.create();
  for (EsQueueDto item : items) {
    String docIdType = item.getDocIdType();
    if (ID_TYPE_ISSUE_KEY.equals(docIdType)) {
      itemsByIssueKey.put(item.getDocId(), item);
    } else if (ID_TYPE_PROJECT_UUID.equals(docIdType)) {
      itemsByProjectKey.put(item.getDocId(), item);
    } else {
      LOGGER.error("Unsupported es_queue.doc_id_type for issues. Manual fix is required: " + item);
    }
  }
  IndexingResult result = new IndexingResult();
  result.add(doIndexIssueItems(dbSession, itemsByIssueKey));
  result.add(doIndexProjectItems(dbSession, itemsByProjectKey));
  return result;
}
// Startup (re)indexing: streams every issue from the DB into the index with a LARGE
// bulk size and fail-fast error handling. The iterator is closed via try-with-resources.
// NOTE(review): uninitializedIndexTypes is unused here — presumably because this indexer
// owns a single index type; confirm against the ProjectIndexer contract.
@Override public void indexOnStartup(Set<IndexType> uninitializedIndexTypes) { try (IssueIterator issues = issueIteratorFactory.createForAll()) { doIndex(issues, Size.LARGE, IndexingListener.FAIL_ON_ERROR); } }