/**
 * Checks whether an entity type exists.
 *
 * @param entityTypeId entity type identifier taken from the request path
 * @return {@code true} if a repository exists for the entity type, {@code false} otherwise
 */
@GetMapping(value = "/{entityTypeId}/exist", produces = APPLICATION_JSON_VALUE)
public boolean entityExists(@PathVariable("entityTypeId") String entityTypeId) {
  // Ask the data service directly instead of provoking and catching an
  // UnknownEntityTypeException — exceptions are not control flow.
  return dataService.hasRepository(entityTypeId);
}
/** Inserts new scheduled job types and updates the ones that already exist. */
private void upsertScheduledJobTypes() {
  Repository<ScheduledJobType> jobTypeRepository =
      dataService.getRepository(SCHEDULED_JOB_TYPE, ScheduledJobType.class);
  jobTypeRepository.upsertBatch(scheduledJobTypes);
}
@Override @PreAuthorize("hasAnyRole('ROLE_SU, ROLE_PLUGIN_READ_indexmanager')") public List<EntityType> getIndexedEntities() { // collect indexed repos List<EntityType> indexedEntityTypeList = new ArrayList<>(); dataService .getEntityTypeIds() .forEach( entityTypeId -> { Repository<Entity> repository = dataService.getRepository(entityTypeId); if (repository != null && repository.getCapabilities().contains(RepositoryCapability.INDEXABLE)) { indexedEntityTypeList.add(repository.getEntityType()); } }); // sort indexed repos by entity label indexedEntityTypeList.sort(Comparator.comparing(EntityType::getLabel)); return indexedEntityTypeList; }
/**
 * Synchronizes the persisted plugins with the plugins discovered in the application context:
 * new or changed plugins are upserted, and persisted plugins no longer present in the context
 * are deleted (app plugins, identified by the APP_PREFIX, are never deleted).
 */
public void populate(ApplicationContext ctx) {
  Map<String, Plugin> newOrChangedPluginMap = getPlugins(ctx);

  // Plugins currently persisted, keyed by plugin id.
  Map<String, Plugin> existingPluginMap =
      dataService
          .findAll(PLUGIN, Plugin.class)
          .collect(toMap(Plugin::getId, Function.identity()));

  // Persisted plugins that disappeared from the context; apps are kept.
  List<Plugin> deletedPlugins = new ArrayList<>();
  for (Map.Entry<String, Plugin> entry : existingPluginMap.entrySet()) {
    String pluginId = entry.getKey();
    if (!newOrChangedPluginMap.containsKey(pluginId) && !pluginId.startsWith(APP_PREFIX)) {
      deletedPlugins.add(entry.getValue());
    }
  }

  if (!newOrChangedPluginMap.isEmpty()) {
    dataService
        .getRepository(PLUGIN, Plugin.class)
        .upsertBatch(newArrayList(newOrChangedPluginMap.values()));
  }
  if (!deletedPlugins.isEmpty()) {
    dataService.delete(PLUGIN, deletedPlugins.stream());
  }
}
/**
 * Rebuilds the index of the given entity type.
 *
 * @param entityTypeId entity type identifier
 * @throws MolgenisDataAccessException if the repository is not indexable
 */
@Override
@PreAuthorize("hasAnyRole('ROLE_SU')")
public void rebuildIndex(String entityTypeId) {
  Repository<Entity> repository = dataService.getRepository(entityTypeId);
  boolean indexable = repository.getCapabilities().contains(RepositoryCapability.INDEXABLE);
  if (!indexable) {
    throw new MolgenisDataAccessException(
        "Repository [" + entityTypeId + "] is not an indexed repository");
  }
  indexService.rebuildIndex(repository);
}
}
/**
 * Creates a new merged repository.
 *
 * <p>Metadata is merged based on the common attributes (those remain at root level); all
 * non-common attributes are organised in one compound attribute per repository. Data of all
 * repositories is merged based on the common columns.
 *
 * <p>NOTE(review): the data is written into the repository looked up by name via
 * {@code dataService.getRepository(mergedRepository.getName())}, not into the
 * {@code mergedRepository} instance itself — presumably both refer to the same backing store;
 * confirm against {@code mergeData}.
 *
 * @param repositoryList list of repositories to be merged
 * @param commonAttributes list of common attributes; these columns are used to 'join'/'merge' on
 * @param mergedRepository the resulting repository (returned unchanged by this method)
 * @param batchSize number of records after which the result is added or updated in the repository
 * @return the {@code mergedRepository} passed in, containing the merged data
 */
public Repository merge(List<Repository> repositoryList, List<AttributeMetaData> commonAttributes,
		Repository mergedRepository, int batchSize)
{
	mergeData(repositoryList, dataService.getRepository(mergedRepository.getName()), commonAttributes, batchSize);
	return mergedRepository;
}
/**
 * Tests an algorithm by computing it for all entities in the source repository.
 *
 * @param mappingServiceRequest the {@link MappingServiceRequest} sent by the client
 * @return Map with the results and size of the source
 */
@RequestMapping(method = RequestMethod.POST, value = "/mappingattribute/testscript", consumes = APPLICATION_JSON_VALUE, produces = APPLICATION_JSON_VALUE)
@ResponseBody
public Map<String, Object> testScript(@RequestBody MappingServiceRequest mappingServiceRequest)
{
	EntityType targetEntityType = dataService.getEntityType(mappingServiceRequest.getTargetEntityName());

	// The target attribute is only resolvable when the target entity type exists.
	Attribute targetAttribute;
	if (targetEntityType == null)
	{
		targetAttribute = null;
	}
	else
	{
		targetAttribute = targetEntityType.getAttribute(mappingServiceRequest.getTargetAttributeName());
	}

	Repository<Entity> sourceRepo = dataService.getRepository(mappingServiceRequest.getSourceEntityName());
	Iterable<AlgorithmEvaluation> evaluations = algorithmService.applyAlgorithm(targetAttribute,
			mappingServiceRequest.getAlgorithm(), sourceRepo);
	List<Object> computedValues = newArrayList(Iterables.transform(evaluations, AlgorithmEvaluation::getValue));

	return ImmutableMap.of("results", computedValues, "totalCount", Iterables.size(sourceRepo));
}
/**
 * Applies the given source mapping to the target repository, processing the source in batches.
 *
 * @param sourceMapping mapping whose source entities are processed
 * @param targetRepo repository receiving the mapped entities
 * @param progress progress reporter updated before and after the run
 * @return the number of entities processed
 */
long applyMappingToRepo(EntityMapping sourceMapping, Repository<Entity> targetRepo, Progress progress)
{
	progress.status(format("Mapping source [%s]...", sourceMapping.getLabel()));
	AtomicLong counter = new AtomicLong();
	// canAdd is true when the target starts out empty — presumably processBatch then
	// adds instead of upserting; confirm against processBatch.
	boolean canAdd = targetRepo.count() == 0;

	Repository<Entity> sourceRepo = dataService.getRepository(sourceMapping.getName());
	sourceRepo.forEachBatched(
			batch -> processBatch(sourceMapping, targetRepo, progress, counter, canAdd, batch),
			MAPPING_BATCH_SIZE);

	progress.status(format("Mapped %s [%s] entities.", counter, sourceMapping.getLabel()));
	return counter.get();
}
/**
 * Copies all entity rows of the original entity type into the given copy. Abstract entity
 * types have no rows, so they are returned untouched.
 */
private EntityType copyEntities(EntityType copy, CopyState state) {
  if (copy.isAbstract()) {
    return copy;
  }
  String originalEntityTypeId = state.originalEntityTypeIds().get(copy.getId());
  dataService
      .getRepository(originalEntityTypeId)
      .forEachBatched(batch -> addEntityBatch(copy, state, batch), BATCH_SIZE);
  return copy;
}
/**
 * Package-private for testability.
 *
 * <p>Applies the given source mapping to the target repository, processing the source in
 * batches and reporting progress.
 *
 * @param sourceMapping mapping whose source entities are processed
 * @param targetRepo repository receiving the mapped entities
 * @param progress progress reporter updated before and after the run
 * @param depth mapping depth forwarded to batch processing
 * @return the number of entities processed
 */
long applyMappingToRepo(
    EntityMapping sourceMapping, Repository<Entity> targetRepo, Progress progress, int depth) {
  progress.status(format("Mapping source [%s]...", sourceMapping.getLabel()));
  AtomicLong counter = new AtomicLong();
  // canAdd is true when the target starts out empty — presumably processBatch then adds
  // instead of upserting; confirm against processBatch.
  boolean canAdd = targetRepo.count() == 0;

  Repository<Entity> sourceRepo = dataService.getRepository(sourceMapping.getName());
  sourceRepo.forEachBatched(
      batch -> processBatch(sourceMapping, targetRepo, progress, counter, canAdd, batch, depth),
      MAPPING_BATCH_SIZE);

  progress.status(format("Mapped %s [%s] entities.", counter, sourceMapping.getLabel()));
  return counter.get();
}
/**
 * Returns the repository for the given entity type, creating it from the target metadata when
 * it does not exist yet. For an existing repository the metadata is compared first.
 */
private Repository<Entity> getTargetRepository(String entityTypeId, EntityType targetMetadata) {
  if (dataService.hasRepository(entityTypeId)) {
    Repository<Entity> existingRepo = dataService.getRepository(entityTypeId);
    compareTargetMetadatas(existingRepo.getEntityType(), targetMetadata);
    return existingRepo;
  }
  return addTargetEntityType(targetMetadata);
}
/**
 * Imports all entities from the given source into the corresponding target repositories.
 *
 * <p>Only {@link DataAction#ADD} is supported; every entity of every entity type in the source
 * is added to the target repository with the same id.
 *
 * @param source repository collection to import from
 * @param metadataAction metadata action (not used by this importer)
 * @param dataAction must be {@link DataAction#ADD}
 * @param packageId optional package id (not used by this importer)
 * @return report with the number of imported entities per entity type
 * @throws IllegalArgumentException if {@code dataAction} is not ADD
 * @throws MolgenisDataException if a source repository cannot be closed
 */
@Override
@Transactional
public EntityImportReport doImport(
    RepositoryCollection source,
    MetadataAction metadataAction,
    DataAction dataAction,
    @Nullable String packageId) {
  if (dataAction != DataAction.ADD) {
    throw new IllegalArgumentException("Only ADD is supported");
  }

  EntityImportReport report = new EntityImportReport();
  for (String entityTypeId : source.getEntityTypeIds()) {
    try (Repository<Entity> sourceRepository = source.getRepository(entityTypeId)) {
      Repository<Entity> targetRepository = dataService.getRepository(entityTypeId);
      Integer count = targetRepository.add(asStream(sourceRepository));
      report.addEntityCount(entityTypeId, count);
    } catch (IOException e) {
      // Log a meaningful message instead of an empty one; keep the cause for the rethrow.
      LOG.error("Failed to import entity type [{}]", entityTypeId, e);
      throw new MolgenisDataException(e);
    }
  }
  return report;
}
/**
 * Tests an algorithm by computing it for all entities in the source repository.
 *
 * @param mappingServiceRequest the {@link MappingServiceRequest} sent by the client
 * @return Map with the results and size of the source
 */
@PostMapping(
    value = "/mappingattribute/testscript",
    consumes = APPLICATION_JSON_VALUE,
    produces = APPLICATION_JSON_VALUE)
@ResponseBody
public Map<String, Object> testScript(@RequestBody MappingServiceRequest mappingServiceRequest) {
  EntityType targetEntityType =
      dataService.getEntityType(mappingServiceRequest.getTargetEntityName());

  // The target attribute can only be resolved when the target entity type exists.
  Attribute targetAttribute;
  if (targetEntityType == null) {
    targetAttribute = null;
  } else {
    targetAttribute = targetEntityType.getAttribute(mappingServiceRequest.getTargetAttributeName());
  }

  Repository<Entity> sourceRepo =
      dataService.getRepository(mappingServiceRequest.getSourceEntityName());
  Iterable<AlgorithmEvaluation> evaluations =
      algorithmService.applyAlgorithm(
          targetAttribute,
          mappingServiceRequest.getAlgorithm(),
          sourceRepo,
          mappingServiceRequest.getDepth());
  List<Object> computedValues =
      newArrayList(Iterables.transform(evaluations, AlgorithmEvaluation::getValue));

  return ImmutableMap.of("results", computedValues, "totalCount", Iterables.size(sourceRepo));
}
/**
 * Returns the target repository for the given entity type id. Creates the repository from the
 * target metadata if it is missing; otherwise verifies the existing metadata matches.
 */
private Repository<Entity> getTargetRepository(String entityTypeId, EntityType targetMetadata) {
  boolean repositoryExists = dataService.hasRepository(entityTypeId);
  if (!repositoryExists) {
    return addTargetEntityType(targetMetadata);
  }
  Repository<Entity> targetRepo = dataService.getRepository(entityTypeId);
  compareTargetMetadatas(targetRepo.getEntityType(), targetMetadata);
  return targetRepo;
}
private void createInputRepository(Repository<Entity> inputRepository) { // Add the original input dataset to database dataService.getMeta().addEntityType(inputRepository.getEntityType()); Repository<Entity> target = dataService.getRepository(inputRepository.getName()); inputRepository.forEachBatched(entities -> target.add(entities.stream()), BATCH_SIZE); }
/**
 * Cleans up successful IndexJobExecutions that finished longer than five minutes ago. Delayed
 * for a minute after startup to allow the transaction manager to become available.
 */
@Scheduled(initialDelay = 1 * 60 * 1000, fixedRate = 5 * 60 * 1000)
public void cleanupJobExecutions() {
  runAsSystem(
      () -> {
        LOG.trace("Clean up Index job executions...");
        if (!dataService.hasRepository(IndexJobExecutionMeta.INDEX_JOB_EXECUTION)) {
          LOG.warn("{} does not exist", IndexJobExecutionMeta.INDEX_JOB_EXECUTION);
          return;
        }
        Instant fiveMinutesAgo = Instant.now().minus(5, ChronoUnit.MINUTES);
        Stream<Entity> executions =
            dataService
                .getRepository(IndexJobExecutionMeta.INDEX_JOB_EXECUTION)
                .query()
                .lt(END_DATE, fiveMinutesAgo)
                .and()
                .eq(STATUS, SUCCESS.toString())
                .findAll();
        dataService.delete(IndexJobExecutionMeta.INDEX_JOB_EXECUTION, executions);
        LOG.debug("Cleaned up Index job executions.");
      });
}
}
@Override public Style addStyle( String styleId, String bootstrap3FileName, InputStream bootstrap3StyleData, String bootstrap4FileName, InputStream bootstrap4StyleData) throws MolgenisStyleException { if (dataService.getRepository(STYLE_SHEET).findOneById(styleId) != null) { throw new MolgenisStyleException( String.format("A style with the same identifier (%s) already exists", styleId)); } StyleSheet styleSheet = styleSheetFactory.create(styleId); styleSheet.setName(styleId); FileMeta bootstrap3ThemeFileMeta = createStyleSheetFileMeta(bootstrap3FileName, bootstrap3StyleData); styleSheet.setBootstrap3Theme(bootstrap3ThemeFileMeta); // Setting the bootstrap 4 style is optional if (bootstrap4FileName != null && bootstrap4StyleData != null) { FileMeta bootstrap4ThemeFileMeta = createStyleSheetFileMeta(bootstrap4FileName, bootstrap4StyleData); styleSheet.setBootstrap4Theme(bootstrap4ThemeFileMeta); } dataService.add(STYLE_SHEET, styleSheet); return Style.createLocal(styleSheet.getName()); }
} else { progress.progress(progressCount, format("Indexing {0}", entityType.getId())); final Repository<Entity> repository = dataService.getRepository(entityType.getId()); indexService.rebuildIndex(repository);
annotator))); iterateOverEntitiesAndAnnotate(dataService.getRepository(repository.getName()), annotator, action);
throw new UnknownEntityTypeException(entityTypeId); Repository<Entity> repository = dataService.getRepository(entityTypeId);