/**
 * Checks whether the repository's header contains the default matching-name column
 * (case-insensitive comparison against {@code SortaServiceImpl.DEFAULT_MATCHING_NAME_FIELD}).
 *
 * @param repository the uploaded input repository to inspect
 * @return true when an attribute with the expected name is present
 */
private boolean validateFileHeader(Repository<Entity> repository) {
  for (Attribute attribute : repository.getEntityType().getAttributes()) {
    if (attribute.getName().equalsIgnoreCase(SortaServiceImpl.DEFAULT_MATCHING_NAME_FIELD)) {
      return true;
    }
  }
  return false;
}
/**
 * Checks whether the repository's header has at least one non-blank attribute name.
 *
 * @param repository the uploaded input repository to inspect
 * @return true when any attribute name is non-blank
 */
private boolean validateEmptyFileHeader(Repository<Entity> repository) {
  for (Attribute attribute : repository.getEntityType().getAttributes()) {
    if (StringUtils.isNotBlank(attribute.getName())) {
      return true;
    }
  }
  return false;
}
/**
 * Gets the existing query cache for a {@link Repository} or creates a new one if none exists
 * yet.
 *
 * @param repository the Repository used to create a new cache if none is found; otherwise only
 *     its entity type id is used to look up the existing cache
 * @return the query LoadingCache for the repository
 */
private LoadingCache<Query<Entity>, List<Object>> getQueryCache(Repository<Entity> repository) {
  String id = repository.getEntityType().getId();
  // computeIfAbsent replaces the containsKey/putIfAbsent/get sequence: it is atomic on a
  // concurrent map and never builds a cache that is immediately discarded by a racing thread.
  return caches.computeIfAbsent(id, key -> createQueryCache(repository));
}
/**
 * Builds an execution order for the requested annotators such that each annotator's
 * dependencies (satisfiable by the available annotators) are scheduled before it.
 *
 * @param availableAnnotatorList all annotators that may be scheduled to satisfy dependencies
 * @param requestedAnnotatorList the annotators the caller asked to run
 * @param repo the repository whose entity type drives dependency resolution
 * @param entityTypeFactory factory passed through to the dependency resolver
 * @return queue of annotators in dependency-resolved execution order
 */
public Queue<RepositoryAnnotator> getAnnotatorSelectionDependencyList(
    List<RepositoryAnnotator> availableAnnotatorList,
    List<RepositoryAnnotator> requestedAnnotatorList,
    Repository<Entity> repo,
    EntityTypeFactory entityTypeFactory) {
  Queue<RepositoryAnnotator> sortedList = new LinkedList<>();
  for (RepositoryAnnotator annotator : requestedAnnotatorList) {
    if (annotator instanceof EffectBasedAnnotator) {
      // Effect annotators bypass dependency resolution and are appended as-is.
      // FIXME: implement correct dependency resolving for Effect annotator
      sortedList.add(annotator);
    } else if (!sortedList.contains(annotator)) {
      // NOTE(review): stores the current annotator in an instance field; presumably read by
      // getSingleAnnotatorDependencyList — confirm this side effect is intentional.
      requestedAnnotator = annotator;
      // The resolver returns a new queue containing the annotator plus its dependencies.
      sortedList =
          getSingleAnnotatorDependencyList(
              annotator, availableAnnotatorList, sortedList, repo.getEntityType(), entityTypeFactory);
    }
  }
  return sortedList;
}
/**
 * Retrieves an entity from the cache or the underlying repository.
 *
 * @param repository the underlying repository
 * @param id the ID of the entity to retrieve
 * @return the retrieved Entity, or null if the entity is not present
 * @throws com.google.common.util.concurrent.UncheckedExecutionException if the repository throws
 *     an error when loading the entity
 */
public Entity get(Repository<Entity> repository, Object id) {
  EntityType entityType = repository.getEntityType();
  Optional<Map<String, Object>> dehydrated = getEntityCache(repository).getUnchecked(id);
  return dehydrated.map(values -> entityHydration.hydrate(values, entityType)).orElse(null);
}
/**
 * Gets the existing entity cache for a {@link Repository} or creates a new one if no cache
 * exists yet.
 *
 * @param repository the Repository used to create a new cache if none found, otherwise only the
 *     id of the repository is used to look up the existing cache
 * @return the LoadingCache for the repository
 */
private LoadingCache<Object, Optional<Map<String, Object>>> getEntityCache(
    Repository<Entity> repository) {
  String id = repository.getEntityType().getId();
  // computeIfAbsent replaces the containsKey/putIfAbsent/get sequence: it is atomic on a
  // concurrent map and never builds a cache that is immediately discarded by a racing thread.
  return caches.computeIfAbsent(id, key -> createEntityCache(repository));
}
@Override @PreAuthorize("hasAnyRole('ROLE_SU, ROLE_PLUGIN_READ_indexmanager')") public List<EntityType> getIndexedEntities() { // collect indexed repos List<EntityType> indexedEntityTypeList = new ArrayList<>(); dataService .getEntityTypeIds() .forEach( entityTypeId -> { Repository<Entity> repository = dataService.getRepository(entityTypeId); if (repository != null && repository.getCapabilities().contains(RepositoryCapability.INDEXABLE)) { indexedEntityTypeList.add(repository.getEntityType()); } }); // sort indexed repos by entity label indexedEntityTypeList.sort(Comparator.comparing(EntityType::getLabel)); return indexedEntityTypeList; }
private long applyMappingsInternal(MappingTarget mappingTarget, Repository<Entity> targetRepo, Progress progress) { progress.status("Applying mappings to repository [" + targetRepo.getEntityType().getId() + "]"); long result = applyMappingsToRepositories(mappingTarget, targetRepo, progress); if (hasSelfReferences(targetRepo.getEntityType())) { progress.status("Self reference found, applying the mapping for a second time to set references"); applyMappingsToRepositories(mappingTarget, targetRepo, progress); } progress.status("Done applying mappings to repository [" + targetRepo.getEntityType().getId() + "]"); return result; }
/**
 * Maps one batch of source entities to the target entity type and writes them to the target
 * repository, then updates progress and the running entity counter.
 */
private void processBatch(
    EntityMapping sourceMapping,
    Repository<Entity> targetRepo,
    Progress progress,
    AtomicLong counter,
    boolean canAdd,
    List<Entity> entities) {
  List<Entity> mapped = mapEntities(sourceMapping, targetRepo.getEntityType(), entities);
  if (canAdd) {
    targetRepo.add(mapped.stream());
  } else {
    targetRepo.upsertBatch(mapped);
  }
  counter.addAndGet(entities.size());
  progress.increment(1);
}
/**
 * Maps one batch of source entities to the target entity type (resolving references up to the
 * given depth) and writes them to the target repository, then updates progress and the counter.
 */
private void processBatch(
    EntityMapping sourceMapping,
    Repository<Entity> targetRepo,
    Progress progress,
    AtomicLong counter,
    boolean canAdd,
    List<Entity> entities,
    int depth) {
  List<Entity> mapped = mapEntities(sourceMapping, targetRepo.getEntityType(), entities, depth);
  if (canAdd) {
    targetRepo.add(mapped.stream());
  } else {
    targetRepo.upsertBatch(mapped);
  }
  counter.addAndGet(entities.size());
  progress.increment(1);
}
/**
 * Applies the mappings to the target repository; when the target entity type references itself,
 * the mapping runs a second time so self references can be filled in.
 *
 * @return the number of entities written by the first mapping pass
 */
private long applyMappingsInternal(
    MappingTarget mappingTarget, Repository<Entity> targetRepo, Progress progress, int depth) {
  String targetId = targetRepo.getEntityType().getId();
  progress.status("Applying mappings to repository [" + targetId + "]");
  long result = applyMappingsToRepositories(mappingTarget, targetRepo, progress, depth);
  if (hasSelfReferences(targetRepo.getEntityType())) {
    progress.status(
        "Self reference found, applying the mapping for a second time to set references");
    // Second pass; its count is intentionally not added to the result.
    applyMappingsToRepositories(mappingTarget, targetRepo, progress, depth);
  }
  progress.status("Done applying mappings to repository [" + targetId + "]");
  return result;
}
/** * Retrieves a list of entities from the cache. If the cache doesn't yet exist, will create the * cache. * * @param repository the underlying repository, used to create the cache loader or to retrieve the * existing cache * @param ids {@link Iterable} of the ids of the entities to retrieve * @return List containing the retrieved entities, missing values are excluded * @throws RuntimeException if the cache failed to load the entities */ public List<Entity> getBatch(Repository<Entity> repository, Iterable<Object> ids) { try { return getEntityCache(repository) .getAll(ids) .values() .stream() .filter(Optional::isPresent) .map(Optional::get) .map(e -> entityHydration.hydrate(e, repository.getEntityType())) .collect(Collectors.toList()); } catch (ExecutionException exception) { // rethrow unchecked if (exception.getCause() != null && exception.getCause() instanceof RuntimeException) { throw (RuntimeException) exception.getCause(); } throw new MolgenisDataException(exception); } }
/**
 * Startup hook for the indexing subsystem: when the Attribute index is missing, schedules a
 * (re)index of every repository; otherwise only re-registers index actions for index jobs that
 * previously FAILED.
 */
public void bootstrap() {
  if (!indexService.hasIndex(attrMetadata)) {
    // Fixed typo in log message: "asuming" -> "assuming".
    LOG.debug(
        "No index for Attribute found, assuming missing index, schedule (re)index for all entities");
    metaDataService
        .getRepositories()
        .forEach(repo -> indexActionRegisterService.register(repo.getEntityType(), null));
    LOG.debug("Done scheduling (re)index jobs for all entities");
  } else {
    LOG.debug("Index for Attribute found, index is present, no (re)index needed");
    // Collected eagerly — presumably to avoid registering new index actions while the query
    // stream is still open; confirm before changing to a direct forEach on the stream.
    List<IndexJobExecution> failedIndexJobs =
        dataService
            .findAll(
                IndexJobExecutionMeta.INDEX_JOB_EXECUTION,
                new QueryImpl<IndexJobExecution>().eq(JobExecutionMetaData.STATUS, FAILED),
                IndexJobExecution.class)
            .collect(Collectors.toList());
    failedIndexJobs.forEach(this::registerNewIndexActionForDirtyJobs);
  }
}
// Effect-creating annotators write their output to a separate target entity type rather than
// to the annotated repository itself, so resolve that target's id here.
// NOTE(review): fragment — `entityTypeId`, `annotator` and `repository` are declared in the
// enclosing method, which is outside this view.
if (annotator instanceof EffectCreatingAnnotator)
  entityTypeId =
      ((EffectCreatingAnnotator) annotator)
          .getTargetEntityType(repository.getEntityType())
          .getId();
/**
 * Returns the repository for the given entity type id, creating it from the target metadata
 * when it does not exist yet; when it does exist, its metadata is validated for compatibility.
 *
 * @param entityTypeId id of the target entity type
 * @param targetMetadata metadata used to create or validate the target repository
 * @return the (possibly newly created) target repository
 */
private Repository<Entity> getTargetRepository(String entityTypeId, EntityType targetMetadata) {
  if (dataService.hasRepository(entityTypeId)) {
    Repository<Entity> existing = dataService.getRepository(entityTypeId);
    compareTargetMetadatas(existing.getEntityType(), targetMetadata);
    return existing;
  }
  return addTargetEntityType(targetMetadata);
}
/**
 * Returns the repository for the given entity type id, creating it from the target metadata
 * when it does not exist yet; when it does exist, its metadata is validated for compatibility.
 *
 * @param entityTypeId id of the target entity type
 * @param targetMetadata metadata used to create or validate the target repository
 * @return the (possibly newly created) target repository
 */
private Repository<Entity> getTargetRepository(String entityTypeId, EntityType targetMetadata) {
  if (dataService.hasRepository(entityTypeId)) {
    Repository<Entity> existing = dataService.getRepository(entityTypeId);
    compareTargetMetadatas(existing.getEntityType(), targetMetadata);
    return existing;
  }
  return addTargetEntityType(targetMetadata);
}
private void createInputRepository(Repository<Entity> inputRepository) { // Add the original input dataset to database dataService.getMeta().addEntityType(inputRepository.getEntityType()); Repository<Entity> target = dataService.getRepository(inputRepository.getName()); inputRepository.forEachBatched(entities -> target.add(entities.stream()), BATCH_SIZE); }
/**
 * Creates a new Entity cache.
 *
 * @param repository the {@link Repository} to load the entities from
 * @return newly created LoadingCache
 */
private LoadingCache<Object, Optional<Map<String, Object>>> createEntityCache(
    Repository<Entity> repository) {
  String entityTypeId = repository.getEntityType().getId();
  Caffeine<Object, Object> builder =
      Caffeine.newBuilder().recordStats().expireAfterAccess(10, MINUTES);
  // Metadata entity types get an unbounded cache; regular data entity types are size-capped.
  if (!MetaDataService.isMetaEntityType(repository.getEntityType())) {
    builder.maximumSize(MAX_CACHE_SIZE_PER_ENTITY);
  }
  LoadingCache<Object, Optional<Map<String, Object>>> entityCache =
      CaffeinatedGuava.build(builder, createCacheLoader(repository));
  GuavaCacheMetrics.monitor(meterRegistry, entityCache, "l2." + entityTypeId);
  return entityCache;
}
/**
 * Creates a new query cache for the given repository, bounded in size, expiring after access,
 * and monitored under the "l3." metrics prefix.
 *
 * @param repository the {@link Repository} whose query results are cached
 * @return newly created query LoadingCache
 */
private LoadingCache<Query<Entity>, List<Object>> createQueryCache(
    Repository<Entity> repository) {
  LOG.trace("Creating Query cache for repository {}", repository.getName());
  Caffeine<Object, Object> builder =
      Caffeine.newBuilder()
          .recordStats()
          .maximumSize(MAX_CACHE_SIZE_PER_QUERY)
          .expireAfterAccess(10, MINUTES);
  LoadingCache<Query<Entity>, List<Object>> queryCache =
      CaffeinatedGuava.build(builder, createCacheLoader(repository));
  GuavaCacheMetrics.monitor(meterRegistry, queryCache, "l3." + repository.getEntityType().getId());
  return queryCache;
}
/**
 * Creates and initializes a SortaJobExecution for matching the given input data against the
 * given ontology: generates a result entity name, creates the input and (empty) result
 * repositories as system, and grants the current user write-meta permissions on both.
 *
 * @param inputData repository holding the uploaded input terms
 * @param jobName display name for the job, also used for the result repository label
 * @param ontologyIri IRI of the ontology to match against
 * @return the initialized (not yet persisted/started) SortaJobExecution
 */
private SortaJobExecution createJobExecution(
    Repository<Entity> inputData, String jobName, String ontologyIri) {
  // One generated id doubles as the job identifier and the result entity name.
  String resultEntityName = idGenerator.generateId();
  SortaJobExecution sortaJobExecution = sortaJobExecutionFactory.create();
  sortaJobExecution.setIdentifier(resultEntityName);
  sortaJobExecution.setName(jobName);
  // NOTE(review): the original code assigned userAccountService.getCurrentUser() to an unused
  // local. The call is kept (without the dead variable) in case it intentionally fails fast
  // when no user is authenticated — confirm and remove if it has no side effect.
  userAccountService.getCurrentUser();
  sortaJobExecution.setSourceEntityName(inputData.getName());
  sortaJobExecution.setDeleteUrl(getSortaServiceMenuUrl() + "/delete/" + resultEntityName);
  sortaJobExecution.setResultEntityName(resultEntityName);
  sortaJobExecution.setThreshold(DEFAULT_THRESHOLD);
  sortaJobExecution.setOntologyIri(ontologyIri);
  // Repository creation requires system privileges.
  RunAsSystemAspect.runAsSystem(
      () -> {
        createInputRepository(inputData);
        createEmptyResultRepository(jobName, resultEntityName, inputData.getEntityType());
      });
  EntityType resultEntityType = entityTypeFactory.create(resultEntityName);
  permissionSystemService.giveUserWriteMetaPermissions(
      asList(inputData.getEntityType(), resultEntityType));
  return sortaJobExecution;
}