switch (attribute.getDataType()) { case DATE: return entity.getLocalDate(attributeName); case DATE_TIME: return entity.getInstant(attributeName); case BOOL: return entity.getBoolean(attributeName); case DECIMAL: return entity.getDouble(attributeName); case LONG: return entity.getLong(attributeName); case INT: return entity.getInt(attributeName); case HYPERLINK: case ENUM: case EMAIL: case STRING: return entity.getString(attributeName); case CATEGORICAL: case XREF: case FILE: Entity refEntity = entity.getEntity(attributeName); if (refEntity != null) return refEntity.getIdValue(); else return ""; case CATEGORICAL_MREF: case MREF: List<String> mrefValues = newArrayList(); for (Entity mrefEntity : entity.getEntities(attributeName)) {
NumberFormat format = NumberFormat.getNumberInstance(); format.setMaximumFractionDigits(2); Entity inputEntity = resultEntity.getEntity(MatchingTaskContentMetaData.INPUT_TERM); Entity ontologyTermEntity = sortaService.getOntologyTermEntity( resultEntity.getString(MatchingTaskContentMetaData.MATCHED_TERM), sortaJobExecution.getOntologyIri()); .getAttributeNames() .forEach( attributeName -> { if (!attributeName.equalsIgnoreCase(SortaCsvRepository.ALLOWED_IDENTIFIER)) { row.set(attributeName, inputEntity.get(attributeName)); row.set( OntologyTermMetaData.ONTOLOGY_TERM_NAME, ontologyTermEntity.getString(OntologyTermMetaData.ONTOLOGY_TERM_NAME)); row.set( OntologyTermMetaData.ONTOLOGY_TERM_IRI, ontologyTermEntity.getString(OntologyTermMetaData.ONTOLOGY_TERM_IRI)); row.set( MatchingTaskContentMetaData.VALIDATED, resultEntity.getBoolean(MatchingTaskContentMetaData.VALIDATED)); Double score = resultEntity.getDouble(MatchingTaskContentMetaData.SCORE); if (score != null) { row.set(MatchingTaskContentMetaData.SCORE, format.format(score));
/**
 * Marks the given job execution as failed after an unexpected application termination:
 * sets the FAILED status and progress message, appends a failure line to the existing
 * log (truncating the middle if it exceeds {@code MAX_LOG_LENGTH}), and persists the entity.
 *
 * @param jobExecutionEntity the job execution entity to mark as failed and update
 */
private void setFailed(Entity jobExecutionEntity) {
  jobExecutionEntity.set(STATUS, FAILED.toString());
  jobExecutionEntity.set(PROGRESS_MESSAGE, "Application terminated unexpectedly");

  // Append the failure marker to whatever log content already exists.
  StringBuilder logBuilder = new StringBuilder();
  Object existingLog = jobExecutionEntity.get(LOG);
  if (!isEmpty(existingLog)) {
    logBuilder.append(existingLog).append('\n');
  }
  logBuilder.append("FAILED - Application terminated unexpectedly");

  // Keep the log within bounds: drop the middle and insert a truncation banner.
  String abbreviatedLog =
      abbreviateMiddle(logBuilder.toString(), "...\n" + TRUNCATION_BANNER + "\n...", MAX_LOG_LENGTH);
  jobExecutionEntity.set(LOG, abbreviatedLog);

  dataService.update(jobExecutionEntity.getEntityType().getId(), jobExecutionEntity);
}
/**
 * Validates that the given entity instance is not referenced by any other entity.
 * Delegates to {@link #validateEntityNotReferencedById} using the entity's id value
 * and entity type.
 *
 * @param entity the entity instance to check for inbound references
 */
@Override
public void validateEntityNotReferenced(Entity entity) {
  validateEntityNotReferencedById(entity.getIdValue(), entity.getEntityType());
}
/**
 * Determines whether the ontology term has at least one node path that is a strict
 * descendant of the given node path (i.e. starts with it but is not equal to it).
 *
 * @param nodePath the candidate ancestor node path
 * @param entity the ontology term entity whose node paths are inspected
 * @return {@code true} if any node path of the entity strictly extends {@code nodePath}
 */
private boolean qualifiedNodePath(String nodePath, Entity entity) {
  for (Entity nodePathEntity : entity.getEntities(OntologyTermMetaData.ONTOLOGY_TERM_NODE_PATH)) {
    String childNodePath = nodePathEntity.getString(OntologyTermNodePathMetaData.NODE_PATH);
    // Strict descendant: shares the prefix but is not the exact same path.
    if (!StringUtils.equals(nodePath, childNodePath) && childNodePath.startsWith(nodePath)) {
      return true;
    }
  }
  return false;
}
/**
 * Removes the tag identified by the given relation IRI and ontology term IRI from the
 * specified attribute of the specified entity type, then persists the updated attribute
 * and propagates the change to the owning entity type entity.
 *
 * @param entity the entity type name owning the attribute
 * @param attribute the attribute name whose tag is removed
 * @param relationIRI IRI of the tag relation to remove
 * @param ontologyTermIRI IRI of the tagged ontology term to remove
 */
@Override public void removeAttributeTag(String entity, String attribute, String relationIRI, String ontologyTermIRI) {
  Entity attributeEntity = findAttributeEntity(entity, attribute);
  Entity> tags = attributeEntity.getEntities(AttributeMetadata.TAGS);
  // Keep only tags that do NOT match the (relationIRI, ontologyTermIRI) pair.
  // NOTE(review): Iterables.filter returns a lazy view; this assumes Entity.set(...)
  // copies or fully iterates the Iterable before the backing tags change — confirm.
  Iterable<Entity> newTags = Iterables.filter(tags, e -> !isSameTag(relationIRI, ontologyTermIRI, e));
  attributeEntity.set(AttributeMetadata.TAGS, newTags);
  dataService.update(ATTRIBUTE_META_DATA, attributeEntity);
  // Propagate the modified attribute back onto the entity type that owns it.
  updateEntityTypeEntityWithNewAttributeEntity(entity, attribute, attributeEntity);
}
/**
 * Reconstructs a {@link MappingProject} domain object from its repository entity,
 * including its owner and all mapping targets.
 *
 * @param mappingProjectEntity entity with {@link MappingProjectMetaData} metadata
 * @return fully reconstructed MappingProject
 */
private MappingProject toMappingProject(Entity mappingProjectEntity) {
  String projectId = mappingProjectEntity.getString(MappingProjectMetaData.IDENTIFIER);
  String projectName = mappingProjectEntity.getString(MappingProjectMetaData.NAME);
  User projectOwner = mappingProjectEntity.getEntity(MappingProjectMetaData.OWNER, User.class);

  // Materialize the target entities and delegate their reconstruction.
  List<Entity> targetEntities =
      Lists.newArrayList(mappingProjectEntity.getEntities(MappingProjectMetaData.MAPPING_TARGETS));
  List<MappingTarget> targets = mappingTargetRepo.toMappingTargets(targetEntities);

  return new MappingProject(projectId, projectName, projectOwner, targets);
}
/**
 * Annotates the entity with GoNL genome-wide allele frequencies (AF) and genotype
 * counts (GTC), one comma-separated value per alternative allele; alleles without a
 * GoNL match are rendered as ".". Both attributes are set to {@code null} when there
 * is no match at all or the entity has no ALT attribute.
 *
 * @param entity the variant entity to annotate (mutated in place)
 * @param refMatches GoNL reference entities matching this variant's position
 */
private void setGoNLFrequencies(Entity entity, List<Entity> refMatches) {
  String afs = null;
  String gtcs = null;
  if (hasAltAttribute(entity)) {
    // One element per alt allele; null where no GoNL record matched that allele.
    List<Entity> alleleMatches = computeAlleleMatches(entity, refMatches);
    if (alleleMatches.stream().anyMatch(Objects::nonNull)) {
      // AF = AC / AN per allele.
      // NOTE(review): getInt(INFO_AN) is auto-unboxed — a null AN would NPE, and
      // AN == 0 yields Infinity; assumed GoNL records always carry a positive AN — confirm.
      afs = alleleMatches.stream()
          .map(gonl -> gonl == null ? "." : Double.toString(
              Double.valueOf(gonl.getString(INFO_AC)) / gonl.getInt(INFO_AN)))
          .collect(Collectors.joining(","));
      //update GTC field to separate allele combinations by pipe instead of comma, since we use comma to separate alt allele info
      gtcs = alleleMatches.stream()
          .map(gonl -> gonl == null ? "." : gonl.getString(INFO_GTC).replace(",", "|"))
          .collect(Collectors.joining(","));
    }
  }
  entity.set(GONL_GENOME_AF, afs);
  entity.set(GONL_GENOME_GTC, gtcs);
}
/**
 * Reconstructs a {@link MappingProject} domain object from its repository entity,
 * including its configured depth (defaulting to 3 when absent) and all mapping targets.
 *
 * @param mappingProjectEntity entity with {@link MappingProjectMetaData} metadata
 * @return fully reconstructed MappingProject
 */
private MappingProject toMappingProject(Entity mappingProjectEntity) {
  String projectId = mappingProjectEntity.getString(MappingProjectMetaData.IDENTIFIER);
  String projectName = mappingProjectEntity.getString(MappingProjectMetaData.NAME);

  // Fall back to a depth of 3 when none is stored.
  Integer storedDepth = mappingProjectEntity.getInt(DEPTH);
  int depth = storedDepth != null ? storedDepth : 3;

  // Materialize the target entities and delegate their reconstruction.
  List<Entity> targetEntities =
      Lists.newArrayList(mappingProjectEntity.getEntities(MappingProjectMetaData.MAPPING_TARGETS));
  List<MappingTarget> targets = mappingTargetRepo.toMappingTargets(targetEntities);

  return new MappingProject(projectId, projectName, depth, targets);
}
/**
 * Adds an RDF triple relating the subject to the referenced entity. If the referenced
 * entity type declares an "IRI" attribute, its value is used as the object IRI;
 * otherwise an IRI is synthesized from the subject IRI and the entity's id value.
 *
 * @param model the RDF model to add the statement to
 * @param subject the subject resource of the statement
 * @param predicate the predicate of the statement
 * @param objectEntity the referenced entity that becomes the object
 */
private void addRelationForXrefTypeAttribute(
    Model model, Resource subject, IRI predicate, Entity objectEntity) {
  IRI object;
  if (contains(objectEntity.getEntityType().getAttributeNames(), "IRI")) {
    // The referenced entity carries its own IRI — use it directly.
    object = valueFactory.createIRI(objectEntity.getString("IRI"));
  } else {
    // Synthesize an IRI under the subject's namespace from the entity id.
    object = valueFactory.createIRI(subject.stringValue() + '/' + objectEntity.getIdValue());
  }
  model.add(subject, predicate, object);
}
/**
 * Renders "name=value" for the given attribute of the failed entity. For the VARIANT
 * attribute (a reference), the referenced entity's id value is rendered instead of
 * the raw attribute value.
 *
 * @param attribute the attribute of the failed entity to render
 * @return the attribute name and value joined by '='; a missing value renders as "null"
 */
private String concatAttributeNameValue(Attribute attribute) {
  String name = attribute.getName();
  Object value;
  if (VARIANT.equals(name)) {
    // Reference attribute: render the referenced entity's id (null-safe).
    Entity variant = getFailedEntity().getEntity(VARIANT);
    value = variant != null ? variant.getIdValue() : null;
  } else {
    value = getFailedEntity().get(name);
  }
  // String concatenation renders null as "null" instead of throwing an NPE,
  // which the previous explicit .toString() calls did on missing values.
  return name + "=" + value;
}
}
/**
 * Builds a query matching entities whose gene name equals the gene name of the
 * given entity.
 *
 * @param entity the entity supplying the gene-name value to match on
 * @return an equality query on {@link EffectsMetaData#GENE_NAME}
 */
@Override
public Query<Entity> createQuery(Entity entity) {
  return EQ(EffectsMetaData.GENE_NAME, entity.get(EffectsMetaData.GENE_NAME));
}
}
/**
 * Returns the next entity, assigning a generated identifier when the entity has
 * none. Entities that already carry an identifier pass through unchanged; otherwise
 * the entity is copied into a {@link DynamicEntity} and given the next counter value.
 */
@Override
public Entity next() {
  Entity entity = iterator.next();
  // Already identified — pass through untouched.
  if (!isEmpty(entity.getString(ALLOWED_IDENTIFIER))) {
    return entity;
  }
  // Copy into a mutable wrapper and assign a generated sequential identifier.
  DynamicEntity identifiedCopy = new DynamicEntity(getEntityType());
  identifiedCopy.set(entity);
  identifiedCopy.set(ALLOWED_IDENTIFIER, String.valueOf(count.incrementAndGet()));
  return identifiedCopy;
}
};
/**
 * Best-effort persistence of a job execution: updates a detached copy of the entity
 * and logs (rather than propagates) any failure, so a broken update cannot abort
 * the surrounding job flow.
 *
 * @param jobExecution the job execution to persist
 */
private void tryUpdate(JobExecution jobExecution) {
  // Work on a detached copy of the entity's values.
  Entity detachedCopy = new DynamicEntity(jobExecution.getEntityType());
  detachedCopy.set(jobExecution);
  try {
    dataService.update(detachedCopy.getEntityType().getId(), detachedCopy);
  } catch (Exception ex) {
    // Deliberately swallowed: persistence of job state is best-effort.
    LOG.warn("Error updating job execution", ex);
  }
}
}
Entity addLexicalScoreToMatchedEntity( Entity inputEntity, Entity ontologyTerm, String ontologyIri) // TODO Change 'Entity ontologyTerm' to 'OntologyTerm ontologyTerm' { double maxNgramScore = 0; double maxNgramIDFScore = 0; for (String inputAttrName : inputEntity.getAttributeNames()) { String queryString = inputEntity.getString(inputAttrName); if (StringUtils.isNotEmpty(queryString) && isAttrNameValidForLexicalMatch(inputAttrName)) { Entity topMatchedSynonymEntity = findSynonymWithHighestNgramScore(ontologyIri, queryString, ontologyTerm); if (maxNgramScore < topMatchedSynonymEntity.getDouble(SCORE)) { maxNgramScore = topMatchedSynonymEntity.getDouble(SCORE); } if (maxNgramIDFScore < topMatchedSynonymEntity.getDouble(COMBINED_SCORE)) { maxNgramIDFScore = topMatchedSynonymEntity.getDouble(COMBINED_SCORE); } } } OntologyTermHitEntity mapEntity = new OntologyTermHitEntity(ontologyTerm, ontologyTermHitMetaData); mapEntity.set(SCORE, maxNgramScore); mapEntity.set(COMBINED_SCORE, maxNgramIDFScore); return mapEntity; }
/**
 * Serializes an entity reference to a compact JSON object containing the entity type
 * id, the serialized id value, and the serialized label value.
 *
 * @param entity the referenced entity
 * @param context the Gson serialization context used to serialize id and label values
 * @return a JsonObject with "__entityTypeId", "__idValue" and "__labelValue" members
 */
private JsonElement serializeReference(Entity entity, JsonSerializationContext context) {
  JsonObject reference = new JsonObject();
  reference.addProperty("__entityTypeId", entity.getEntityType().getId());
  reference.add("__idValue", context.serialize(entity.getIdValue()));
  reference.add("__labelValue", context.serialize(entity.getLabelValue()));
  return reference;
}
/**
 * Evict all entity instances referenced by this entity instance through a bidirectional relation.
 *
 * <p>Two kinds of bidirectional references are evicted from the L1 cache: entities reachable
 * through mappedBy (one-to-many back-reference) attributes, and entities reachable through
 * inversedBy (many-to-one) attributes.
 *
 * @param entity the entity whose references need to be evicted
 */
private void evictBiDiReferencedEntities(Entity entity) {
  // Entities on the "many" side referencing this entity via a mappedBy attribute.
  Stream<EntityKey> backreffingEntities = getEntityType()
      .getMappedByAttributes()
      .flatMap(
          mappedByAttr -> stream(entity.getEntities(mappedByAttr.getName()).spliterator(), false))
      .map(EntityKey::create);
  // Entities on the "one" side referenced via inversedBy attributes; null refs are skipped.
  Stream<EntityKey> manyToOneEntities = getEntityType()
      .getInversedByAttributes()
      .map(inversedByAttr -> entity.getEntity(inversedByAttr.getName()))
      .filter(Objects::nonNull)
      .map(EntityKey::create);
  l1Cache.evict(Stream.concat(backreffingEntities, manyToOneEntities));
}
}