/**
 * Merges the term definitions from the given ontology definitions list into
 * the per-language lookup map (language code -> term code -> term). Entries
 * with a code already present overwrite the earlier mapping.
 *
 * @param map  target map keyed by language, each value keyed by term code
 * @param list ontology definitions to merge; null is treated as a no-op
 */
private void loadDefs(Map<String, Map<String, ArchetypeTerm>> map, List<OntologyDefinitions> list) {
    if (list == null) {
        return;
    }
    for (OntologyDefinitions defs : list) {
        // reuse an existing per-language map where one was already created
        Map<String, ArchetypeTerm> termsByCode = map.get(defs.getLanguage());
        if (termsByCode == null) {
            termsByCode = new HashMap<String, ArchetypeTerm>();
        }
        for (ArchetypeTerm term : defs.getDefinitions()) {
            termsByCode.put(term.getCode(), term);
        }
        map.put(defs.getLanguage(), termsByCode);
    }
}
/**
 * Merges the term definitions in {@code list} into {@code map}, which maps
 * language code -> (term code -> term). A later term with the same code
 * overwrites an earlier one.
 *
 * @param map  accumulator map keyed by language
 * @param list ontology definitions to load; null is treated as empty
 */
private void loadDefs(Map<String, Map<String, ArchetypeTerm>> map, List<OntologyDefinitions> list) {
    if (list == null) {
        return;
    }
    Map<String, ArchetypeTerm> codeMap = null;
    for (OntologyDefinitions defs : list) {
        // reuse the existing per-language map if one was already created
        codeMap = map.get(defs.getLanguage());
        if (null == codeMap) {
            codeMap = new HashMap<String, ArchetypeTerm>();
        }
        for (ArchetypeTerm item : defs.getDefinitions()) {
            codeMap.put(item.getCode(), item);
        }
        // re-put so a newly created map is registered under its language
        map.put(defs.getLanguage(), codeMap);
    }
}
/**
 * Verifies that no term definition uses a code from a deeper specialisation
 * level than the archetype itself; adds a VONSD validation error for each
 * offending code.
 *
 * @param defList term definitions grouped by language
 * @param errors  collector for validation errors found
 * @param level   specialisation level of the archetype under validation
 */
private void checkOntologyDefinitions(List<OntologyDefinitions> defList, List<ValidationError> errors, int level) {
    for (OntologyDefinitions defs : defList) {
        for (ArchetypeTerm term : defs.getDefinitions()) {
            if (!hasGreaterSpecialisationLevel(term.getCode(), level)) {
                continue;
            }
            errors.add(new ValidationError(ErrorType.VONSD, null, term.getCode(), level));
        }
    }
}
/** * Checks for codes in the ontology that are there more than once * * @param defList * @param errors * @param archetype */ private void checkForDoubleCodes(List<OntologyDefinitions> defList, List<ValidationError> errors, Archetype archetype) { // now check for each code if it exists in the definition ValidationError error = null; for (OntologyDefinitions defs : defList) { HashSet<String> foundCodes = new HashSet<String>(); for (ArchetypeTerm term : defs.getDefinitions()) { if (foundCodes.contains(term.getCode())) { // at the moment, we only want to report on unused codes // that are on the same specialisation depth as this archetype. error = new ValidationError(ErrorType.VOKU, null, term.getCode(), defs.getLanguage()); errors.add(error); } else { foundCodes.add(term.getCode()); } } } }
/**
 * Checks that the given code is defined in the parent archetype's term
 * definitions for the parent's original language. Adds a VATDF error when the
 * language block is missing or the code is not defined in it.
 *
 * @param parentArchetype the specialised archetype's parent
 * @param code            the term code to look up
 * @param errors          collector for validation errors found
 */
private void checkTermExistsInParent(Archetype parentArchetype, String code, List<ValidationError> errors) {
    String lang = parentArchetype.getOriginalLanguage().getCodeString();
    // locate the definitions block for the parent's original language;
    // when several blocks share the language, the last one wins
    OntologyDefinitions primaryDefs = null;
    for (OntologyDefinitions defs : parentArchetype.getOntology().getTermDefinitionsList()) {
        if (lang.equals(defs.getLanguage())) {
            primaryDefs = defs;
        }
    }
    if (primaryDefs == null) {
        errors.add(new ValidationError(ErrorType.VATDF, "INPARENT", code));
        return;
    }
    Set<String> codesInParent = new LinkedHashSet<String>();
    for (ArchetypeTerm term : primaryDefs.getDefinitions()) {
        codesInParent.add(term.getCode());
    }
    if (!codesInParent.contains(code)) {
        errors.add(new ValidationError(ErrorType.VATDF, "INPARENT", code));
    }
}
List<ArchetypeTerm> terms = priDefs.getDefinitions(); for (ArchetypeTerm term : terms) { definedCodesPrimLang.add(term.getCode()); List<ArchetypeTerm> terms = secDefs.getDefinitions(); Set<String> definedCodesSecLang = new LinkedHashSet<String>();
List<ArchetypeTerm> terms = priDefs.getDefinitions(); List<ArchetypeTerm> terms = secDefs.getDefinitions(); Set<String> definedCodesSecLang = new LinkedHashSet<String>();
out.write("items = <"); newline(out); for (ArchetypeTerm term : defs.getDefinitions()) { indent(4, out); out.write("[");
out.write("items = <"); newline(out); for (ArchetypeTerm term : defs.getDefinitions()) { indent(4, out); out.write("[");
trmDefinitions.setAttribute("language", defs.getLanguage()); for (ArchetypeTerm term : defs.getDefinitions()) { Element items = new Element("items", defaultNamespace); trmDefinitions.getChildren().add(items); consDefinitions.setAttribute("language", defs.getLanguage()); for (ArchetypeTerm term : defs.getDefinitions()) { Element items = new Element("items", defaultNamespace); consDefinitions.getChildren().add(items);
/** * Checks for unused codes in the ontology * * @param defList * @param errors * @param archetype */ private void checkForUnusedCodes(List<OntologyDefinitions> defList, List<ValidationError> errors, Archetype archetype, Set<String> actuallyUsedCodes) { int specialisationDepth = StringUtils.countMatches(archetype.getArchetypeId().domainConcept(), "-"); // now check for each code if it exists in the definition ValidationError error = null; for (OntologyDefinitions defs : defList) { for (ArchetypeTerm term : defs.getDefinitions()) { if (!actuallyUsedCodes.contains(term.getCode())) { // at the moment, we only want to report on unused codes // that are on the same specialisation depth as this archetype. if (specialisationDepth == StringUtils.countMatches(term.getCode(), ".")) { error = new ValidationError(ErrorType.WOUC, null, term.getCode(), defs.getLanguage()); errors.add(error); } } } } }
/**
 * Builds a flat collection of {@code ArchetypeTermVO} entries, one per
 * (language, term) pair defined in the archetype's ontology.
 *
 * @return the generated value objects for every term in every language
 */
public Collection<ArchetypeTermVO> generateArchetypeTerms() {
    Collection<ArchetypeTermVO> result = new ArrayList<>();
    for (OntologyDefinitions definitions : ar.getOntology().getTermDefinitionsList()) {
        String language = definitions.getLanguage();
        for (ArchetypeTerm term : definitions.getDefinitions()) {
            result.add(ArchetypeTermVO.builder()
                    .archetypeId(ar.getArchetypeId().getValue())
                    .code(term.getCode())
                    .language(language)
                    .text(term.getText())
                    .description(term.getDescription())
                    .build());
        }
    }
    return result;
}
}