/**
 * Collects the languages of the given ontology definition sections.
 *
 * @param list per-language ontology definition sections to scan
 * @return the distinct languages, in first-seen order (LinkedHashSet)
 */
private Set<String> retrieveLanguageSet(List<OntologyDefinitions> list) {
    // LinkedHashSet preserves the order in which languages first appear.
    Set<String> languages = new LinkedHashSet<String>();
    for (int i = 0; i < list.size(); i++) {
        languages.add(list.get(i).getLanguage());
    }
    return languages;
}
/**
 * Reports a VDL validation error for every repeated occurrence of a language
 * in the given ontology definition sections (one error per extra occurrence).
 *
 * @param errors      collector that duplicate-language errors are appended to
 * @param ontDefsList per-language ontology definition sections to check
 */
private void checkDuplicateLanguage(List<ValidationError> errors, List<OntologyDefinitions> ontDefsList) {
    // HashSet gives O(1) membership instead of ArrayList.contains's O(n) scan;
    // Set.add returns false exactly when the language was already seen, so the
    // separate contains-then-add check is unnecessary.
    Set<String> checkedLanguages = new HashSet<>();
    for (OntologyDefinitions ontDefs : ontDefsList) {
        if (!checkedLanguages.add(ontDefs.getLanguage())) {
            errors.add(new ValidationError(ErrorType.VDL, null, ontDefs.getLanguage()));
        }
    }
}
/**
 * Merges the given ontology definition sections into {@code map}, keyed by
 * language and then by term code. Later entries with the same code overwrite
 * earlier ones. A {@code null} list is treated as "nothing to load".
 *
 * @param map  language -> (code -> term) accumulator, updated in place
 * @param list per-language definition sections; may be null
 */
private void loadDefs(Map<String, Map<String, ArchetypeTerm>> map, List<OntologyDefinitions> list) {
    if (list == null) {
        return;
    }
    for (OntologyDefinitions defs : list) {
        // Reuse the existing per-language map when present, registering a
        // fresh one immediately when it is not.
        Map<String, ArchetypeTerm> codeMap = map.get(defs.getLanguage());
        if (codeMap == null) {
            codeMap = new HashMap<String, ArchetypeTerm>();
            map.put(defs.getLanguage(), codeMap);
        }
        for (ArchetypeTerm term : defs.getDefinitions()) {
            codeMap.put(term.getCode(), term);
        }
    }
}
/**
 * Merges the given ontology definition sections into {@code map}, keyed by
 * language and then by term code. Later entries with the same code overwrite
 * earlier ones. A {@code null} list is a no-op.
 *
 * NOTE(review): this method appears verbatim twice in this file — TODO
 * confirm and remove one copy in a follow-up.
 *
 * @param map  language -> (code -> term) accumulator, updated in place
 * @param list per-language definition sections; may be null
 */
private void loadDefs(Map<String, Map<String, ArchetypeTerm>> map, List<OntologyDefinitions> list) {
    if (list == null) {
        return;
    }
    for (OntologyDefinitions defs : list) {
        String language = defs.getLanguage();
        Map<String, ArchetypeTerm> codeMap = map.get(language);
        if (codeMap == null) {
            codeMap = new HashMap<String, ArchetypeTerm>();
        }
        for (ArchetypeTerm item : defs.getDefinitions()) {
            codeMap.put(item.getCode(), item);
        }
        // (Re-)register the per-language map; harmless when already present.
        map.put(language, codeMap);
    }
}
/** * Checks for codes in the ontology that are there more than once * * @param defList * @param errors * @param archetype */ private void checkForDoubleCodes(List<OntologyDefinitions> defList, List<ValidationError> errors, Archetype archetype) { // now check for each code if it exists in the definition ValidationError error = null; for (OntologyDefinitions defs : defList) { HashSet<String> foundCodes = new HashSet<String>(); for (ArchetypeTerm term : defs.getDefinitions()) { if (foundCodes.contains(term.getCode())) { // at the moment, we only want to report on unused codes // that are on the same specialisation depth as this archetype. error = new ValidationError(ErrorType.VOKU, null, term.getCode(), defs.getLanguage()); errors.add(error); } else { foundCodes.add(term.getCode()); } } } }
/**
 * Verifies that {@code code} is defined in the parent archetype's term
 * definitions for the parent's original language. When the language section
 * is missing, or present but lacking the code, a VATDF/"INPARENT" error is
 * appended to {@code errors}.
 *
 * @param parentArchetype parent archetype whose ontology is consulted
 * @param code            term code expected to exist in the parent
 * @param errors          collector for validation errors found
 */
private void checkTermExistsInParent(Archetype parentArchetype, String code,
        List<ValidationError> errors) {
    String lang = parentArchetype.getOriginalLanguage().getCodeString();
    List<OntologyDefinitions> defList =
            parentArchetype.getOntology().getTermDefinitionsList();
    // Keep the LAST section matching the original language (mirrors the
    // original scan, which did not break on first match).
    OntologyDefinitions primaryDefs = null;
    for (OntologyDefinitions defs : defList) {
        if (lang.equals(defs.getLanguage())) {
            primaryDefs = defs;
        }
    }
    boolean defined = false;
    if (primaryDefs != null) {
        Set<String> parentCodes = new LinkedHashSet<String>();
        for (ArchetypeTerm term : primaryDefs.getDefinitions()) {
            parentCodes.add(term.getCode());
        }
        defined = parentCodes.contains(code);
    }
    if (!defined) {
        errors.add(new ValidationError(ErrorType.VATDF, "INPARENT", code));
    }
}
// NOTE(review): fragment — the start of the ValidationError construction
// before "code, secDefs.getLanguage());" is cut off, and the enclosing
// method is not visible here. Presumably the else-branch reports the code
// against a secondary-language section (secDefs) — TODO confirm against the
// full method before editing.
if (lang.equals(defs.getLanguage())) { priDefs = defs; } else { code, secDefs.getLanguage()); errors.add(error);
// NOTE(review): duplicate of the fragment above — same truncated
// ValidationError construction; enclosing method not visible. TODO confirm
// whether this is a genuine second occurrence or an extraction artifact.
if (lang.equals(defs.getLanguage())) { priDefs = defs; } else { code, secDefs.getLanguage()); errors.add(error);
// Writes the opening of one per-language section, e.g.: ["en"] = <
// (fragment of a serialization routine; the enclosing method is not
// visible here)
indent(2, out); out.write("["); out.write(quoteString(defs.getLanguage())); out.write("] = <"); newline(out);
// Same section-opening write as above — e.g.: ["en"] = <
// (fragment; enclosing method not visible — likely a second serializer
// such as the constraint-definitions counterpart; TODO confirm)
indent(2, out); out.write("["); out.write(quoteString(defs.getLanguage())); out.write("] = <"); newline(out);
// Builds the per-language <term_definitions> and <constraint_definitions>
// XML elements under the ontology element, each tagged with the section's
// language attribute. (Fragment: the enclosing method is not visible here.)
Element trmDefinitions = new Element("term_definitions", defaultNamespace); onto.getChildren().add(trmDefinitions); trmDefinitions.setAttribute("language", defs.getLanguage()); Element consDefinitions = new Element("constraint_definitions", defaultNamespace); onto.getChildren().add(consDefinitions); consDefinitions.setAttribute("language", defs.getLanguage());
/** * Checks for unused codes in the ontology * * @param defList * @param errors * @param archetype */ private void checkForUnusedCodes(List<OntologyDefinitions> defList, List<ValidationError> errors, Archetype archetype, Set<String> actuallyUsedCodes) { int specialisationDepth = StringUtils.countMatches(archetype.getArchetypeId().domainConcept(), "-"); // now check for each code if it exists in the definition ValidationError error = null; for (OntologyDefinitions defs : defList) { for (ArchetypeTerm term : defs.getDefinitions()) { if (!actuallyUsedCodes.contains(term.getCode())) { // at the moment, we only want to report on unused codes // that are on the same specialisation depth as this archetype. if (specialisationDepth == StringUtils.countMatches(term.getCode(), ".")) { error = new ValidationError(ErrorType.WOUC, null, term.getCode(), defs.getLanguage()); errors.add(error); } } } } }
/**
 * Flattens the ontology term definitions of archetype {@code ar} into value
 * objects, producing one {@link ArchetypeTermVO} per (language, code) pair.
 *
 * @return one value object per term definition across all languages
 */
public Collection<ArchetypeTermVO> generateArchetypeTerms() {
    List<ArchetypeTermVO> result = new ArrayList<>();
    // The archetype id is the same for every generated term; hoist it.
    String archetypeId = ar.getArchetypeId().getValue();
    for (OntologyDefinitions definitions : ar.getOntology().getTermDefinitionsList()) {
        String language = definitions.getLanguage();
        for (ArchetypeTerm term : definitions.getDefinitions()) {
            result.add(ArchetypeTermVO.builder()
                    .archetypeId(archetypeId)
                    .code(term.getCode())
                    .language(language)
                    .text(term.getText())
                    .description(term.getDescription())
                    .build());
        }
    }
    return result;
}
}