/**
 * Validates that no term definition in the ontology uses a code whose
 * specialisation level is greater than the given level of the archetype
 * under validation; each offending code is reported as a VONSD error.
 *
 * @param defList ontology term definitions, one entry per language
 * @param errors  collector the VONSD validation errors are appended to
 * @param level   specialisation level of the archetype being validated
 */
private void checkOntologyDefinitions(List<OntologyDefinitions> defList,
        List<ValidationError> errors, int level) {
    for (OntologyDefinitions defs : defList) {
        for (ArchetypeTerm term : defs.getDefinitions()) {
            // Codes deeper than the archetype's own level are not allowed.
            if (hasGreaterSpecialisationLevel(term.getCode(), level)) {
                errors.add(new ValidationError(ErrorType.VONSD, null,
                        term.getCode(), level));
            }
        }
    }
}
/**
 * Looks up the "text" item of the archetype term identified by the given
 * code in the given language.
 *
 * @param ar       archetype whose ontology is queried
 * @param codeStr  term code to look up
 * @param language language code of the requested translation
 * @return the term's text, or {@code null} when the term is not defined
 */
private static String getText(Archetype ar, String codeStr, String language) {
    ArchetypeTerm term = getArchetypeTerm(ar, codeStr, language);
    return term == null ? null : term.getText();
}
/**
 * Convenience constructor: delegates to {@code ArchetypeTerm(String code)}
 * for code validation, then stores the two most common term items in the
 * item map.
 *
 * @param code        term code, not null or empty (atNNNN or acNNNN codes
 *                    depending on usage context)
 * @param text        stored in the item map under the {@code TEXT} key
 * @param description stored in the item map under the {@code DESCRIPTION} key
 */
public ArchetypeTerm(String code, String text, String description){
    this(code);
    addItem(TEXT, text);
    addItem(DESCRIPTION, description);
}
/**
 * Builds one {@code ArchetypeTermVO} per (language, term) pair found in
 * this archetype's ontology term definitions.
 *
 * @return value objects carrying archetype id, code, language, text and
 *         description for every defined term
 */
public Collection<ArchetypeTermVO> generateArchetypeTerms() {
    Collection<ArchetypeTermVO> result = new ArrayList<>();
    for (OntologyDefinitions definitions : ar.getOntology().getTermDefinitionsList()) {
        String language = definitions.getLanguage();
        for (ArchetypeTerm term : definitions.getDefinitions()) {
            result.add(ArchetypeTermVO.builder()
                    .archetypeId(ar.getArchetypeId().getValue())
                    .code(term.getCode())
                    .language(language)
                    .text(term.getText())
                    .description(term.getDescription())
                    .build());
        }
    }
    return result;
}
}
// Compare the child archetype's term definition for this node against the
// parent's — first in the primary language, then (nested below) in the
// translation language.
// NOTE(review): fragment is truncated mid-method; closing braces are outside
// this view.
// NOTE(review): .equals() is invoked on the child's term BEFORE the null
// check on the next line — if termDefinition returns null for the primary
// language this throws NPE; confirm it cannot be null here.
if (!archetype.getOntology().termDefinition(langPrim, ccobj.getNodeId()).equals(
        parentArchetype.getOntology().termDefinition(langPrim, ccobj.getNodeId()))) {
    if (archetype.getOntology().termDefinition(langPrim, ccobj.getNodeId()) !=null) {
        // Log both sides' text and description to aid debugging the mismatch.
        log.debug("child desc: "+archetype.getOntology().termDefinition(langPrim, ccobj.getNodeId()).getText());
        log.debug("child desc: "+archetype.getOntology().termDefinition(langPrim, ccobj.getNodeId()).getDescription());
        log.debug("parent desc: "+ parentArchetype.getOntology().termDefinition(langPrim, ccobj.getNodeId()).getText());
        log.debug("parent desc: "+ parentArchetype.getOntology().termDefinition(langPrim, ccobj.getNodeId()).getDescription());
        // Repeat the comparison for the translation's language code.
        if (!archetype.getOntology().termDefinition(trans.getValue().getLanguage().getCodeString(), ccobj.getNodeId()).equals(
                parentArchetype.getOntology().termDefinition(trans.getValue().getLanguage().getCodeString(), ccobj.getNodeId()))) {
// Generated parser code (JavaCC style, judging by jj_consume_token/jj_ntk):
// appears to parse one term definition of the shape
//   [code] = < text = <...>; description = <...>; comment = <...>; key = <...> >
// NOTE(review): fragment is truncated mid-switch; the enclosing production
// and the switch cases/closing braces are outside this view.
jj_consume_token(SYM_L_BRACKET);
code = local_code_value();
term = new ArchetypeTerm(code);
jj_consume_token(SYM_R_BRACKET);
jj_consume_token(SYM_EQ);
jj_consume_token(SYM_LT);
// mandatory "text" item
value = string_value();
term.addItem("text", value);
jj_consume_token(SYM_GT);
switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
// optional "description" item
jj_consume_token(SYM_LT);
value = string_value();
term.addItem("description", value);
jj_consume_token(SYM_GT);
switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
// optional "comment" item
jj_consume_token(SYM_LT);
value = string_value();
term.addItem("comment", value);
jj_consume_token(SYM_GT);
switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
// arbitrary additional key/value items; key is the last matched token image
value = string_value();
key = t.image;
term.addItem(key, value);
jj_consume_token(SYM_GT);
switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
/**
 * Looks up the "description" item of the archetype term identified by the
 * given code in the given language.
 *
 * @param ar       archetype whose ontology is queried
 * @param codeStr  term code to look up
 * @param language language code of the requested translation
 * @return the term's description, or {@code null} when the term is not defined
 */
private static String getDescription(Archetype ar, String codeStr, String language) {
    ArchetypeTerm term = getArchetypeTerm(ar, codeStr, language);
    return term == null ? null : term.getDescription();
}
// Generated parser code (JavaCC style) — duplicate of the fragment above,
// presumably from a second grammar production parsing the same term shape:
//   [code] = < text = <...>; description = <...>; comment = <...>; key = <...> >
// NOTE(review): fragment is truncated mid-switch; cases/closing braces are
// outside this view.
jj_consume_token(SYM_L_BRACKET);
code = local_code_value();
term = new ArchetypeTerm(code);
jj_consume_token(SYM_R_BRACKET);
jj_consume_token(SYM_EQ);
jj_consume_token(SYM_LT);
// mandatory "text" item
value = string_value();
term.addItem("text", value);
jj_consume_token(SYM_GT);
switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
// optional "description" item
jj_consume_token(SYM_LT);
value = string_value();
term.addItem("description", value);
jj_consume_token(SYM_GT);
switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
// optional "comment" item
jj_consume_token(SYM_LT);
value = string_value();
term.addItem("comment", value);
jj_consume_token(SYM_GT);
switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
// arbitrary additional key/value items; key is the last matched token image
value = string_value();
key = t.image;
term.addItem(key, value);
jj_consume_token(SYM_GT);
switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
/**
 * Resolves a human-readable description for a constrained object: first from
 * the given archetype's ontology, then from the concept term of a referenced
 * archetype in {@code archetypeMap}, finally falling back to the raw node id.
 *
 * @param ar             archetype whose ontology is consulted first
 * @param constrainedObj object whose node id identifies the term
 * @param language       language code of the requested translation
 * @return a description, never {@code null} as long as the node id is non-null
 */
private String getDescription(Archetype ar, CObject constrainedObj, String language) {
    String desc = getDescription(ar, constrainedObj.getNodeId(), language);
    if (desc != null) {
        return desc;
    }
    Archetype archetype = archetypeMap.get(constrainedObj.getNodeId());
    if (archetype == null) {
        // No referenced archetype: fall back to the raw node id.
        return constrainedObj.getNodeId();
    }
    String conceptCode = archetype.getConcept();
    ArchetypeTerm term = archetype.getOntology().termDefinition(language, conceptCode);
    // Guard against a missing concept term in the requested language —
    // the original code called term.getDescription() unconditionally and
    // would throw NPE here.
    return term != null ? term.getDescription() : constrainedObj.getNodeId();
}
/** * Checks for codes in the ontology that are there more than once * * @param defList * @param errors * @param archetype */ private void checkForDoubleCodes(List<OntologyDefinitions> defList, List<ValidationError> errors, Archetype archetype) { // now check for each code if it exists in the definition ValidationError error = null; for (OntologyDefinitions defs : defList) { HashSet<String> foundCodes = new HashSet<String>(); for (ArchetypeTerm term : defs.getDefinitions()) { if (foundCodes.contains(term.getCode())) { // at the moment, we only want to report on unused codes // that are on the same specialisation depth as this archetype. error = new ValidationError(ErrorType.VOKU, null, term.getCode(), defs.getLanguage()); errors.add(error); } else { foundCodes.add(term.getCode()); } } } }
// Serialize one term definition: an <items code="..."> element appended to
// the term_definitions element, with the term's item map printed as children.
Element items = new Element("items", defaultNamespace);
trmDefinitions.getChildren().add(items);
items.setAttribute("code", term.getCode());
printStringMap("items", term.getItems(), items);
// Same serialization for a constraint definition under constraint_definitions.
// NOTE(review): 'items' is declared twice in this fragment; in the full file
// these two halves presumably live in separate scopes or methods.
Element items = new Element("items", defaultNamespace);
consDefinitions.getChildren().add(items);
items.setAttribute("code", term.getCode());
printStringMap("items", term.getItems(), items);
/**
 * Fetches the display text of the term with the given node id, resolved in
 * the archetype's original language.
 *
 * @param nodeId    term code to look up
 * @param archetype archetype whose ontology is searched
 * @return the term's text
 * @throws Exception if the ontology defines no term for the code/language
 */
String retrieveArchetypeTermText(String nodeId, Archetype archetype) throws Exception {
    String lang = archetype.getOriginalLanguage().getCodeString();
    ArchetypeTerm definition = archetype.getOntology().termDefinition(lang, nodeId);
    if (definition != null) {
        return definition.getText();
    }
    throw new Exception("term of given code: " + nodeId + ", language: " + lang + " not found..");
}
/**
 * Convenience constructor: delegates to {@code ArchetypeTerm(String code)}
 * for code validation, then stores the two most common term items in the
 * item map.
 *
 * @param code        term code, not null or empty (atNNNN or acNNNN codes
 *                    depending on usage context)
 * @param text        stored in the item map under the {@code TEXT} key
 * @param description stored in the item map under the {@code DESCRIPTION} key
 */
public ArchetypeTerm(String code, String text, String description){
    this(code);
    addItem(TEXT, text);
    addItem(DESCRIPTION, description);
}
/**
 * Merges term definitions into the two-level map: language -> (code -> term).
 * Existing per-language maps are reused; later definitions for the same code
 * overwrite earlier ones.
 *
 * @param map  target map, updated in place
 * @param list source definitions, one entry per language; {@code null} is a no-op
 */
private void loadDefs(Map<String, Map<String, ArchetypeTerm>> map,
        List<OntologyDefinitions> list) {
    if (list == null) {
        return;
    }
    for (OntologyDefinitions defs : list) {
        Map<String, ArchetypeTerm> codeMap = map.get(defs.getLanguage());
        if (codeMap == null) {
            codeMap = new HashMap<>();
            // Insert the new language map once, instead of re-putting an
            // already-present map on every iteration as the original did.
            map.put(defs.getLanguage(), codeMap);
        }
        for (ArchetypeTerm item : defs.getDefinitions()) {
            codeMap.put(item.getCode(), item);
        }
    }
}
/**
 * Merges term definitions into the two-level map: language -> (code -> term).
 * Existing per-language maps are reused; later definitions for the same code
 * overwrite earlier ones.
 *
 * @param map  target map, updated in place
 * @param list source definitions, one entry per language; {@code null} is a no-op
 */
private void loadDefs(Map<String, Map<String, ArchetypeTerm>> map,
        List<OntologyDefinitions> list) {
    if (list == null) {
        return;
    }
    for (OntologyDefinitions defs : list) {
        Map<String, ArchetypeTerm> codeMap = map.get(defs.getLanguage());
        if (codeMap == null) {
            codeMap = new HashMap<>();
            // Insert the new language map once, instead of re-putting an
            // already-present map on every iteration as the original did.
            map.put(defs.getLanguage(), codeMap);
        }
        for (ArchetypeTerm item : defs.getDefinitions()) {
            codeMap.put(item.getCode(), item);
        }
    }
}
/** * Checks for unused codes in the ontology * * @param defList * @param errors * @param archetype */ private void checkForUnusedCodes(List<OntologyDefinitions> defList, List<ValidationError> errors, Archetype archetype, Set<String> actuallyUsedCodes) { int specialisationDepth = StringUtils.countMatches(archetype.getArchetypeId().domainConcept(), "-"); // now check for each code if it exists in the definition ValidationError error = null; for (OntologyDefinitions defs : defList) { for (ArchetypeTerm term : defs.getDefinitions()) { if (!actuallyUsedCodes.contains(term.getCode())) { // at the moment, we only want to report on unused codes // that are on the same specialisation depth as this archetype. if (specialisationDepth == StringUtils.countMatches(term.getCode(), ".")) { error = new ValidationError(ErrorType.WOUC, null, term.getCode(), defs.getLanguage()); errors.add(error); } } } } }
/**
 * Checks that the given code is defined in the parent archetype's term
 * definitions for the parent's original language; if either the language
 * section or the code is missing, a single VATDF/INPARENT error is reported.
 *
 * @param parentArchetype parent whose ontology is searched
 * @param code            term code expected to exist in the parent
 * @param errors          collector the VATDF error is appended to
 */
private void checkTermExistsInParent(Archetype parentArchetype, String code,
        List<ValidationError> errors) {
    String lang = parentArchetype.getOriginalLanguage().getCodeString();
    OntologyDefinitions priDefs = null;
    for (OntologyDefinitions defs : parentArchetype.getOntology().getTermDefinitionsList()) {
        if (lang.equals(defs.getLanguage())) {
            // Last matching section wins, preserving original behavior.
            priDefs = defs;
        }
    }
    boolean defined = false;
    if (priDefs != null) {
        Set<String> definedCodes = new LinkedHashSet<>();
        for (ArchetypeTerm term : priDefs.getDefinitions()) {
            definedCodes.add(term.getCode());
        }
        defined = definedCodes.contains(code);
    }
    // Single error-emission site replaces the duplicated construction in the
    // original's two branches (missing language section / missing code).
    if (!defined) {
        errors.add(new ValidationError(ErrorType.VATDF, "INPARENT", code));
    }
}