/**
 * Creates a fresh enhancement instance typed as enhancer:Enhancement and
 * enhancer:TextAnnotation in the metadata graph of the given content item,
 * filling in the default properties (dc:creator and dc:created), so that
 * engines can attach further triples to the returned resource.
 *
 * @param ci the ContentItem under analysis
 * @param engine the Engine performing the analysis
 * @return the URI of the newly created enhancement instance
 */
public static IRI createTextEnhancement(ContentItem ci, EnhancementEngine engine){
    // Delegate to the graph-based variant, using the content item's own URI
    // as the reference for the new enhancement.
    IRI contentItemUri = new IRI(ci.getUri().getUnicodeString());
    return createTextEnhancement(ci.getMetadata(), engine, contentItemUri);
}
/**
/**
 * Create a new instance with the types enhancer:Enhancement and
 * enhancer:TextAnnotation in the metadata-graph of the content
 * item along with default properties (dc:creator and dc:created) and return
 * the IRI of the extraction so that engines can further add.
 *
 * @param ci the ContentItem being under analysis
 * @param engine the Engine performing the analysis
 *
 * @return the URI of the new enhancement instance
 */
// NOTE(review): this method (Javadoc and body) is a byte-identical duplicate
// of the method on the preceding line — likely a duplicated snippet from the
// extraction; two identical definitions in one class would not compile.
// Confirm against the original file.
public static IRI createTextEnhancement(ContentItem ci, EnhancementEngine engine){
    // delegate to the graph-based variant, keyed on the content item's own URI
    return createTextEnhancement(ci.getMetadata(), engine, new IRI(ci.getUri().getUnicodeString()));
}
/**
// Fragment (enclosing method is not visible in this view).
// Create a TextAnnotation attributed to this engine, then record the detected
// language hypothesis and its probability as the annotation's confidence.
IRI textEnhancement = EnhancementEngineHelper.createTextEnhancement(ci, this);
g.add(new TripleImpl(textEnhancement, DC_LANGUAGE, new PlainLiteralImpl(hypothesis.lang)));
g.add(new TripleImpl(textEnhancement, ENHANCER_CONFIDENCE, literalFactory.createTypedLiteral(hypothesis.prob)));
// Fragment: the try block opened here is closed (with the matching unlock)
// outside this view.
// Acquire the content item's write lock before adding enhancement triples.
ci.getLock().writeLock().lock();
try {
    IRI textEnhancement = EnhancementEngineHelper.createTextEnhancement(ci, this);
    // record the detected language and type the annotation as a linguistic system
    g.add(new TripleImpl(textEnhancement, DC_LANGUAGE, new PlainLiteralImpl(language)));
    g.add(new TripleImpl(textEnhancement, DC_TYPE, DCTERMS_LINGUISTIC_SYSTEM));
private void addLemmatizationEnhancement(ContentItem ci, String text, String language, Graph g) throws EngineException { Language lang = new Language(language); // clerezza language for PlainLiterals String lemmatizedContents; try { lemmatizedContents = this.client.lemmatizeContents(text, language); } catch (IOException e) { throw new EngineException("Error while calling the CELI Lemmatizer" + " service (configured URL: " + serviceURL + ")!", e); } catch (SOAPException e) { throw new EngineException("Error wile encoding/decoding the request/" + "response to the CELI lemmatizer service!", e); } // get a write lock before writing the enhancements ci.getLock().writeLock().lock(); try { IRI textEnhancement = EnhancementEngineHelper.createTextEnhancement(ci, this); g.add(new TripleImpl(textEnhancement, CeliLemmatizerEnhancementEngine.hasLemmaForm, new PlainLiteralImpl(lemmatizedContents, lang))); } finally { ci.getLock().writeLock().unlock(); } }
// Fragment: the if block opened here is closed outside this view.
// Only annotate feature structures whose type/features pass the filter.
if (tnfs.checkFeatureStructureAllowed(typeName, fs.getFeatures())) {
    logger.debug(new StringBuilder("Adding ").append(typeName).toString());
    // create a TextAnnotation for the accepted feature structure
    IRI textAnnotation = EnhancementEngineHelper.createTextEnhancement(
            ci, this);
    Graph metadata = ci.getMetadata();
// Fragment: the matching if branch and the close of this else block are
// outside this view.
} else {
    // create the TextAnnotation directly in the given enhancements graph
    // (three-argument variant keyed on ciId) and remember it
    IRI textAnnotation = EnhancementEngineHelper.createTextEnhancement(enhancements, this, ciId);
    textAnnotations.add(textAnnotation);
// Fragment: the try opened here is closed (with its catch/finally) outside
// this view.
try {
    // use the top-ranked language guess
    GuessedLanguage gl = lista.get(0);
    IRI textEnhancement = EnhancementEngineHelper.createTextEnhancement(ci, this);
    // record the guessed language and its confidence on the new annotation
    g.add(new TripleImpl(textEnhancement, DC_LANGUAGE, new PlainLiteralImpl(gl.getLang())));
    g.add(new TripleImpl(textEnhancement, ENHANCER_CONFIDENCE, literalFactory.createTypedLiteral(gl.getConfidence())));
// Fragment (enclosing method is not visible in this view).
// Create a TextAnnotation for the suggested language and type it as a
// linguistic system.
IRI textEnhancement = EnhancementEngineHelper.createTextEnhancement(ci, this);
metadata.add(new TripleImpl(textEnhancement, DC_LANGUAGE, new PlainLiteralImpl(suggestion.getLanguage())));
metadata.add(new TripleImpl(textEnhancement, DC_TYPE, DCTERMS_LINGUISTIC_SYSTEM));
// Fragment (enclosing method is not visible in this view).
// Create a TextAnnotation and attach the selected text (the occurrence name)
// as a language-tagged literal.
IRI textAnnotation = EnhancementEngineHelper.createTextEnhancement(ci, this);
g.add(new TripleImpl(textAnnotation, ENHANCER_SELECTED_TEXT, new PlainLiteralImpl(name, language)));
// Fragment: create a TextAnnotation attributed to this engine (static import
// of EnhancementEngineHelper.createTextEnhancement, presumably — confirm
// against the original file's imports).
IRI textAnnotation = createTextEnhancement(ci, this);
// Fragment: (re)assigns a textAnnotation variable declared outside this view.
// Create the annotation in the enhancements graph and type it as skos:Concept.
textAnnotation = createTextEnhancement(enhancements, this, ciId);
enhancements.add(new TripleImpl(textAnnotation,DC_TYPE,SKOS_CONCEPT));
// Fragment: create a TextAnnotation attributed to this engine; the triples
// added to it follow outside this view.
IRI textAnnotation = EnhancementEngineHelper.createTextEnhancement(ci, this);
// Fragment: the trailing metadata.add(...) call is cut off at the end of
// this view.
IRI enh = createTextEnhancement(ci, this);
// extract the phrase's surface form from the analysed-text span
String phraseText = at.getSpan().substring(sentPhrase.getStartIndex(), sentPhrase.getEndIndex());
metadata.add(new TripleImpl(enh, ENHANCER_SELECTED_TEXT,
// Fragment (enclosing method is not visible in this view).
// Create a TextAnnotation and fetch the metadata graph it was written to.
IRI textAnnotation = EnhancementEngineHelper.createTextEnhancement(
        ci, this);
Graph model = ci.getMetadata();
    // NOTE(review): this return is presumably guarded by a condition that
    // starts outside this view — confirm. In the collapsed source the
    // "//nothing to do" comment swallowed the rest of the line; the line
    // break after it is restored here.
    return; //nothing to do
IRI enh = createTextEnhancement(ci, this);
Graph metadata = ci.getMetadata();
// Fragment: the if opened here is closed outside this view.
if(section.getType() == SpanTypeEnum.Sentence){
// Fragment: both the for loop and the inner if opened here are closed
// outside this view.
for (CeliMorphoFeatures feat : mFeatures) {
    // annotate the lexical entry's word form as selected text
    IRI textAnnotation = EnhancementEngineHelper.createTextEnhancement(ci, this);
    g.add(new TripleImpl(textAnnotation, ENHANCER_SELECTED_TEXT, new PlainLiteralImpl(le.getWordForm(), lang)));
    // le.from/le.to appear to be character offsets; only non-negative spans
    // proceed — TODO confirm the offset semantics against CeliMorphoFeatures
    if (le.from >= 0 && le.to > 0) {
// Fragment: reassigns a textAnnotation variable declared outside this view;
// the trailing metadata.add(...) call (adding the ENHANCER_START offset) is
// cut off at the end of this view.
textAnnotation = EnhancementEngineHelper.createTextEnhancement(ci, this);
metadata.add(new TripleImpl(textAnnotation, Properties.ENHANCER_START,
// Fragment (enclosing method is not visible in this view).
Graph model = ci.getMetadata();
// create a TextAnnotation attributed to the given engine and attach the
// occurrence's name as language-tagged selected text
IRI textAnnotation = EnhancementEngineHelper
        .createTextEnhancement(ci, engine);
model.add(new TripleImpl(textAnnotation, ENHANCER_SELECTED_TEXT,
        new PlainLiteralImpl(occ.name, lang)));
// Fragment: the trailing metadata.add(...) call is cut off at the end of
// this view.
// Create a TextAnnotation, remember it in the local collection, and start
// adding its triples.
IRI textAnnotation = EnhancementEngineHelper.createTextEnhancement(ci, this);
textAnnotations.add(textAnnotation);
metadata.add(new TripleImpl(textAnnotation,