/** Log to the warn channel. @see RedwoodChannels#logf(Flag, String, Object...) */
public void warnf(String format, Object... args) {
  warn((Supplier<String>) () -> new Formatter().format(format, args).toString());
}
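/*
 * Usage sketch (hypothetical call site, not from the original source): because
 * warnf wraps the message in a Supplier, the Formatter only runs if the warn
 * channel actually emits, so an expensive format costs nothing when warnings
 * are suppressed.
 */
private void warnfDemo(int dropped, int total) {
  warnf("Dropped %d of %d tokens (%.1f%%)", dropped, total,
      total == 0 ? 0.0 : 100.0 * dropped / total);
}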
@Override
protected void finalize() throws Throwable {
  try {
    if (readerOpen) {
      logger.warn("Forgot to close FileIterable -- closing from finalize()");
      reader.close();
    }
  } finally {
    // Always chain to the superclass finalizer, even if closing the reader throws.
    super.finalize();
  }
}
};
/** Add NER tags to a tree. */
private static void addNERTags(Tree tree) {
  // set up the tagger if necessary
  if (NER_TAGGER == null || NER_CLASSIFY_METHOD == null) {
    setupNERTagger();
  }
  if (NER_TAGGER != null && NER_CLASSIFY_METHOD != null) {
    // everything is set up successfully, so we can act
    try {
      // classify
      List<CoreLabel> labels = tree.yield().stream()
          .map(w -> (CoreLabel) w)
          .collect(Collectors.toList());
      NER_CLASSIFY_METHOD.invoke(NER_TAGGER, labels);
    } catch (Exception ex) {
      log.warn("Error running " + NER_COMBINER_NAME + " on Tree! Not applying NER tags!");
    }
  }
}
/**
 * Returns the elements in the given file with the given tag, associated with
 * the text content of up to {@code num} previous and next siblings.
 *
 * @return List of {@code Triple<String, Element, String>}: targeted elements surrounded
 *     by the text content of up to {@code num} previous siblings and {@code num} next siblings.
 */
public static List<Triple<String, Element, String>> getTagElementTriplesFromFileNumBounded(File f, String tag, int num) {
  List<Triple<String, Element, String>> sents = Generics.newArrayList();
  try {
    sents = getTagElementTriplesFromFileNumBoundedSAXException(f, tag, num);
  } catch (SAXException e) {
    log.warn(e);
  }
  return sents;
}
/** Add NER tags to a semantic graph. */
private static void addNERTags(SemanticGraph sg) {
  // set up the tagger if necessary
  if (NER_TAGGER == null || NER_CLASSIFY_METHOD == null) {
    setupNERTagger();
  }
  if (NER_TAGGER != null && NER_CLASSIFY_METHOD != null) {
    // everything is set up successfully, so we can act
    try {
      // classify
      List<CoreLabel> labels = sg.vertexListSorted().stream()
          .map(IndexedWord::backingLabel)
          .collect(Collectors.toList());
      NER_CLASSIFY_METHOD.invoke(NER_TAGGER, labels);
    } catch (Exception ex) {
      log.warn("Error running " + NER_COMBINER_NAME + " on SemanticGraph! Not applying NER tags!");
    }
  }
}
public static String elapsedTime(Date d1, Date d2) {
  try {
    Duration period = Duration.between(d1.toInstant(), d2.toInstant());
    // Note: this will become easier with Java 9, using toDaysPart() etc.
    long days = period.toDays();
    period = period.minusDays(days);
    long hours = period.toHours();
    period = period.minusHours(hours);
    long minutes = period.toMinutes();
    period = period.minusMinutes(minutes);
    long seconds = period.getSeconds();
    return days + " days, " + hours + " hours, " + minutes + " minutes, " + seconds + " seconds";
  } catch (IllegalArgumentException e) {
    log.warn(e);
  }
  return "";
}
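/*
 * Usage sketch (hypothetical call site, not from the original source): formats
 * the gap between two java.util.Date values. With the dates below, the method
 * returns "1 days, 2 hours, 0 minutes, 30 seconds".
 */
public static void elapsedTimeDemo() {
  Date start = new Date(0L);                             // the epoch
  Date end = new Date((26 * 3600 + 30) * 1000L);         // 26 hours 30 seconds later
  System.out.println(elapsedTime(start, end));
}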
/**
 * Add a word to the lexicon, unless it contains some non-Chinese character.
 */
private void addStringToLexicon(String str) {
  if (str.isEmpty()) {
    logger.warn("WARNING: blank line in lexicon");
  } else if (str.contains(" ")) {
    logger.warn("WARNING: word with space in lexicon");
  } else {
    if (excludeChar(str)) {
      printlnErr("skipping word: " + str);
      return;
    }
    // printlnErr("adding word: " + str);
    words.add(str);
  }
}
/**
 * @return the new XMLTag object, or null if one couldn't be created
 */
public static XMLTag readAndParseTag(Reader r) throws IOException {
  String s = readTag(r);
  if (s == null) {
    return null;
  }
  XMLTag ret = null;
  try {
    ret = new XMLTag(s);
  } catch (Exception e) {
    log.warn("Failed to handle |" + s + "|");
  }
  return ret;
}
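/*
 * Usage sketch (hypothetical call site, not from the original source; assumes
 * XMLTag exposes a public name field as in the surrounding class, and
 * java.io.StringReader): pull tags out of a character stream one at a time.
 * Note that readAndParseTag returns null both at end of input and when a tag
 * fails to parse, so callers should be prepared for either.
 */
public static void readTagsDemo() throws IOException {
  Reader r = new StringReader("<doc><p/></doc>");
  for (XMLTag tag = readAndParseTag(r); tag != null; tag = readAndParseTag(r)) {
    System.out.println("read tag: " + tag.name);
  }
}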
/**
 * Returns the elements in the given file with the given tag, associated with
 * the text content of the two previous siblings and two next siblings.
 *
 * @return List of {@code Triple<String, Element, String>}: targeted elements surrounded
 *     by the text content of the two previous siblings and two next siblings.
 */
public static List<Triple<String, Element, String>> getTagElementTriplesFromFile(File f, String tag) {
  List<Triple<String, Element, String>> sents = Generics.newArrayList();
  try {
    sents = getTagElementTriplesFromFileSAXException(f, tag);
  } catch (SAXException e) {
    log.warn(e);
  }
  return sents;
}
@Override
public Integer call() {
  O result = null;
  try {
    result = processor.process(item);
  } catch (Exception | Error e) {
    log.warn(e);  // Hope that the consumer knows how to handle null!
  }
  QueueItem<O> output = new QueueItem<>(result, itemId);
  callback.call(output, processorId);
  return itemId;
}
}
/**
 * Returns the text content of all nodes in the given file with the given tag.
 *
 * @return List of String text contents of tags.
 */
public static List<String> getTextContentFromTagsFromFile(File f, String tag) {
  List<String> sents = Generics.newArrayList();
  try {
    sents = getTextContentFromTagsFromFileSAXException(f, tag);
  } catch (SAXException e) {
    log.warn(e);
  }
  return sents;
}
@Override
public void warning(SAXParseException exception) {
  log.warn(makeBetterErrorString("Warning", exception));
}
/**
 * Returns all elements in the given file with the given tag.
 *
 * @return List of Element nodes with the given tag.
 */
public static List<Element> getTagElementsFromFile(File f, String tag) {
  List<Element> sents = Generics.newArrayList();
  try {
    sents = getTagElementsFromFileSAXException(f, tag);
  } catch (SAXException e) {
    log.warn(e);
  }
  return sents;
}
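/*
 * Usage sketch for this family of helpers (hypothetical call site; the file
 * name and tag are made up): each variant swallows SAXException, logs it, and
 * returns an empty list on parse failure, so callers never see a checked
 * exception.
 */
public static void xmlHelpersDemo() {
  File f = new File("corpus.xml");  // hypothetical input file
  List<String> sentences = getTextContentFromTagsFromFile(f, "sentence");
  List<Element> elements = getTagElementsFromFile(f, "sentence");
  // Each Triple is (text of preceding siblings, element, text of following siblings).
  List<Triple<String, Element, String>> triples = getTagElementTriplesFromFile(f, "sentence");
  System.out.printf("%d sentences, %d elements, %d triples%n",
      sentences.size(), elements.size(), triples.size());
}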
/**
 * Call this if you are no longer using StanfordCoreNLP and want to
 * release the memory associated with the annotators.
 */
public static synchronized void clearAnnotatorPool() {
  logger.warn("Clearing CoreNLP annotation pool; this should be unnecessary in production");
  GLOBAL_ANNOTATOR_CACHE.clear();
}
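/*
 * Usage sketch (hypothetical; assumes this method lives on StanfordCoreNLP, as
 * its Javadoc suggests): drop your own pipeline reference first, then clear
 * the pool so the cached annotators become unreachable and can be collected.
 */
public static void shutdownDemo() {
  Properties props = new Properties();
  props.setProperty("annotators", "tokenize,ssplit,pos");
  StanfordCoreNLP pipeline = new StanfordCoreNLP(props);
  // ... annotate documents ...
  pipeline = null;                         // release our own reference
  StanfordCoreNLP.clearAnnotatorPool();    // release the shared annotator cache
}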
/**
 * The straightforward constructor.
 */
private CachedAnnotator(String signature, Lazy<Annotator> annotator) {
  if (!annotator.isCache()) {
    log.warn("Cached annotator will never GC -- this can cause OOM exceptions!");
  }
  this.signature = signature;
  this.annotator = annotator;
}
@Override
public Tree evaluate(Tree tree, TregexMatcher tregex) {
  Tree result = newNodeNames.get(label);
  if (result == null) {
    result = tregex.getNode(label);
  }
  if (result == null) {
    log.warn("Null node fetched by Tsurgeon operation for node: " + label +
        " (either no node labeled this, or the labeled node didn't match anything)");
  }
  return result;
}
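/*
 * Usage sketch (hypothetical; not from the original source; assumes
 * TregexPattern, TsurgeonPattern, and Tsurgeon from
 * edu.stanford.nlp.trees.tregex and .tregex.tsurgeon): the label fetched above
 * comes from a named Tregex capture such as "=n" below. If the Tsurgeon script
 * referenced a name that never matched, evaluate() would log the warning and
 * return null.
 */
public static void tsurgeonDemo() {
  Tree tree = Tree.valueOf("(ROOT (NP (NN dog)))");
  TregexPattern matcher = TregexPattern.compile("NN=n < dog");
  TsurgeonPattern surgery = Tsurgeon.parseOperation("relabel n NNP");
  Tree result = Tsurgeon.processPattern(matcher, surgery, tree);
  System.out.println(result);  // (ROOT (NP (NNP dog)))
}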
public ChineseNumberSequenceClassifier(Properties props, boolean useSUTime, Properties sutimeProps) {
  super(props);
  this.useSUTime = useSUTime;
  if (this.useSUTime) {
    // TODO: Need a Chinese version of SUTime
    log.warn("SUTime currently does not support Chinese. Ignoring property ner.useSUTime.");
  }
  this.timexExtractor = null;
}
public static void main(String[] args) {
  try {
    Properties props = StringUtils.argsToPropertiesWithResolve(args);
    GetPatternsFromDataMultiClass.<SurfacePattern>run(props);
  } catch (OutOfMemoryError e) {
    System.out.println("Out of memory! Either increase the memory allotted (for example, run as " +
        "java -mx20g ... to allocate 20G), or consider using the batchProcessSents and " +
        "numMaxSentencesPerBatchFile flags.");
    log.warn(e);
  } catch (Exception e) {
    log.warn(e);
  }
}
@Override
public List<CoreLabel> classifyWithGlobalInformation(List<CoreLabel> tokenSequence, CoreMap document, CoreMap sentence) {
  if (useSUTime) {
    log.warn("ChineseNumberSequenceClassifier does not have a SUTime implementation.");
  }
  return classify(tokenSequence);
}
protected static List<List<CRFDatum<Collection<String>, String>>> loadProcessedData(String filename) {
  List<List<CRFDatum<Collection<String>, String>>> result;
  try {
    result = IOUtils.readObjectFromURLOrClasspathOrFileSystem(filename);
  } catch (Exception e) {
    log.warn(e);
    result = Collections.emptyList();
  }
  log.info("Loading processed data from serialized file ... done. Got " + result.size() + " datums.");
  return result;
}
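/*
 * Usage sketch (hypothetical file name; assumes IOUtils.writeObjectToFile as
 * the serialization counterpart of the reader used above): round-trip the
 * processed datums through a serialized file.
 */
public static void processedDataDemo(List<List<CRFDatum<Collection<String>, String>>> datums) throws IOException {
  IOUtils.writeObjectToFile(datums, "processed.ser.gz");  // hypothetical path
  List<List<CRFDatum<Collection<String>, String>>> reloaded = loadProcessedData("processed.ser.gz");
  System.out.println("Reloaded " + reloaded.size() + " sentences of datums.");
}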