NameSample.isClearAdaptiveDataSet

How to use the isClearAdaptiveDataSet method in opennlp.tools.namefind.NameSample

Best Java code snippets using opennlp.tools.namefind.NameSample.isClearAdaptiveDataSet
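
The isClearAdaptiveDataSet flag marks a sample that starts a new document: callers are expected to clear any adaptive (per-document) data held by a name finder or feature generator before processing such a sample, as the snippets below do. A minimal usage sketch, assuming a trained NameFinderME and an open ObjectStream<NameSample> are set up elsewhere (the helper name tagAll and the class name are made up for illustration):

import java.io.IOException;

import opennlp.tools.namefind.NameFinderME;
import opennlp.tools.namefind.NameSample;
import opennlp.tools.util.ObjectStream;
import opennlp.tools.util.Span;

public class TagAllSketch {

 // Tags every sample in the stream, clearing the name finder's adaptive
 // data whenever a sample marks the start of a new document.
 static void tagAll(ObjectStream<NameSample> samples, NameFinderME nameFinder) throws IOException {
  NameSample sample;
  while ((sample = samples.read()) != null) {
   if (sample.isClearAdaptiveDataSet()) {
    // Document boundary: drop context learned from the previous document.
    nameFinder.clearAdaptiveData();
   }
   Span[] predictedNames = nameFinder.find(sample.getSentence());
   // ... use predictedNames ...
  }
 }
}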

origin: apache/opennlp

@Override
public int hashCode() {
 return Objects.hash(Arrays.hashCode(getSentence()), Arrays.hashCode(getNames()),
   Arrays.hashCode(getAdditionalContext()), isClearAdaptiveDataSet());
}
origin: apache/opennlp

@Override
public boolean equals(Object obj) {
 if (this == obj) {
  return true;
 }
 if (obj instanceof NameSample) {
  NameSample a = (NameSample) obj;
  return Arrays.equals(getSentence(), a.getSentence()) &&
    Arrays.equals(getNames(), a.getNames()) &&
    Arrays.equals(getAdditionalContext(), a.getAdditionalContext()) &&
    isClearAdaptiveDataSet() == a.isClearAdaptiveDataSet();
 }
 return false;
}
origin: apache/opennlp

public DocumentSample read() throws IOException {
 List<NameSample> document = new ArrayList<>();
 if (beginSample == null) {
  // Assume that the clear flag is set
  beginSample = samples.read();
 }
 // Underlying stream is exhausted!
 if (beginSample == null) {
  return null;
 }
 document.add(beginSample);
 NameSample sample;
 while ((sample = samples.read()) != null) {
  if (sample.isClearAdaptiveDataSet()) {
   beginSample = sample;
   break;
  }
  document.add(sample);
 }
 // Underlying stream is exhausted,
 // next call must return null
 if (sample == null) {
  beginSample = null;
 }
 return new DocumentSample(document.toArray(new NameSample[document.size()]));
}
origin: apache/opennlp

// In NameSample.toString(), a sample that starts a new document is
// serialized with a leading empty line:
if (isClearAdaptiveDataSet())
 result.append("\n");
origin: apache/opennlp

 public NameSample read() throws IOException {

  NameSample sample = samples.read();

  if (sample != null) {

   List<Span> filteredNames = new ArrayList<>();

   for (Span name : sample.getNames()) {
    if (types.contains(name.getType())) {
     filteredNames.add(name);
    }
   }

   return new NameSample(sample.getId(), sample.getSentence(),
     filteredNames.toArray(new Span[filteredNames.size()]), null, sample.isClearAdaptiveDataSet());
  }
  else {
   return null;
  }
 }
}
origin: apache/opennlp

@Override
protected Iterator<Event> createEvents(NameSample sample) {
 if (sample.isClearAdaptiveDataSet()) {
  contextGenerator.clearAdaptiveData();
 }
 Span[] names = sample.getNames();
 if (!Objects.isNull(this.defaultType)) {
  overrideType(names);
 }
 String[] outcomes = codec.encode(names, sample.getSentence().length);
 // String outcomes[] = generateOutcomes(sample.getNames(), type, sample.getSentence().length);
 additionalContextFeatureGenerator.setCurrentContext(sample.getAdditionalContext());
 String[] tokens = new String[sample.getSentence().length];
 for (int i = 0; i < sample.getSentence().length; i++) {
  tokens[i] = sample.getSentence()[i];
 }
 return generateEvents(tokens, outcomes, contextGenerator).iterator();
}
origin: apache/opennlp

/**
 * Evaluates the given reference {@link NameSample} object.
 *
 * This is done by finding the names with the
 * {@link TokenNameFinder} in the sentence from the reference
 * {@link NameSample}. The found names are then used to
 * calculate and update the scores.
 *
 * @param reference the reference {@link NameSample}.
 *
 * @return the predicted {@link NameSample}.
 */
@Override
protected NameSample processSample(NameSample reference) {
 if (reference.isClearAdaptiveDataSet()) {
  nameFinder.clearAdaptiveData();
 }
 Span[] predictedNames = nameFinder.find(reference.getSentence());
 Span[] references = reference.getNames();
 // OPENNLP-396 When evaluating with a file in the old format
 // the type of the span is null, but must be set to default to match
 // the output of the name finder.
 for (int i = 0; i < references.length; i++) {
  if (references[i].getType() == null) {
   references[i] = new Span(references[i].getStart(), references[i].getEnd(), "default");
  }
 }
 fmeasure.updateScores(references, predictedNames);
 return new NameSample(reference.getSentence(), predictedNames, reference.isClearAdaptiveDataSet());
}
origin: apache/opennlp

@Test
public void testClearAdaptiveData() throws IOException {
 String trainingData = "a\n" +
   "b\n" +
   "c\n" +
   "\n" +
   "d\n";
 ObjectStream<String> untokenizedLineStream = new PlainTextByLineStream(
   new MockInputStreamFactory(trainingData), StandardCharsets.UTF_8);
 ObjectStream<NameSample> trainingStream = new NameSampleDataStream(untokenizedLineStream);
 assertFalse(trainingStream.read().isClearAdaptiveDataSet());
 assertFalse(trainingStream.read().isClearAdaptiveDataSet());
 assertFalse(trainingStream.read().isClearAdaptiveDataSet());
 assertTrue(trainingStream.read().isClearAdaptiveDataSet());
 assertNull(trainingStream.read());
 trainingStream.close();
}
origin: apache/opennlp

@Test
public void testParsingDutchSample() throws IOException {
 ObjectStream<NameSample> sampleStream = openData(LANGUAGE.NLD, "conll2002-nl.sample");
 NameSample personName = sampleStream.read();
 Assert.assertEquals(0, personName.getNames().length);
 Assert.assertTrue(personName.isClearAdaptiveDataSet());
 personName = sampleStream.read();
 Assert.assertFalse(personName.isClearAdaptiveDataSet());
 Assert.assertNull(sampleStream.read());
}
origin: apache/opennlp

@Test
public void testParsingGermanSample() throws IOException {
 ObjectStream<NameSample> sampleStream = openData(LANGUAGE.DE, GERMAN_SAMPLE);
 NameSample personName = sampleStream.read();
 Assert.assertNotNull(personName);
 Assert.assertEquals(5, personName.getSentence().length);
 Assert.assertEquals(0, personName.getNames().length);
 Assert.assertEquals(true, personName.isClearAdaptiveDataSet());
}
origin: apache/opennlp

@Test
public void testParsingItalianSample() throws IOException {
 ObjectStream<NameSample> sampleStream = openData(LANGUAGE.IT, "evalita-ner-it.sample");
 NameSample personName = sampleStream.read();
 Assert.assertNotNull(personName);
 Assert.assertEquals(11, personName.getSentence().length);
 Assert.assertEquals(1, personName.getNames().length);
 Assert.assertEquals(true, personName.isClearAdaptiveDataSet());
 Span nameSpan = personName.getNames()[0];
 Assert.assertEquals(8, nameSpan.getStart());
 Assert.assertEquals(10, nameSpan.getEnd());
 Assert.assertEquals(true, personName.isClearAdaptiveDataSet());
 Assert.assertEquals(0, sampleStream.read().getNames().length);
 Assert.assertNull(sampleStream.read());
}
origin: apache/opennlp

@Test
public void testParsingSpanishSample() throws IOException {
 ObjectStream<NameSample> sampleStream = openData(LANGUAGE.SPA, "conll2002-es.sample");
 NameSample personName = sampleStream.read();
 Assert.assertNotNull(personName);
 Assert.assertEquals(5, personName.getSentence().length);
 Assert.assertEquals(1, personName.getNames().length);
 Assert.assertEquals(true, personName.isClearAdaptiveDataSet());
 Span nameSpan = personName.getNames()[0];
 Assert.assertEquals(0, nameSpan.getStart());
 Assert.assertEquals(4, nameSpan.getEnd());
 Assert.assertEquals(true, personName.isClearAdaptiveDataSet());
 Assert.assertEquals(0, sampleStream.read().getNames().length);
 Assert.assertNull(sampleStream.read());
}
origin: apache/opennlp

@Test
public void testParsingEnglishSample() throws IOException {
 ObjectStream<NameSample> sampleStream = openData(LANGUAGE.EN, ENGLISH_SAMPLE);
 NameSample personName = sampleStream.read();
 Assert.assertNotNull(personName);
 Assert.assertEquals(9, personName.getSentence().length);
 Assert.assertEquals(0, personName.getNames().length);
 Assert.assertEquals(true, personName.isClearAdaptiveDataSet());
 personName = sampleStream.read();
 Assert.assertNotNull(personName);
 Assert.assertEquals(2, personName.getSentence().length);
 Assert.assertEquals(1, personName.getNames().length);
 Assert.assertEquals(false, personName.isClearAdaptiveDataSet());
 Span nameSpan = personName.getNames()[0];
 Assert.assertEquals(0, nameSpan.getStart());
 Assert.assertEquals(2, nameSpan.getEnd());
 Assert.assertNull(sampleStream.read());
}

Popular methods of NameSample

  • <init>
    Initializes the current instance (see the construction sketch after this list).
  • getNames
  • getSentence
  • toString
  • getAdditionalContext
  • parse
  • errorTokenWithContext
  • getId
  • equals
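
Several of these methods appear together when a sample is assembled by hand. A minimal construction sketch, with hypothetical token and span values chosen purely for illustration:

import opennlp.tools.namefind.NameSample;
import opennlp.tools.util.Span;

public class NameSampleConstructionSketch {

 public static void main(String[] args) {
  // Tokenized sentence and a single "person" span covering tokens 0..1.
  String[] sentence = {"John", "Smith", "visited", "Berlin", "."};
  Span[] names = {new Span(0, 2, "person")};

  // The boolean argument is the clear-adaptive-data flag; true marks the
  // start of a new document.
  NameSample sample = new NameSample(sentence, names, true);

  System.out.println(sample.isClearAdaptiveDataSet()); // true
  System.out.println(sample);
 }
}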
