Tabnine Logo
MultiLabelClassifier
Code IndexAdd Tabnine to your IDE (free)

How to use
MultiLabelClassifier
in
meka.classifiers.multilabel

Best Java code snippets using meka.classifiers.multilabel.MultiLabelClassifier (Showing top 20 results out of 315)

origin: dkpro/dkpro-tc

/**
 * Trains a MEKA multi-label classifier.
 *
 * @param data       the training instances
 * @param model      file the serialized model is written to (only when
 *                   {@code serializeModel} is set)
 * @param parameters first element is the classifier class name, remaining
 *                   elements are its command-line options
 * @return the trained classifier
 * @throws IllegalArgumentException if no classifier class name is given
 * @throws Exception if instantiation, option parsing, or training fails
 */
public Classifier train(Instances data, File model, List<String> parameters) throws Exception
{
  // Fail fast with a clear message instead of an opaque IndexOutOfBoundsException.
  if (parameters == null || parameters.isEmpty()) {
    throw new IllegalArgumentException(
        "parameters must contain at least the classifier class name");
  }
  List<String> mlArgs = parameters.subList(1, parameters.size());
  // parameters is List<String>, so no cast is needed on get(0)
  MultiLabelClassifier cl = (MultiLabelClassifier) AbstractClassifier
      .forName(parameters.get(0), new String[0]);
  if (!mlArgs.isEmpty()) {
    cl.setOptions(mlArgs.toArray(new String[0]));
  }
  cl.buildClassifier(data);
  if (serializeModel) {
    weka.core.SerializationHelper.write(model.getAbsolutePath(), cl);
  }
  return cl;
}
origin: Waikato/meka

/**
 * Predicts label confidences for an instance by averaging the
 * distributions of all ensemble members.
 *
 * @param x the instance to classify
 * @return a [0,1] confidence value for each label
 * @throws Exception if a member classifier fails
 */
@Override
public double[] distributionForInstance(Instance x) throws Exception {
  double[] votes = new double[x.classIndex()];
  // Accumulate each member's distribution.
  for (int m = 0; m < m_NumIterations; m++) {
    double[] memberDist = m_Classifiers[m].distributionForInstance(x);
    for (int j = 0; j < memberDist.length; j++) {
      votes[j] += memberDist[j];
    }
  }
  // Turn summed votes into a [0,1] confidence per label.
  for (int j = 0; j < votes.length; j++) {
    votes[j] /= m_NumIterations;
  }
  return votes;
}
origin: Waikato/meka

/**
 * Returns a string representation of the model.
 *
 * @return the model description
 */
@Override
public String getModel() {
  // Delegate when the base classifier can describe itself as a multi-label model.
  return (m_Classifier instanceof MultiLabelClassifier)
      ? ((MultiLabelClassifier) m_Classifier).getModel()
      : toString();
}
origin: net.sf.meka/meka

  Evaluation.printOptions(h.listOptions());
  return;
h.setOptions(options);
if (h.getDebug()) System.out.println("Loading and preparing dataset ...");
    if (h.getDebug()) System.out.println(":- Dataset -: "+MLUtils.getDatasetName(D_train)+"\tL="+L+"\tD(t:T)=("+D_train.numInstances()+":"+D_test.numInstances()+")\tLC(t:T)="+Utils.roundDouble(MLUtils.labelCardinality(D_train,L),2)+":"+Utils.roundDouble(MLUtils.labelCardinality(D_test,L),2)+")");
          h.buildClassifier(D_train);
        h.buildClassifier(D_full);
      Instances predicted = new Instances(D_test, 0);
      for (int i = 0; i < D_test.numInstances(); i++) {
        double pred[] = h.distributionForInstance(D_test.instance(i));
  Evaluation.printOptions(h.listOptions());
  System.exit(1);
origin: Waikato/meka

if (h.getDebug())
  System.out.println(":- Classifier -: "+h.getClass().getName()+": "+Arrays.toString(h.getOptions()));
if (h.getDebug()) {
  System.out.println("Training classifier on initial window ...");
h.buildClassifier(D_init); 										// initial classifier
train_time = System.currentTimeMillis() - train_time;
if (h.getDebug()) {
  System.out.println("Done (in "+(train_time/1000.0)+" s)");
if (h.getDebug()) {
  System.out.println("--------------------------------------------------------------------------------");
  System.out.print("#"+Utils.padLeft("w",6)+" "+Utils.padLeft("n",6));
      double y[] = h.distributionForInstance(x_);
      long after_test = System.currentTimeMillis();
  if (h.getDebug()) {
    System.out.print("#"+Utils.doubleToString((double)w+1,6,0)+" "+Utils.doubleToString((double)n,6,0));
    n = 0;
if (h.getDebug()) {
  System.out.println("--------------------------------------------------------------------------------");
result.vals.put("Total instances tested",(double)i);
result.vals.put("Initial instances for training",(double)windowSize);
result.setInfo("Options", Arrays.toString(h.getOptions()));
result.setInfo("Additional Info", h.toString());
origin: net.sf.meka/meka

/**
 * Builds the ensemble: each member is trained on a random subsample
 * (without replacement) containing m_BagSizePercent% of the training data.
 *
 * NOTE(review): the same Instances copy is reshuffled in place on every
 * iteration (seed m_Seed+i); the first sub_size instances of the reshuffled
 * data form that member's training subset.
 *
 * @param train the training instances
 * @throws Exception if capability testing or member training fails
 */
@Override
public void buildClassifier(Instances train) throws Exception {
   testCapabilities(train);
   
  if (getDebug()) System.out.print("-: Models: ");
  // Work on a copy so the caller's dataset order is not disturbed.
  train = new Instances(train);
  m_Classifiers = ProblemTransformationMethod.makeCopies((ProblemTransformationMethod) m_Classifier, m_NumIterations);
  // Number of instances per subsample.
  int sub_size = (train.numInstances()*m_BagSizePercent/100);
  for(int i = 0; i < m_NumIterations; i++) {
    if(getDebug()) System.out.print(""+i+" ");
    // Give randomizable members distinct seeds for ensemble diversity.
    if (m_Classifiers[i] instanceof Randomizable) ((Randomizable)m_Classifiers[i]).setSeed(i);
    // Reshuffle, then take the first sub_size instances as this member's subset.
    train.randomize(new Random(m_Seed+i));
    Instances sub_train = new Instances(train,0,sub_size);
    m_Classifiers[i].buildClassifier(sub_train);
  }
  if (getDebug()) System.out.println(":-");
}
origin: net.sf.meka/meka

/**
 * Called by a classifier's main() method upon initialisation from the
 * command line; dispatches to the incremental or batch experiment runner.
 *
 * @param h    the classifier to run
 * @param args command-line options
 */
public static void runClassifier(MultiLabelClassifier h, String args[]) {
  boolean incremental = (h instanceof UpdateableClassifier);
  try {
    if (incremental) {
      IncrementalEvaluation.runExperiment(h, args);
    } else {
      Evaluation.runExperiment(h, args);
    }
  } catch (Exception e) {
    // Report the failure, then print usage for the matching runner.
    System.err.println("\n" + e);
    if (incremental) {
      IncrementalEvaluation.printOptions(h.listOptions());
    } else {
      Evaluation.printOptions(h.listOptions());
    }
  }
}
origin: net.sf.meka/meka

Instances D_train = D.trainCV(numFolds,i);
Instances D_test = D.testCV(numFolds,i);
if (h.getDebug()) System.out.println(":- Fold ["+i+"/"+numFolds+"] -: "+MLUtils.getDatasetName(D)+"\tL="+D.classIndex()+"\tD(t:T)=("+D_train.numInstances()+":"+D_test.numInstances()+")\tLC(t:T)="+Utils.roundDouble(MLUtils.labelCardinality(D_train,D.classIndex()),2)+":"+Utils.roundDouble(MLUtils.labelCardinality(D_test,D.classIndex()),2)+")");
origin: Waikato/meka

  Evaluation.printOptions(h.listOptions());
  return;
h.setOptions(options);
if (h.getDebug()) System.out.println("Loading and preparing dataset ...");
    if (h.getDebug()) System.out.println(":- Dataset -: "+MLUtils.getDatasetName(D_train)+"\tL="+L+"\tD(t:T)=("+D_train.numInstances()+":"+D_test.numInstances()+")\tLC(t:T)="+Utils.roundDouble(MLUtils.labelCardinality(D_train,L),2)+":"+Utils.roundDouble(MLUtils.labelCardinality(D_test,L),2)+")");
          h.buildClassifier(D_train);
        h.buildClassifier(D_full);
      Instances predicted = new Instances(D_test, 0);
      for (int i = 0; i < D_test.numInstances(); i++) {
        double pred[] = h.distributionForInstance(D_test.instance(i));
  Evaluation.printOptions(h.listOptions());
  System.exit(1);
origin: net.sf.meka/meka

if (h.getDebug())
  System.out.println(":- Classifier -: "+h.getClass().getName()+": "+Arrays.toString(h.getOptions()));
if (h.getDebug()) {
  System.out.println("Training classifier on initial window ...");
h.buildClassifier(D_init); 										// initial classifier
train_time = System.currentTimeMillis() - train_time;
if (h.getDebug()) {
  System.out.println("Done (in "+(train_time/1000.0)+" s)");
if (h.getDebug()) {
  System.out.println("--------------------------------------------------------------------------------");
  System.out.print("#"+Utils.padLeft("w",6)+" "+Utils.padLeft("n",6));
      double y[] = h.distributionForInstance(x_);
      long after_test = System.currentTimeMillis();
  if (h.getDebug()) {
    System.out.print("#"+Utils.doubleToString((double)w+1,6,0)+" "+Utils.doubleToString((double)n,6,0));
    n = 0;
if (h.getDebug()) {
  System.out.println("--------------------------------------------------------------------------------");
result.vals.put("Total instances tested",(double)i);
result.vals.put("Initial instances for training",(double)windowSize);
result.setInfo("Options", Arrays.toString(h.getOptions()));
result.setInfo("Additional Info", h.toString());
origin: Waikato/meka

/**
 * Builds the ensemble: each member is trained on a random subsample
 * (without replacement) containing m_BagSizePercent% of the training data.
 *
 * NOTE(review): the same Instances copy is reshuffled in place on every
 * iteration (seed m_Seed+i); the first sub_size instances of the reshuffled
 * data form that member's training subset.
 *
 * @param train the training instances
 * @throws Exception if capability testing or member training fails
 */
@Override
public void buildClassifier(Instances train) throws Exception {
   testCapabilities(train);
   
  if (getDebug()) System.out.print("-: Models: ");
  // Work on a copy so the caller's dataset order is not disturbed.
  train = new Instances(train);
  m_Classifiers = ProblemTransformationMethod.makeCopies((ProblemTransformationMethod) m_Classifier, m_NumIterations);
  // Number of instances per subsample.
  int sub_size = (train.numInstances()*m_BagSizePercent/100);
  for(int i = 0; i < m_NumIterations; i++) {
    if(getDebug()) System.out.print(""+i+" ");
    // Give randomizable members distinct seeds for ensemble diversity.
    if (m_Classifiers[i] instanceof Randomizable) ((Randomizable)m_Classifiers[i]).setSeed(i);
    // Reshuffle, then take the first sub_size instances as this member's subset.
    train.randomize(new Random(m_Seed+i));
    Instances sub_train = new Instances(train,0,sub_size);
    m_Classifiers[i].buildClassifier(sub_train);
  }
  if (getDebug()) System.out.println(":-");
}
origin: Waikato/meka

/**
 * Called by a classifier's main() method upon initialisation from the
 * command line; dispatches to the incremental or batch experiment runner.
 *
 * @param h    the classifier to run
 * @param args command-line options
 */
public static void runClassifier(MultiLabelClassifier h, String args[]) {
  boolean incremental = (h instanceof UpdateableClassifier);
  try {
    if (incremental) {
      IncrementalEvaluation.runExperiment(h, args);
    } else {
      Evaluation.runExperiment(h, args);
    }
  } catch (Exception e) {
    // Report the failure, then print usage for the matching runner.
    System.err.println("\n" + e);
    if (incremental) {
      IncrementalEvaluation.printOptions(h.listOptions());
    } else {
      Evaluation.printOptions(h.listOptions());
    }
  }
}
origin: Waikato/meka

Instances D_train = D.trainCV(numFolds,i);
Instances D_test = D.testCV(numFolds,i);
if (h.getDebug()) System.out.println(":- Fold ["+i+"/"+numFolds+"] -: "+MLUtils.getDatasetName(D)+"\tL="+D.classIndex()+"\tD(t:T)=("+D_train.numInstances()+":"+D_test.numInstances()+")\tLC(t:T)="+Utils.roundDouble(MLUtils.labelCardinality(D_train,D.classIndex()),2)+":"+Utils.roundDouble(MLUtils.labelCardinality(D_test,D.classIndex()),2)+")");
origin: net.sf.meka/meka

/**
 * Builds the ensemble: each member is trained on a bootstrap bag sampled
 * with replacement from the training data (bag size is m_BagSizePercent%
 * of the training set).
 *
 * @param train the training instances
 * @throws Exception if capability testing or member training fails
 */
@Override
public void buildClassifier(Instances train) throws Exception {
  testCapabilities(train);

  if (getDebug()) System.out.print("-: Models: ");
  m_Classifiers = ProblemTransformationMethod.makeCopies((ProblemTransformationMethod) m_Classifier, m_NumIterations);
  for (int i = 0; i < m_NumIterations; i++) {
    Random sampler = new Random(m_Seed + i);
    Instances bag = new Instances(train, 0);
    // Seed randomizable members so every run is reproducible.
    if (m_Classifiers[i] instanceof Randomizable)
      ((Randomizable) m_Classifiers[i]).setSeed(m_Seed + i);
    if (getDebug()) System.out.print("" + i + " ");
    // Bag size as a percentage of the training-set size.
    int bagSize = m_BagSizePercent * train.numInstances() / 100;
    // Sample with replacement until the bag is full.
    for (int k = 0; k < bagSize; k++) {
      bag.add(train.instance(sampler.nextInt(train.numInstances())));
    }
    m_Classifiers[i].buildClassifier(bag);
  }
  if (getDebug()) System.out.println(":-");
}
origin: org.dkpro.tc/dkpro-tc-ml-weka

/**
 * Trains a MEKA multi-label classifier.
 *
 * @param data       the training instances
 * @param model      file the serialized model is written to (only when
 *                   {@code serializeModel} is set)
 * @param parameters first element is the classifier class name, remaining
 *                   elements are its command-line options
 * @return the trained classifier
 * @throws IllegalArgumentException if no classifier class name is given
 * @throws Exception if instantiation, option parsing, or training fails
 */
public Classifier train(Instances data, File model, List<String> parameters) throws Exception
{
  // Fail fast with a clear message instead of an opaque IndexOutOfBoundsException.
  if (parameters == null || parameters.isEmpty()) {
    throw new IllegalArgumentException(
        "parameters must contain at least the classifier class name");
  }
  List<String> mlArgs = parameters.subList(1, parameters.size());
  // parameters is List<String>, so no cast is needed on get(0)
  MultiLabelClassifier cl = (MultiLabelClassifier) AbstractClassifier
      .forName(parameters.get(0), new String[0]);
  if (!mlArgs.isEmpty()) {
    cl.setOptions(mlArgs.toArray(new String[0]));
  }
  cl.buildClassifier(data);
  if (serializeModel) {
    weka.core.SerializationHelper.write(model.getAbsolutePath(), cl);
  }
  return cl;
}
origin: net.sf.meka/meka

/**
 * Returns a string representation of the model.
 *
 * @return the model description
 */
@Override
public String getModel() {
  // Delegate when the base classifier can describe itself as a multi-label model.
  return (m_Classifier instanceof MultiLabelClassifier)
      ? ((MultiLabelClassifier) m_Classifier).getModel()
      : toString();
}
origin: Waikato/meka

/**
 * Called by a classifier's main() method upon initialisation from the
 * command line; dispatches to the incremental or batch experiment runner.
 *
 * @param h    the classifier to run
 * @param args command-line options
 */
public static void runClassifier(MultiLabelClassifier h, String args[]) {
  boolean incremental = (h instanceof UpdateableClassifier);
  try {
    if (incremental) {
      IncrementalEvaluation.runExperiment(h, args);
    } else {
      Evaluation.runExperiment(h, args);
    }
  } catch (Exception e) {
    // Report the failure, then print usage for the matching runner.
    System.err.println("\n" + e);
    if (incremental) {
      IncrementalEvaluation.printOptions(h.listOptions());
    } else {
      Evaluation.printOptions(h.listOptions());
    }
  }
}
origin: net.sf.meka/meka

/**
 * Predicts label confidences for an instance by averaging the
 * distributions of all ensemble members.
 *
 * @param x the instance to classify
 * @return a [0,1] confidence value for each label
 * @throws Exception if a member classifier fails
 */
@Override
public double[] distributionForInstance(Instance x) throws Exception {
  double[] votes = new double[x.classIndex()];
  // Accumulate each member's distribution.
  for (int m = 0; m < m_NumIterations; m++) {
    double[] memberDist = m_Classifiers[m].distributionForInstance(x);
    for (int j = 0; j < memberDist.length; j++) {
      votes[j] += memberDist[j];
    }
  }
  // Turn summed votes into a [0,1] confidence per label.
  for (int j = 0; j < votes.length; j++) {
    votes[j] /= m_NumIterations;
  }
  return votes;
}
origin: Waikato/meka

/**
 * Builds the ensemble: each member is trained on a bootstrap bag sampled
 * with replacement from the training data (bag size is m_BagSizePercent%
 * of the training set).
 *
 * @param train the training instances
 * @throws Exception if capability testing or member training fails
 */
@Override
public void buildClassifier(Instances train) throws Exception {
  testCapabilities(train);

  if (getDebug()) System.out.print("-: Models: ");
  m_Classifiers = ProblemTransformationMethod.makeCopies((ProblemTransformationMethod) m_Classifier, m_NumIterations);
  for (int i = 0; i < m_NumIterations; i++) {
    Random sampler = new Random(m_Seed + i);
    Instances bag = new Instances(train, 0);
    // Seed randomizable members so every run is reproducible.
    if (m_Classifiers[i] instanceof Randomizable)
      ((Randomizable) m_Classifiers[i]).setSeed(m_Seed + i);
    if (getDebug()) System.out.print("" + i + " ");
    // Bag size as a percentage of the training-set size.
    int bagSize = m_BagSizePercent * train.numInstances() / 100;
    // Sample with replacement until the bag is full.
    for (int k = 0; k < bagSize; k++) {
      bag.add(train.instance(sampler.nextInt(train.numInstances())));
    }
    m_Classifiers[i].buildClassifier(bag);
  }
  if (getDebug()) System.out.println(":-");
}
origin: net.sf.meka/meka

  /**
   * Returns a string representation of the model.
   *
   * @return      the model, one section per ensemble member
   */
  public String getModel() {
    if (m_Classifiers == null)
      return getClass().getName() + ": No model built yet";

    StringBuilder buf = new StringBuilder();
    for (int i = 0; i < m_Classifiers.length; i++) {
      // Separate consecutive member sections with a blank line.
      if (i > 0)
        buf.append("\n\n");
      buf.append(getClass().getName()).append(": Model #").append(i + 1).append("\n\n");
      String model = m_Classifiers[i].getModel();
      buf.append(model.length() > 0 ? model : "No model representation available");
    }

    return buf.toString();
  }
}
meka.classifiers.multilabel.MultiLabelClassifier

Javadoc

Interface for multi-label classifiers.

Most used methods

  • buildClassifier
  • setOptions
  • distributionForInstance
  • getDebug
  • getModel
  • getOptions
  • listOptions

Popular in Java

  • Updating database using SQL prepared statement
  • getSupportFragmentManager (FragmentActivity)
  • runOnUiThread (Activity)
  • scheduleAtFixedRate (Timer)
  • Set (java.util)
    A Set is a data structure which does not allow duplicate elements.
  • UUID (java.util)
    UUID is an immutable representation of a 128-bit universally unique identifier (UUID). There are multiple variant layouts of UUIDs, as defined in RFC 4122.
  • Semaphore (java.util.concurrent)
    A counting semaphore. Conceptually, a semaphore maintains a set of permits. Each #acquire blocks if necessary until a permit is available, and then takes it.
  • HttpServlet (javax.servlet.http)
    Provides an abstract class to be subclassed to create an HTTP servlet suitable for a Web site. A subclass must override at least one of its request-handling methods.
  • IsNull (org.hamcrest.core)
    Is the value null?
  • LoggerFactory (org.slf4j)
    The LoggerFactory is a utility class producing Loggers for various logging APIs, most notably for log4j, logback and java.util.logging.
  • Best plugins for Eclipse
Tabnine Logo
  • Products

    Search for Java codeSearch for JavaScript code
  • IDE Plugins

    IntelliJ IDEAWebStormVisual StudioAndroid StudioEclipseVisual Studio CodePyCharmSublime TextPhpStormVimGoLandRubyMineEmacsJupyter NotebookJupyter LabRiderDataGripAppCode
  • Company

    About UsContact UsCareers
  • Resources

    FAQBlogTabnine AcademyTerms of usePrivacy policyJava Code IndexJavascript Code Index
Get Tabnine for your IDE now