/**
 * Builds the wrapped WEKA classifier on the buffered instances.
 *
 * Only non-updateable classifiers need an explicit (re)build: a fresh copy of
 * the configured classifier is trained on the buffer and swapped in only on
 * success, so the previous model remains usable if training fails.
 */
public void buildClassifier() {
    try {
        if (!(classifier instanceof UpdateableClassifier)) {
            // Train a copy first; only replace the current model on success.
            Classifier auxclassifier = weka.classifiers.AbstractClassifier.makeCopy(classifier);
            auxclassifier.buildClassifier(instancesBuffer);
            classifier = auxclassifier;
            isBufferStoring = false;
        }
    } catch (Exception e) {
        // Keep the short message but also emit the full stack trace —
        // the message alone loses the failure location.
        System.err.println("Building WEKA Classifier: " + e.getMessage());
        e.printStackTrace();
    }
}
/**
 * Set the base learner.
 *
 * @param newClassifier the classifier to use.
 */
@Override
public void setClassifier(Classifier newClassifier) {
    super.setClassifier(newClassifier);
    try {
        // Keep the best-classifier slot in sync with the new base learner.
        m_BestClassifier.classifier = AbstractClassifier.makeCopy(m_Classifier);
    } catch (Exception e) {
        // Context line before the trace, matching the constructor's style.
        System.err.println("Failed to create copy of classifier!");
        e.printStackTrace();
    }
}
/**
 * Set the base learner.
 *
 * @param newClassifier the classifier to use.
 */
@Override
public void setClassifier(Classifier newClassifier) {
    super.setClassifier(newClassifier);
    try {
        // Keep the best-classifier slot in sync with the new base learner.
        m_BestClassifier.classifier = AbstractClassifier.makeCopy(m_Classifier);
    } catch (Exception e) {
        // Context line before the trace, matching the constructor's style.
        System.err.println("Failed to create copy of classifier!");
        e.printStackTrace();
    }
}
/**
 * Set the base learner.
 *
 * @param newClassifier the classifier to use.
 */
@Override
public void setClassifier(Classifier newClassifier) {
    super.setClassifier(newClassifier);
    try {
        // Keep the best-classifier slot in sync with the new base learner.
        m_BestClassifier.classifier = AbstractClassifier.makeCopy(m_Classifier);
    } catch (Exception e) {
        // Context line before the trace, matching the constructor's style.
        System.err.println("Failed to create copy of classifier!");
        e.printStackTrace();
    }
}
/**
 * Builds an ensemble of PrunedSets models, each wrapping its own copy of the
 * base learner.
 *
 * @param aPercentage percentage of data to sample
 * @param aNumOfModels the number of models in the ensemble
 * @param aThreshold the threshold for producing bipartitions
 * @param aP pruned sets parameter p
 * @param aStrategy pruned sets strategy
 * @param aB pruned sets parameter b
 * @param baselearner the base learner
 */
public EnsembleOfPrunedSets(double aPercentage, int aNumOfModels, double aThreshold, int aP, PrunedSets.Strategy aStrategy, int aB, Classifier baselearner) {
    super(baselearner);
    numOfModels = aNumOfModels;
    threshold = aThreshold;
    percentage = aPercentage;
    ensemble = new PrunedSets[numOfModels];
    for (int i = 0; i < numOfModels; i++) {
        try {
            // Each ensemble member gets an independent copy of the base learner.
            ensemble[i] = new PrunedSets(AbstractClassifier.makeCopy(baselearner), aP, aStrategy, aB);
        } catch (Exception ex) {
            // NOTE(review): on failure this slot stays null and construction
            // continues — confirm downstream code tolerates null members.
            Logger.getLogger(EnsembleOfPrunedSets.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    // Fixed seed — sampling is reproducible across runs.
    rand = new Random(1);
}
/**
 * {@inheritDoc}
 *
 * Trains a copy of the prototype forest on the supplied data and caches the
 * resulting per-attribute importance scores.
 */
public void buildEvaluator(Instances data) throws Exception {
    FastRandomForest trainedForest = (FastRandomForest) AbstractClassifier.makeCopy(m_frfProto);
    trainedForest.buildClassifier(data);
    m_Importances = trainedForest.getFeatureImportances();
}
/** * RebuildCC - rebuild a classifier chain 'h_old' to have a new sequence 's_new'. */ protected CC rebuildCC(CC h_old, int s_new[], Instances D) throws Exception { // make a deep copy CC h = (CC)AbstractClassifier.makeCopy(h_old); // rebuild this chain h.rebuildClassifier(s_new,new Instances(D)); return h; }
/** * RebuildCC - rebuild a classifier chain 'h_old' to have a new sequence 's_new'. */ protected CC rebuildCC(CC h_old, int s_new[], Instances D) throws Exception { // make a deep copy CC h = (CC)AbstractClassifier.makeCopy(h_old); // rebuild this chain h.rebuildClassifier(s_new,new Instances(D)); return h; }
/**
 * {@inheritDoc}
 *
 * Trains a copy of the prototype forest on the supplied data and caches the
 * resulting per-attribute importance scores.
 */
public void buildEvaluator(Instances data) throws Exception {
    FastRandomForest trainedForest = (FastRandomForest) AbstractClassifier.makeCopy(m_frfProto);
    trainedForest.buildClassifier(data);
    m_Importances = trainedForest.getFeatureImportances();
}
@Override
public void actionPerformed(ActionEvent e) {
    try {
        Instances newTrainingData = null;
        Classifier newClassifier = null;
        // Copy the training data and classifier (when present) so the new
        // visualizer window owns independent objects.
        if (m_trainingInstances != null) {
            newTrainingData = new Instances(m_trainingInstances);
        }
        if (m_classifier != null) {
            newClassifier = AbstractClassifier.makeCopy(m_classifier);
        }
        // Either argument may be null if the source object was absent.
        createNewVisualizerWindow(newClassifier, newTrainingData);
    } catch (Exception ex) {
        ex.printStackTrace();
    }
} });
@Override
public void actionPerformed(ActionEvent e) {
    try {
        Instances newTrainingData = null;
        Classifier newClassifier = null;
        // Copy the training data and classifier (when present) so the new
        // visualizer window owns independent objects.
        if (m_trainingInstances != null) {
            newTrainingData = new Instances(m_trainingInstances);
        }
        if (m_classifier != null) {
            newClassifier = AbstractClassifier.makeCopy(m_classifier);
        }
        // Either argument may be null if the source object was absent.
        createNewVisualizerWindow(newClassifier, newTrainingData);
    } catch (Exception ex) {
        ex.printStackTrace();
    }
} });
/** * Build - Create transformation for this node, and train classifier of type H upon it. * The dataset should have class as index 'j', and remove all indices less than L *not* in paY. */ public void build(Instances D, Classifier H) throws Exception { // transform data T = transform(D); // build SLC 'h' h = AbstractClassifier.makeCopy(H); h.buildClassifier(T); // save templates //t_ = new SparseInstance(T.numAttributes()); //t_.setDataset(T); //t_.setClassMissing(); // [?,x,x,x] T.clear(); }
/** * Build - Create transformation for this node, and train classifier of type H upon it. * The dataset should have class as index 'j', and remove all indices less than L *not* in paY. */ public void build(Instances D, Classifier H) throws Exception { // transform data T = transform(D); // build SLC 'h' h = AbstractClassifier.makeCopy(H); h.buildClassifier(T); // save templates //t_ = new SparseInstance(T.numAttributes()); //t_.setDataset(T); //t_.setClassMissing(); // [?,x,x,x] T.clear(); }
/**
 * The default constructor: wires up the factory, metrics, default search
 * parameters/algorithm, and seeds the best-classifier slot with a copy of
 * the default classifier.
 */
public AbstractMultiSearch() {
    super();
    // Factory must be created first — the metrics come from it.
    m_Factory = newFactory();
    m_Metrics = m_Factory.newMetrics();
    m_Evaluation = m_Metrics.getDefaultMetric();
    m_Classifier = defaultClassifier();
    m_DefaultParameters = defaultSearchParameters();
    m_Parameters = defaultSearchParameters();
    m_Algorithm = defaultAlgorithm();
    m_Trace = new ArrayList<Entry<Integer, Performance>>();
    try {
        // Best-result holder starts with a copy of the default classifier.
        m_BestClassifier = new SearchResult();
        m_BestClassifier.classifier = AbstractClassifier.makeCopy(m_Classifier);
    } catch (Exception e) {
        System.err.println("Failed to create copy of default classifier!");
        e.printStackTrace();
    }
}
/**
 * The default constructor: wires up the factory, metrics, default search
 * parameters/algorithm, and seeds the best-classifier slot with a copy of
 * the default classifier.
 */
public AbstractMultiSearch() {
    super();
    // Factory must be created first — the metrics come from it.
    m_Factory = newFactory();
    m_Metrics = m_Factory.newMetrics();
    m_Evaluation = m_Metrics.getDefaultMetric();
    m_Classifier = defaultClassifier();
    m_DefaultParameters = defaultSearchParameters();
    m_Parameters = defaultSearchParameters();
    m_Algorithm = defaultAlgorithm();
    m_Trace = new ArrayList<Entry<Integer, Performance>>();
    try {
        // Best-result holder starts with a copy of the default classifier.
        m_BestClassifier = new SearchResult();
        m_BestClassifier.classifier = AbstractClassifier.makeCopy(m_Classifier);
    } catch (Exception e) {
        System.err.println("Failed to create copy of default classifier!");
        e.printStackTrace();
    }
}
m_Classifiers.add(AbstractClassifier.makeCopy(m_Classifier)); m_Classifiers.get(m_Classifiers.size() - 1).buildClassifier(m_Data);
/**
 * The default constructor: wires up the factory, metrics, class label index,
 * default search parameters/algorithm, and seeds the best-classifier slot
 * with a copy of the default classifier.
 */
public MultiSearch() {
    super();
    // Factory must be created first — the metrics come from it.
    m_Factory = newFactory();
    m_Metrics = m_Factory.newMetrics();
    m_Evaluation = m_Metrics.getDefaultMetric();
    // Default class label is the first index.
    m_ClassLabel = new SingleIndex("1");
    m_Classifier = defaultClassifier();
    m_DefaultParameters = defaultSearchParameters();
    m_Parameters = defaultSearchParameters();
    m_Algorithm = defaultAlgorithm();
    m_Trace = new ArrayList<Entry<Integer, Performance>>();
    try {
        // Best-result holder starts with a copy of the default classifier.
        m_BestClassifier = new SearchResult();
        m_BestClassifier.classifier = AbstractClassifier.makeCopy(m_Classifier);
    } catch (Exception e) {
        System.err.println("Failed to create copy of default classifier!");
        e.printStackTrace();
    }
}
/**
 * Creating classification model.
 *
 * @param indicesToRemove indexes of labels to be removed from dataset
 * @param classIndex index of the label tested as class
 * @param trainDataset the {@link weka.core.Instances} dataset on which the model should be learned
 * @return {@link weka.classifiers.meta.FilteredClassifier} classification model
 * @throws Exception if copying, filtering or training fails
 */
private FilteredClassifier buildModel(int[] indicesToRemove, int classIndex, Instances trainDataset) throws Exception {
    FilteredClassifier model = new FilteredClassifier();
    model.setClassifier(AbstractClassifier.makeCopy(baseLearner));

    // Filter that strips the other label attributes from the input.
    Remove remove = new Remove();
    remove.setAttributeIndicesArray(indicesToRemove);
    remove.setInputFormat(trainDataset);
    remove.setInvertSelection(false);
    model.setFilter(remove);

    // NOTE(review): this mutates the caller's dataset class index — confirm
    // callers expect the side effect.
    trainDataset.setClassIndex(classIndex);
    model.buildClassifier(trainDataset);

    // Cache the model under a key derived from the removed indices and the
    // fold content hash.
    int foldHash = trainDataset.toString().hashCode();
    existingModels.put(createKey(indicesToRemove, foldHash), model);
    return model;
}
try { clsCopy = AbstractClassifier.makeCopy(CLASSIFIER);
/**
 * Trains one binary-relevance model per label: records each label's attribute
 * name, then builds a copy of the base classifier on that label's transformed
 * view of the data.
 *
 * @param train the multi-label training data
 * @throws Exception if copying or training a member fails
 */
protected void buildInternal(MultiLabelInstances train) throws Exception {
    ensemble = new Classifier[numLabels];
    correspondence = new String[numLabels];

    // Remember which attribute each ensemble slot corresponds to.
    for (int j = 0; j < numLabels; j++) {
        correspondence[j] = train.getDataSet().attribute(labelIndices[j]).name();
    }

    debug("preparing shell");
    brt = new BinaryRelevanceTransformation(train);

    // One independent model per label, trained on its transformed view.
    for (int j = 0; j < numLabels; j++) {
        ensemble[j] = AbstractClassifier.makeCopy(baseClassifier);
        Instances labelView = brt.transformInstances(j);
        debug("Bulding model " + (j + 1) + "/" + numLabels);
        ensemble[j].buildClassifier(labelView);
    }
}