/**
 * Sets the parameters from the parent's parameters object, giving defaults to all
 * parameters declared in this object.
 *
 * @param p The parent's parameters object.
 **/
public Parameters(Learner.Parameters p) {
    super(p);
    // Assign the simple default first, then clone the default learner so this
    // instance never shares mutable state with the static default.
    defaultPrediction = defaultDefaultPrediction;
    baseLearner = (Learner) defaultBaseLearner.clone();
}
/**
 * Default constructor; every parameter declared in this object receives its
 * default value.
 **/
public Parameters() {
    // Clone the default weak learner so that mutations of this parameters
    // object cannot leak into the shared static default.
    rounds = defaultRounds;
    weakLearner = (Learner) defaultWeakLearner.clone();
}
/**
 * Sets the parameters from the parent's parameters object, giving defaults to all
 * parameters declared in this object.
 *
 * @param p The parent's parameters object.
 **/
public Parameters(Learner.Parameters p) {
    super(p);
    // Defaults for this object's own parameters; the weak learner default is
    // cloned to avoid sharing mutable state with the static default.
    rounds = defaultRounds;
    weakLearner = (Learner) defaultWeakLearner.clone();
}
/**
 * Default constructor; every parameter declared in this object receives its
 * default value.
 **/
public Parameters() {
    // Clone the default base learner so this instance never aliases the
    // shared static default.
    defaultPrediction = defaultDefaultPrediction;
    baseLearner = (Learner) defaultBaseLearner.clone();
}
/**
 * Returns a clone of this learning algorithm.
 * <p>
 * NOTE(review): only the shallow copy produced by {@code super.clone()} is
 * returned here — unlike sibling learners, no learner-specific state is
 * deep-cloned.  TODO confirm whether internal weights need a deep copy.
 *
 * @return A clone of this learning algorithm.
 **/
public Object clone() {
    NeuralNetLearner clone = null;
    try {
        clone = (NeuralNetLearner) super.clone();
    } catch (Exception e) {
        // Bug fix: the message previously named StochasticGradientDescent
        // (copy-paste error).  Stack trace added for consistency with the
        // other clone() implementations in this library.
        System.err.println("Error cloning NeuralNetLearner: " + e);
        e.printStackTrace();
        System.exit(1);
    }
    return clone;
}
/**
 * Returns a deep clone of this learning algorithm: the shallow copy from
 * {@code super.clone()} plus a cloned weight vector.
 **/
public Object clone() {
    StochasticGradientDescent copy = null;
    try {
        copy = (StochasticGradientDescent) super.clone();
    } catch (Exception e) {
        System.err.println("Error cloning StochasticGradientDescent: " + e);
        System.exit(1);
    }
    // Deep-copy the weights so the clone learns independently.
    copy.weightVector = (SparseWeightVector) weightVector.clone();
    return copy;
}
/**
 * Returns a deep clone of this learning algorithm.  The weight vector, when
 * present, is cloned as well so the copy's weights are independent.
 **/
public Object clone() {
    LinearThresholdUnit copy = (LinearThresholdUnit) super.clone();
    if (weightVector == null)
        return copy;
    copy.weightVector = (SparseWeightVector) weightVector.clone();
    return copy;
}
/**
 * Returns a deep clone of this learning algorithm: the base learner and every
 * non-null entry of the learner network are cloned.
 **/
public Object clone() {
    MuxLearner result = null;
    try {
        result = (MuxLearner) super.clone();
    } catch (Exception e) {
        System.err.println("Error cloning MuxLearner: " + e);
        e.printStackTrace();
        System.exit(1);
    }

    result.baseLearner = (Learner) baseLearner.clone();
    final int size = network.size();
    result.network = new OVector(size);
    // set(i, ...) rather than add(...): null entries keep their index slots
    // in the cloned network instead of being compacted out.
    for (int i = 0; i < size; ++i) {
        Learner entry = (Learner) network.get(i);
        if (entry != null)
            result.network.set(i, entry.clone());
    }
    return result;
}
/**
 * Returns a deep clone of this learning algorithm: every weight vector in the
 * network is cloned into a fresh vector.
 * <p>
 * NOTE(review): unlike MuxLearner/SparseNetworkLearner, no null check is made
 * on network entries — presumably this network never contains nulls; confirm.
 **/
public Object clone() {
    SparseMIRA result = null;
    try {
        result = (SparseMIRA) super.clone();
    } catch (Exception e) {
        System.err.println("Error cloning SparseMIRA: " + e);
        e.printStackTrace();
        System.exit(1);
    }

    final int size = network.size();
    result.network = new OVector(size);
    for (int i = 0; i < size; ++i) {
        BiasedRandomWeightVector v = (BiasedRandomWeightVector) network.get(i);
        result.network.add(v.clone());
    }
    return result;
}
/**
 * Returns a deep clone of this learning algorithm: the base LTU is cloned and
 * each non-null LTU in the network is cloned (nulls are carried over as-is).
 **/
public Object clone() {
    SparseNetworkLearner result = null;
    try {
        result = (SparseNetworkLearner) super.clone();
    } catch (Exception e) {
        System.err.println("Error cloning SparseNetworkLearner: " + e);
        e.printStackTrace();
        System.exit(1);
    }

    result.baseLTU = (LinearThresholdUnit) baseLTU.clone();
    final int size = network.size();
    result.network = new OVector(size);
    for (int i = 0; i < size; ++i) {
        LinearThresholdUnit unit = (LinearThresholdUnit) network.get(i);
        // Preserve null placeholders; clone everything else.
        result.network.add(unit == null ? null : unit.clone());
    }
    return result;
}
/**
 * Returns a deep clone of this learning algorithm: each count vector in the
 * network is cloned into a fresh network vector.
 **/
public Object clone() {
    NaiveBayes result = (NaiveBayes) super.clone();
    final int size = network.size();
    result.network = new OVector(size);
    for (int i = 0; i < size; ++i) {
        NaiveBayesVector v = (NaiveBayesVector) network.get(i);
        result.network.add(v.clone());
    }
    return result;
}
weakLearners[i] = (Learner) weakLearner.clone(); weakLearners[i].setLabelLexicon(labelLexicon); weakLearners[i].learn((Object[]) sample);
Learner testLearner = clone ? (Learner) learner.clone() : learner; testLearner.doneLearning(); double result = 0;
l = (Learner) baseLearner.clone(); l.setLabelLexicon(labelLexicon); network.set(selections[i], l);
testLearner = (Learner) learner.clone(); testLearner.doneLearning();