/** Delegates the textual representation to the wrapped model. */
@Override
public String toString() {
    return getModel().toString();
}
public static void main(String[] args) throws ExceptionHugin { //We load from Hugin format Domain huginBN = BNLoaderFromHugin.loadFromFile("./networks/simulated/WasteIncinerator.bn"); //Then, it is converted to AMIDST BayesianNetwork object BayesianNetwork amidstBN = BNConverterToAMIDST.convertToAmidst(huginBN); //Then, it is converted to Hugin Bayesian Network object huginBN = BNConverterToHugin.convertToHugin(amidstBN); System.out.println(amidstBN.toString()); System.out.println(huginBN.toString()); } }
public static void main(String[] args) throws Exception { //loads a BN model from Hugin Domain huginBN = BNLoaderFromHugin.loadFromFile("networks/dataWeka/asia.net"); //Converts the Hugin model to an AMIDST BayesianNetwork object BayesianNetwork amidstBN = BNConverterToAMIDST.convertToAmidst(huginBN); //Converts the AMIDST BayesianNetwork object to a Hugin Bayesian Network object huginBN = BNConverterToHugin.convertToHugin(amidstBN); System.out.println(amidstBN.toString()); System.out.println(huginBN.toString()); } }
public static void main(String[] args) throws Exception { //We can load a Bayesian network using the static class BayesianNetworkLoader BayesianNetwork bn = BayesianNetworkLoader.loadFromFile("./networks/simulated/WasteIncinerator.bn"); //Now we print the loaded model System.out.println(bn.toString()); //Now we change the parameters of the model bn.randomInitialization(new Random(0)); //We can save this Bayesian network to using the static class BayesianNetworkWriter BayesianNetworkWriter.save(bn, "networks/simulated/tmp.bn"); } }
/** * This is the main method of the class which contains the sequence of executions included in the demo. * @param args input arguments (not used) * @throws Exception if an error occurs while reading the file. */ public static void main(String[] args) throws Exception { //Step 1. We show how to compute the monthly average value of the "expenses" variable. System.out.println("-----------------------CREDIT MONTHLY AVERAGE--------------------------"); BCC.computeMonthlyAverage(); System.out.println("-----------------------------------------------------------------------"); //Step 2. We build the NaiveBayes DAG with a global hidden var to track the concept drift System.out.println("--------------------------------DAG------------------------------------"); DAG dag = BCC.modelBuilding(); System.out.println(dag.toString()); System.out.println("-----------------------------------------------------------------------"); //Step 3. We set up the plateau structure use for learning System.out.println("------------------------DEFINING PLATEAU MODEL-------------------------"); ParallelSVB parallelSVB = BCC.plateuModelSetUp(dag); System.out.println("-----------------------------------------------------------------------"); //Step 4. We learn the model and print the results. System.out.println("------------------------------LEARNING---------------------------------"); BayesianNetwork bayesianNetwork = BCC.learnModel(parallelSVB); System.out.println("\n\nLearnt Bayesian network:\n\n"); System.out.println(bayesianNetwork.toString()); System.out.println("-----------------------------------------------------------------------"); }
public static void main(String[] args) { DynamicBayesianNetworkGenerator.setNumberOfContinuousVars(0); DynamicBayesianNetworkGenerator.setNumberOfDiscreteVars(5); DynamicBayesianNetworkGenerator.setNumberOfStates(2); DynamicBayesianNetworkGenerator.setNumberOfLinks(5); DynamicBayesianNetwork dynamicNaiveBayes = DynamicBayesianNetworkGenerator.generateDynamicNaiveBayes(new Random(0), 2, true); System.out.println("ORIGINAL DYNAMIC DAG:"); System.out.println(dynamicNaiveBayes.getDynamicDAG().toString()); //System.out.println(dynamicNaiveBayes.toString()); System.out.println(); //dynamicNaiveBayes.getDynamicVariables().getListOfDynamicVariables().forEach(var -> System.out.println(var.getName())); //dynamicNaiveBayes.getDynamicVariables().getListOfDynamicVariables().forEach(var -> System.out.println(var.getName())); BayesianNetwork bn = DynamicToStaticBNConverter.convertDBNtoBN(dynamicNaiveBayes,4); System.out.println("NEW STATIC DAG:"); System.out.println(); System.out.println(bn.getDAG().toString()); System.out.println(); System.out.println("ORIGINAL DYNAMIC BN:"); System.out.println(dynamicNaiveBayes.toString()); System.out.println("STATIC BN:"); System.out.println(bn.toString()); } }
public static void main(String[] args) throws IOException, ClassNotFoundException { int batchSize = 100; DataStream<DataInstance> data = DataStreamLoader.open("datasets/simulated/WasteIncineratorSample.arff");; //We can load a Bayesian network using the static class BayesianNetworkLoader DAG dag = DAGGenerator.getNaiveBayesStructure(data.getAttributes(), "B"); BayesianNetwork bn = new BayesianNetwork(dag); data.getAttributes().forEach(attribute -> System.out.println(attribute.getName())); //Now we print the loaded model System.out.println(bn.toString()); EF_BayesianNetwork efbn = new EF_BayesianNetwork(bn); SufficientStatistics sumSS = data.parallelStream(batchSize) .map(efbn::getSufficientStatistics) //see Program 6 .reduce(SufficientStatistics::sumVectorNonStateless).get(); //.reduce((v1,v2) -> {v1.sum(v2); return v1;}).get(); sumSS.divideBy(data.stream().count()); for(int i=0; i<sumSS.size(); i++) { System.out.println(sumSS.get(i)); } }
/**
 * Samples 10000 instances from the "asia" network and learns a TAN
 * classifier from the sample using 4 cores.
 *
 * @param args command-line arguments (unused)
 * @throws Exception if loading the network or learning the model fails
 */
public static void main(String[] args) throws Exception {
    int sampleSize = 10000;

    // Load the source network and show it.
    BayesianNetwork asiaNet = BayesianNetworkLoader.loadFromFile("networks/simulated/asia.bn");
    System.out.println(asiaNet);

    // Draw a sample of the requested size from the network.
    BayesianNetworkSampler sampler = new BayesianNetworkSampler(asiaNet);
    DataStream<DataInstance> data = sampler.sampleToDataStream(sampleSize);

    // Configure the parallel TAN learner: root variable "X", class variable "E".
    ParallelTAN tan = new ParallelTAN();
    tan.setNumCores(4);
    tan.setNumSamplesOnMemory(1000);
    tan.setNameRoot("X");
    tan.setNameTarget("E");

    // Learn and print the classifier.
    BayesianNetwork model = tan.learn(data);
    System.out.println(model.toString());
}
}
public static void main(String[] args) throws Exception { //We can open the data stream using the static class DataStreamLoader DataStream<DataInstance> data = DataStreamLoader.open("datasets/simulated/WasteIncineratorSample.arff"); //We create a ParameterLearningAlgorithm object with the MaximumLikehood builder ParameterLearningAlgorithm parameterLearningAlgorithm = new ParallelMaximumLikelihood(); //We fix the DAG structure parameterLearningAlgorithm.setDAG(getNaiveBayesStructure(data,0)); //We should invoke this method before processing any data parameterLearningAlgorithm.initLearning(); //Then we show how we can perform parameter learnig by a sequential updating of data batches. for (DataOnMemory<DataInstance> batch : data.iterableOverBatches(100)){ parameterLearningAlgorithm.updateModel(batch); } //And we get the model BayesianNetwork bnModel = parameterLearningAlgorithm.getLearntBayesianNetwork(); //We print the model System.out.println(bnModel.toString()); }
// NOTE(review): orphaned fragment — `bn` is declared in surrounding code not visible here; prints the network's textual form.
System.out.println(bn.toString());
public static void main(String[] args) throws Exception { //We can open the data stream using the static class DataStreamLoader DataStream<DataInstance> data = DataStreamLoader.open("datasets/simulated/WasteIncineratorSample.arff"); //We create a ParameterLearningAlgorithm object with the MaximumLikelihoodFading builder MaximumLikelihoodFading parameterLearningAlgorithm = new MaximumLikelihoodFading(); //We fix the DAG structure parameterLearningAlgorithm.setDAG(MaximimumLikelihoodByBatchExample.getNaiveBayesStructure(data, 0)); //We fix the fading or forgeting factor parameterLearningAlgorithm.setFadingFactor(0.9); //We set the batch size which will be employed to learn the model parameterLearningAlgorithm.setWindowsSize(100); //We set the data which is going to be used for leaning the parameters parameterLearningAlgorithm.setDataStream(data); //We perform the learning parameterLearningAlgorithm.runLearning(); //And we get the model BayesianNetwork bnModel = parameterLearningAlgorithm.getLearntBayesianNetwork(); //We print the model System.out.println(bnModel.toString()); }
public static void main(String[] args) throws Exception { //We can open the data stream using the static class DataStreamLoader DataStream<DataInstance> data = DataStreamLoader.open("datasets/simulated/WasteIncineratorSample.arff"); //We create a SVB object SVB parameterLearningAlgorithm = new SVB(); //We fix the DAG structure parameterLearningAlgorithm.setDAG(DAGGenerator.getHiddenNaiveBayesStructure(data.getAttributes(),"H",2)); //We fix the size of the window, which must be equal to the size of the data batches we use for learning parameterLearningAlgorithm.setWindowsSize(100); //We can activate the output parameterLearningAlgorithm.setOutput(true); //We should invoke this method before processing any data parameterLearningAlgorithm.initLearning(); //Then we show how we can perform parameter learning by a sequential updating of data batches. for (DataOnMemory<DataInstance> batch : data.iterableOverBatches(100)){ double log_likelhood_of_batch = parameterLearningAlgorithm.updateModel(batch); System.out.println("Log-Likelihood of Batch: "+ log_likelhood_of_batch); } //And we get the model BayesianNetwork bnModel = parameterLearningAlgorithm.getLearntBayesianNetwork(); //We print the model System.out.println(bnModel.toString()); }
// NOTE(review): orphaned fragment — `bn` is declared in surrounding code not visible here; prints the network's textual form.
System.out.println(bn.toString());
public static void main(String[] args) throws Exception { //We can open the data stream using the static class DataStreamLoader DataStream<DataInstance> data = DataStreamLoader.open("datasets/simulated/WasteIncineratorSample.arff"); //We create a SVB object SVB parameterLearningAlgorithm = new SVB(); //We fix the DAG structure parameterLearningAlgorithm.setDAG(DAGGenerator.getHiddenNaiveBayesStructure(data.getAttributes(),"GlobalHidden", 2)); //We fix the size of the window parameterLearningAlgorithm.setWindowsSize(100); //We can activate the output parameterLearningAlgorithm.setOutput(true); //We set the data which is going to be used for leaning the parameters parameterLearningAlgorithm.setDataStream(data); //We perform the learning parameterLearningAlgorithm.runLearning(); //And we get the model BayesianNetwork bnModel = parameterLearningAlgorithm.getLearntBayesianNetwork(); //We print the model System.out.println(bnModel.toString()); }
// NOTE(review): orphaned fragment — `bn` is declared in surrounding code not visible here; prints the network's textual form.
System.out.println(bn.toString());
public static void main(String[] args) throws Exception { //We can open the data stream using the static class DataStreamLoader DataStream<DataInstance> data = DataStreamLoader.open("datasets/simulated/WasteIncineratorSample.arff"); //We create a ParallelMaximumLikelihood object with the MaximumLikehood builder ParallelMaximumLikelihood parameterLearningAlgorithm = new ParallelMaximumLikelihood(); //We activate the parallel mode. parameterLearningAlgorithm.setParallelMode(true); //We desactivate the debug mode. parameterLearningAlgorithm.setDebug(false); //We fix the DAG structure parameterLearningAlgorithm.setDAG(MaximimumLikelihoodByBatchExample.getNaiveBayesStructure(data, 0)); //We set the batch size which will be employed to learn the model in parallel parameterLearningAlgorithm.setWindowsSize(100); //We set the data which is going to be used for leaning the parameters parameterLearningAlgorithm.setDataStream(data); //We perform the learning parameterLearningAlgorithm.runLearning(); //And we get the model BayesianNetwork bnModel = parameterLearningAlgorithm.getLearntBayesianNetwork(); //We print the model System.out.println(bnModel.toString()); }
public static void main(String[] args) throws Exception { //We can open the data stream using the static class DataStreamLoader DataStream<DataInstance> data = DataStreamLoader.open("datasets/simulated/WasteIncineratorSample.arff"); //We create a SVB object SVBFading parameterLearningAlgorithm = new SVBFading(); //We fix the DAG structure parameterLearningAlgorithm.setDAG(DAGGenerator.getHiddenNaiveBayesStructure(data.getAttributes(),"GlobalHidden", 2)); //We fix the fading or forgeting factor parameterLearningAlgorithm.setFadingFactor(0.9); //We fix the size of the window parameterLearningAlgorithm.setWindowsSize(100); //We can activate the output parameterLearningAlgorithm.setOutput(true); //We set the data which is going to be used for leaning the parameters parameterLearningAlgorithm.setDataStream(data); //We perform the learning parameterLearningAlgorithm.runLearning(); //And we get the model BayesianNetwork bnModel = parameterLearningAlgorithm.getLearntBayesianNetwork(); //We print the model System.out.println(bnModel.toString()); }
public static void main(String[] args) throws Exception { //We can open the data stream using the static class DataStreamLoader DataStream<DataInstance> data = DataStreamLoader.open("datasets/simulated/WasteIncineratorSample.arff"); //We create a ParallelSVB object ParallelSVB parameterLearningAlgorithm = new ParallelSVB(); //We fix the number of cores we want to exploit parameterLearningAlgorithm.setNCores(4); //We fix the DAG structure, which is a Naive Bayes with a global latent binary variable parameterLearningAlgorithm.setDAG(DAGGenerator.getHiddenNaiveBayesStructure(data.getAttributes(), "H", 2)); //We fix the size of the window parameterLearningAlgorithm.getSVBEngine().setWindowsSize(100); //We can activate the output parameterLearningAlgorithm.setOutput(true); //We set the data which is going to be used for leaning the parameters parameterLearningAlgorithm.setDataStream(data); //We perform the learning parameterLearningAlgorithm.runLearning(); //And we get the model BayesianNetwork bnModel = parameterLearningAlgorithm.getLearntBayesianNetwork(); //We print the model System.out.println(bnModel.toString()); }