// Fragment of ComputationGraph.equals(Object): two graphs compare equal iff
// their parameters, configurations, and updater states are all equal.
// NOTE(review): the method signature and any preceding null/instanceof guard
// are outside this view — presumably `obj` was already verified to be a
// ComputationGraph before this cast; TODO confirm against the full method.
ComputationGraph network = (ComputationGraph) obj; boolean paramsEquals = network.params().equals(params()); // learned parameter values match
boolean confEquals = getConfiguration().equals(network.getConfiguration()); // network architecture/hyperparameters match
boolean updaterEquals = getUpdater().equals(network.getUpdater()); // optimizer (updater) state matches
return paramsEquals && confEquals && updaterEquals;
public static void main(String[] args) throws Exception { //Define a simple ComputationGraph: ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder() .weightInit(WeightInit.XAVIER) .updater(new Nesterovs(0.01, 0.9)) .graphBuilder() .addInputs("in") .addLayer("layer0", new DenseLayer.Builder().nIn(4).nOut(3).activation(Activation.TANH).build(), "in") .addLayer("layer1", new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).activation(Activation.SOFTMAX).nIn(3).nOut(3).build(), "layer0") .setOutputs("layer1") .backprop(true).pretrain(false).build(); ComputationGraph net = new ComputationGraph(conf); net.init(); //Save the model File locationToSave = new File("model/MyComputationGraph.zip"); //Where to save the network. Note: the file is in .zip format - can be opened externally boolean saveUpdater = true; //Updater: i.e., the state for Momentum, RMSProp, Adagrad etc. Save this if you want to train your network more in the future ModelSerializer.writeModel(net, locationToSave, saveUpdater); //Load the model ComputationGraph restored = ModelSerializer.restoreComputationGraph(locationToSave); System.out.println("Saved and loaded parameters are equal: " + net.params().equals(restored.params())); System.out.println("Saved and loaded configurations are equal: " + net.getConfiguration().equals(restored.getConfiguration())); }