/**
 * Loads the VGG16 ImageNet model, either from a locally cached serialized copy
 * (when {@code serialize} is enabled) or freshly via the DL4J model zoo.
 * On success sets {@code model}, {@code imageNetLabels} and {@code available};
 * on any failure marks the parser unavailable and rethrows as {@link TikaConfigException}.
 *
 * @param params configuration parameters (currently unused by this initializer)
 * @throws TikaConfigException if the model cannot be loaded or cached
 */
@Override
public void initialize(Map<String, Param> params) throws TikaConfigException {
    try {
        if (serialize) {
            if (cacheDir.exists()) {
                model = ModelSerializer.restoreComputationGraph(cacheDir);
                LOG.info("Preprocessed Model Loaded from {}", cacheDir);
            } else {
                LOG.warn("Preprocessed Model doesn't exist at {}", cacheDir);
                // NOTE(review): mkdirs() result is ignored; a failure surfaces later
                // as an IOException from writeModel and is handled by the catch below.
                cacheDir.getParentFile().mkdirs();
                model = loadPretrainedVGG16();
                LOG.info("Saving the Loaded model for future use. Saved models are more optimised to consume less resources.");
                ModelSerializer.writeModel(model, cacheDir, true);
            }
        } else {
            LOG.info("Weight graph model loaded via dl4j Helper functions");
            model = loadPretrainedVGG16();
        }
        imageNetLabels = new ImageNetLabels();
        available = true;
    } catch (Exception e) {
        available = false;
        LOG.warn(e.getMessage(), e);
        throw new TikaConfigException(e.getMessage(), e);
    }
}

/**
 * Fetches the ImageNet-pretrained VGG16 weights through the DL4J model zoo.
 * Extracted so both the cache-miss and the non-caching paths share one implementation.
 *
 * @return the pretrained computation graph
 * @throws IOException if the pretrained weights cannot be downloaded/read
 */
private ComputationGraph loadPretrainedVGG16() throws IOException {
    ZooModel zooModel = VGG16.builder().build();
    return (ComputationGraph) zooModel.initPretrained(PretrainedType.IMAGENET);
}
/**
 * Restores a serialized {@link ComputationGraph} from the given model file path.
 *
 * @param confOut path of the serialized model file
 * @return the restored computation graph
 * @throws IOException if the file cannot be read or deserialized
 */
private ComputationGraph load(String confOut) throws IOException {
    return ModelSerializer.restoreComputationGraph(confOut);
}
/**
 * Restores a serialized {@link ComputationGraph} from the given model file path.
 *
 * @param modelName path of the serialized model file
 * @return the restored computation graph
 * @throws IOException if the file cannot be read or deserialized
 */
private ComputationGraph load(String modelName) throws IOException {
    ComputationGraph restored = ModelSerializer.restoreComputationGraph(modelName);
    return restored;
}
/**
 * Load a computation graph from a file, including its updater state.
 *
 * @param file the file to get the computation graph from
 * @return the loaded computation graph
 * @throws IOException if the file cannot be read or deserialized
 */
public static ComputationGraph restoreComputationGraph(@NonNull File file) throws IOException {
    // Delegate with loadUpdater = true so optimizer state is restored as well.
    final boolean loadUpdater = true;
    return restoreComputationGraph(file, loadUpdater);
}
/**
 * Load a computation graph from an InputStream, including its updater state.
 *
 * @param is the inputstream to get the computation graph from
 * @return the loaded computation graph
 * @throws IOException if the stream cannot be read or deserialized
 */
public static ComputationGraph restoreComputationGraph(@NonNull InputStream is) throws IOException {
    // Delegate with loadUpdater = true so optimizer state is restored as well.
    final boolean loadUpdater = true;
    return restoreComputationGraph(is, loadUpdater);
}
/**
 * Load a computation graph from a file.
 *
 * @param path path to the model file, to get the computation graph from
 * @param loadUpdater whether to also restore the updater (optimizer) state
 * @return the loaded computation graph
 * @throws IOException if the file cannot be read or deserialized
 */
public static ComputationGraph restoreComputationGraph(@NonNull String path, boolean loadUpdater) throws IOException { return restoreComputationGraph(new File(path), loadUpdater); }
/**
 * Load a computation graph from a file, including its updater state
 * (delegates with {@code loadUpdater = true}).
 *
 * @param path path to the model file, to get the computation graph from
 * @return the loaded computation graph
 * @throws IOException if the file cannot be read or deserialized
 */
public static ComputationGraph restoreComputationGraph(@NonNull String path) throws IOException { return restoreComputationGraph(new File(path), true); }
/**
 * Load a computation graph from an InputStream by spooling it to a temporary file
 * first (the file-based restore path needs random access to the zip archive).
 *
 * @param is the inputstream to get the computation graph from
 * @param loadUpdater whether to also restore the updater (optimizer) state
 * @return the loaded computation graph
 * @throws IOException if the stream cannot be read or deserialized
 */
public static ComputationGraph restoreComputationGraph(@NonNull InputStream is, boolean loadUpdater) throws IOException {
    File tmpFile = File.createTempFile("restore", "compGraph");
    tmpFile.deleteOnExit();
    // try-with-resources guarantees the output stream is closed (and flushed)
    // even if the copy fails — the original leaked it on exception.
    try (BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(tmpFile))) {
        IOUtils.copy(is, bos);
        bos.flush();
    }
    return restoreComputationGraph(tmpFile, loadUpdater);
}
// NOTE(review): truncated fragment of a fallback loading chain — it retries
// restoring a ComputationGraph with, then without, the updater state. As shown,
// two catch blocks of the same Exception type on one try cannot compile, so this
// snippet is garbled/incomplete; verify against the full file before editing.
log.warn("Tried multi layer network"); try { return ModelSerializer.restoreComputationGraph(new File(path), true); } catch (Exception e1) { log.warn("Tried computation graph"); } catch (Exception e4) { try { return ModelSerializer.restoreComputationGraph(new File(path), false); } catch (Exception e5) { try {
// Restore the graph from the temp file without its updater state (loadUpdater = false).
model = ModelSerializer.restoreComputationGraph(tmpFile, false);
// Fragment: picks the restore routine by model kind; both branches skip the
// updater state (loadUpdater = false). The surrounding if/else is cut off here.
m = ModelSerializer.restoreComputationGraph(modelFile, false); } else { m = ModelSerializer.restoreMultiLayerNetwork(modelFile, false);
// Fragment: dispatches on modelType() — MultiLayerNetwork vs ComputationGraph;
// any other type falls through to UnsupportedOperationException (message truncated).
return ModelSerializer.restoreMultiLayerNetwork(cachedFile); } else if (modelType() == ComputationGraph.class) { return ModelSerializer.restoreComputationGraph(cachedFile); } else { throw new UnsupportedOperationException(
// Fragment: fallback chain retrying restoreComputationGraph on the same stream,
// first with, then without the updater.
// NOTE(review): re-reading `stream` after a failed attempt only works if the
// stream supports mark/reset or is re-opened upstream — verify in the full file.
} catch (Exception e) { try { return ModelSerializer.restoreComputationGraph(stream, true); } catch (Exception e1) { try { return ModelSerializer.restoreComputationGraph(stream, false);
public Model loadModel(String modelNamePrefix) throws IOException { Model model = null; String pathname = getPath(modelNamePrefix, "/%sModel.bin"); if (new File(pathname).exists()) { model = ModelSerializer.restoreMultiLayerNetwork(pathname); return model; } else { pathname = getPath(modelNamePrefix, "/%s-ComputationGraph.bin"); if (new File(pathname).exists()) { model = ModelSerializer.restoreComputationGraph(pathname); return model; } } MultiLayerNetwork net = null; if (!(new File(pathname).exists() || new File(getPath(modelNamePrefix, "/%sModelParams.bin")).exists())) { return null; } //Load parameters from disk: INDArray newParams; DataInputStream dis = new DataInputStream(new FileInputStream(getPath(modelNamePrefix, "/%sModelParams.bin"))); newParams = Nd4j.read(dis); //Load network configuration from disk: MultiLayerConfiguration confFromJson = MultiLayerConfiguration.fromJson(FileUtils.readFileToString(new File(getPath(modelNamePrefix, "/%sModelConf.json")), Charset.defaultCharset())); //Create a MultiLayerNetwork from the saved configuration and parameters net = new MultiLayerNetwork(confFromJson); net.init(); net.setParameters(newParams); return net; }
/**
 * Best-effort model load: a file name ending in "zip" is treated as a fully
 * serialized ComputationGraph; anything else is treated as a raw parameter file
 * whose INDArray is injected into the already-constructed network. Failures are
 * logged (with stack trace) and the model is simply left unavailable.
 *
 * @param modelFile resource pointing at the serialized model or parameter file
 */
@Override
public void loadModel(Resource modelFile) {
    // NOTE(review): endsWith("zip") also matches names like "myzip" without a
    // dot — presumably ".zip" was intended; confirm before tightening.
    if (modelFile.getFileName().endsWith("zip")) {
        try (InputStream is = modelFile.getInputStream()) {
            net = ModelSerializer.restoreComputationGraph(is, true);
            setModel(modelFile);
            setModelAvailable(true);
            log.info("loaded ComputationGraph from {}", modelFile.getFileName());
        } catch (IOException ex) {
            // Log the throwable itself — the original logged only ex.toString(),
            // discarding the stack trace.
            log.error("failed to load ComputationGraph from {}", modelFile.getFileName(), ex);
        }
    } else {
        try (DataInputStream dis = new DataInputStream(modelFile.getInputStream())) {
            INDArray newParams = Nd4j.read(dis);
            ((MultiLayerNetwork) net).setParameters(newParams);
            setModel(modelFile);
            setModelAvailable(true);
            log.info("loaded MultiLayerNetwork from {}", modelFile.getFileName());
        } catch (IOException ex) {
            log.error("failed to load MultiLayerNetwork parameters from {}", modelFile.getFileName(), ex);
        }
    }
}
/**
 * One-time classifier setup: builds the image loader from the configured input
 * format, loads the class labels, and restores the serialized network.
 *
 * @throws IOException if the model file cannot be read or deserialized
 */
public static void init() throws IOException {
    // Input geometry comes from configuration as {dim0, dim1, dim2}.
    int[] inputFormat = Properties.classifierInputFormat();
    loader = new NativeImageLoader(inputFormat[0], inputFormat[1], inputFormat[2]);
    labels = Properties.classifierLabels();
    model = ModelSerializer.restoreComputationGraph(Properties.classifierModelPath());
    model.init();
}
public static void main(String[] args) throws Exception { //Define a simple ComputationGraph: ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder() .weightInit(WeightInit.XAVIER) .updater(new Nesterovs(0.01, 0.9)) .graphBuilder() .addInputs("in") .addLayer("layer0", new DenseLayer.Builder().nIn(4).nOut(3).activation(Activation.TANH).build(), "in") .addLayer("layer1", new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).activation(Activation.SOFTMAX).nIn(3).nOut(3).build(), "layer0") .setOutputs("layer1") .backprop(true).pretrain(false).build(); ComputationGraph net = new ComputationGraph(conf); net.init(); //Save the model File locationToSave = new File("model/MyComputationGraph.zip"); //Where to save the network. Note: the file is in .zip format - can be opened externally boolean saveUpdater = true; //Updater: i.e., the state for Momentum, RMSProp, Adagrad etc. Save this if you want to train your network more in the future ModelSerializer.writeModel(net, locationToSave, saveUpdater); //Load the model ComputationGraph restored = ModelSerializer.restoreComputationGraph(locationToSave); System.out.println("Saved and loaded parameters are equal: " + net.params().equals(restored.params())); System.out.println("Saved and loaded configurations are equal: " + net.getConfiguration().equals(restored.getConfiguration())); }
// Fragment: restore a previously trained graph from disk; the (truncated)
// else-branch rebuilds the model from scratch instead.
log.info("Load model..."); model = ModelSerializer.restoreComputationGraph(modelFilename); } else { log.info("Build model...");