@Override
public void initializeBackend() {
    this.backend = new org.deeplearning4j.nn.conf.layers.ActivationLayer();
    this.backend.setActivationFn(new ActivationReLU());
}
@Override
public InputType getOutputType(int layerIndex, InputType inputType) {
    if (inputType == null) {
        throw new IllegalStateException(
                "Invalid input type: null for layer name \"" + getLayerName() + "\"");
    }
    // An activation layer applies an element-wise function, so the output
    // type is identical to the input type.
    return inputType;
}
@OptionMetadata(
    displayName = "activation function",
    description = "The activation function to use (default = ReLU).",
    commandLineParamName = "activation",
    commandLineParamSynopsis = "-activation <specification>",
    displayOrder = 1
)
public Activation getActivationFunction() {
    return Activation.create(backend.getActivationFn());
}
@Override
@SuppressWarnings("unchecked")
public ActivationLayer build() {
    return new ActivationLayer(this);
}
public void setActivationFunction(Activation activationFn) {
    backend.setActivationFn(activationFn.getBackend());
}
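// Usage sketch (illustrative only, not from the source above): wiring a
// non-default activation through the getter/setter pair. The wrapper class
// name ActivationSoftmax is assumed from the weka.dl4j.activations package.
ActivationLayer activationLayer = new ActivationLayer();
activationLayer.initializeBackend();                             // backend now holds the ReLU default
activationLayer.setActivationFunction(new ActivationSoftmax());  // swap in a different activation
Activation current = activationLayer.getActivationFunction();    // reads it back from the backend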
/**
 * Get layer output type.
 *
 * @param inputType Array of InputTypes
 * @return output type as InputType
 * @throws InvalidKerasConfigurationException if more than one input type is supplied
 */
public InputType getOutputType(InputType... inputType) throws InvalidKerasConfigurationException {
    if (inputType.length > 1) {
        throw new InvalidKerasConfigurationException(
                "Keras Activation layer accepts only one input (received " + inputType.length + ")");
    }
    return this.getActivationLayer().getOutputType(-1, inputType[0]);
}
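// Illustration (hypothetical setup, not from the source): the Keras activation
// layer is shape-preserving, so the returned type echoes the single input type.
// 'kerasActivation' stands in for an already-constructed KerasActivation instance.
InputType out = kerasActivation.getOutputType(InputType.feedForward(128));
// 'out' describes the same feed-forward shape of size 128 that went in; passing
// more than one input type would throw InvalidKerasConfigurationException.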
@Override
public Layer instantiate(NeuralNetConfiguration conf, Collection<IterationListener> iterationListeners,
                         int layerIndex, INDArray layerParamsView, boolean initializeParams) {
    org.deeplearning4j.nn.layers.ActivationLayer ret = new org.deeplearning4j.nn.layers.ActivationLayer(conf);
    ret.setListeners(iterationListeners);
    ret.setIndex(layerIndex);
    ret.setParamsViewArray(layerParamsView);
    // Activation layers have no trainable parameters, so the initializer
    // returns an empty param table.
    Map<String, INDArray> paramTable = initializer().init(conf, layerParamsView, initializeParams);
    ret.setParamTable(paramTable);
    ret.setConf(conf);
    return ret;
}
@Override
public INDArray activate(boolean training) {
    if (input == null) {
        throw new IllegalArgumentException("Cannot do forward pass with null input " + layerId());
    }
    applyDropOutIfNecessary(training);

    INDArray in;
    if (training) {
        // dup required: need to keep original input for backprop
        in = input.dup();
    } else {
        in = input;
    }
    return layerConf().getActivationFn().getActivation(in, training);
}
@Override
public Pair<Gradient, INDArray> backpropGradient(INDArray epsilon) {
    // Chain rule: delta = f'(preOutput) * epsilon, computed by the activation function itself.
    INDArray delta = layerConf().getActivationFn().backprop(input.dup(), epsilon).getFirst();
    //TODO handle activation function params

    if (maskArray != null) {
        delta.muliColumnVector(maskArray);
    }

    // No trainable parameters, so the gradient object stays empty.
    Gradient ret = new DefaultGradient();
    return new Pair<>(ret, delta);
}
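// Conceptual sketch (plain Java, not DL4J API): the element-wise chain rule
// that the activation function's backprop() applies, shown here for ReLU.
// Method and parameter names below are hypothetical illustration only.
static double[] backpropReluSketch(double[] preOutput, double[] epsilon) {
    double[] delta = new double[preOutput.length];
    for (int i = 0; i < preOutput.length; i++) {
        // f'(z) for ReLU is 1 where z > 0, else 0
        delta[i] = epsilon[i] * (preOutput[i] > 0 ? 1.0 : 0.0);
    }
    return delta;
}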