public boolean isPretrainUpdaterBlock() {
    //All params in the block should be in the same layer, and all should be pretrain params
    ParamState vs = layersAndVariablesInBlock.get(0);
    return vs.getLayer().conf().getLayer().isPretrainParam(vs.getParamName());
}
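//Hedged illustration only (not part of the library): a sanity check for the invariant noted in
//isPretrainUpdaterBlock() above, i.e. every entry in a pretrain updater block belongs to the same
//layer and is itself a pretrain param. It assumes layersAndVariablesInBlock iterates over the
//ParamState entries referenced above, and uses only the accessors already shown there.
public boolean allEntriesMatchPretrainInvariantSketch() {
    if (!isPretrainUpdaterBlock()) {
        return true; //Invariant only applies to pretrain updater blocks
    }
    Layer firstLayer = layersAndVariablesInBlock.get(0).getLayer();
    for (ParamState ps : layersAndVariablesInBlock) {
        boolean sameLayer = ps.getLayer() == firstLayer;
        boolean isPretrain = ps.getLayer().conf().getLayer().isPretrainParam(ps.getParamName());
        if (!sameLayer || !isPretrain) {
            return false;
        }
    }
    return true;
}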
public static boolean updaterConfigurationsEquals(Layer layer1, String param1, Layer layer2, String param2) {
    org.deeplearning4j.nn.conf.layers.Layer l1 = layer1.conf().getLayer();
    org.deeplearning4j.nn.conf.layers.Layer l2 = layer2.conf().getLayer();
    IUpdater u1 = l1.getIUpdaterByParam(param1);
    IUpdater u2 = l2.getIUpdaterByParam(param2);
    if (!u1.equals(u2)) {
        //Different updaters, or the same updater type with different configuration
        return false;
    }

    //For updaters to be equal (and hence combinable), we require that:
    //(a) The updater-specific configurations are equal (including the learning rate)
    //(b) The learning rate *schedules* are equal
    //(c) If one or more of the params are pretrainable params, they are in the same layer.
    //    This last point is necessary as we don't want to modify the pretrain gradient/updater state during
    //    backprop, or modify the pretrain gradient/updater state of one layer while training another
    if (!lrSchedulesEqual(layer1, param1, layer2, param2)) {
        return false;
    }

    boolean isPretrainParam1 = l1.isPretrainParam(param1);
    boolean isPretrainParam2 = l2.isPretrainParam(param2);
    if (isPretrainParam1 || isPretrainParam2) {
        //One or both params are pretrainable.
        //Either the layers differ -> we don't want to combine pretrain updaters across layers
        //Or one param is pretrain and the other isn't -> we don't want to combine pretrain and
        //non-pretrain updaters within a layer
        return layer1 == layer2 && isPretrainParam1 && isPretrainParam2;
    }

    return true;
}
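//Hedged usage sketch (illustration only, not the library's actual block-building code): given every
//(layer, parameter) pair in flattened parameter order, a new updater block can be started whenever the
//current pair cannot share an updater with the previous one, as decided by updaterConfigurationsEquals().
//Assumes 'layers' and 'paramNames' are parallel, non-empty lists; the method name is hypothetical.
public static java.util.List<Integer> blockStartIndicesSketch(java.util.List<Layer> layers,
                                                              java.util.List<String> paramNames) {
    java.util.List<Integer> blockStarts = new java.util.ArrayList<>();
    blockStarts.add(0); //The first parameter always begins a new block
    for (int i = 1; i < layers.size(); i++) {
        boolean combinable = updaterConfigurationsEquals(layers.get(i - 1), paramNames.get(i - 1),
                        layers.get(i), paramNames.get(i));
        if (!combinable) {
            blockStarts.add(i); //Previous block ends at i-1; a new block begins at index i
        }
    }
    return blockStarts;
}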