// NOTE(review): mid-method fragment — the enclosing epoch loop is outside this view.
// Fits one pass over trainIter, then converts `time` into an elapsed-millis value
// (assumes `time` held System.currentTimeMillis() captured before the call — TODO confirm)
// and logs it together with the epoch index `i`.
wrapper.fit(trainIter); time = System.currentTimeMillis() - time; log.info("*** Completed epoch {}, time: {} ***", i, time);
// NOTE(review): fragment with unbalanced braces — the opening `try` and enclosing method are not
// visible here. Visible steps: log the active workspace mode, reset the stop flag, lazily create
// the trainer zoo (the "Neccessary" typo is in the real method name; renaming would break callers),
// compute a score and average updater state under `locker`, close, and rethrow failures unchecked
// with the cause preserved.
log.info("Using workspaceMode {} for training", workspaceMode.name()); stopFit.set(false); createZooIfNeccessary(false); double score = getScore(locker); averageUpdatersState(locker, score); close(); } catch (Exception e) { throw new RuntimeException(e);
// NOTE(review): fragment — statements from several points of an early-stopping loop collapsed onto
// one line, so braces do not balance. The log.warn format has two {} placeholders but only
// `epochCount` is visible as an argument — presumably the iteration count / exception follow in the
// original source; TODO confirm. The tail shows the termination path: shut the wrapper down and
// null it out, record the termination condition `c`, request stopFit, and break out of the loop.
wrapper.fit(train); } else wrapper.fit(trainMulti); } catch (Exception e) { log.warn("Early stopping training terminated due to exception at epoch {}, iteration {}", epochCount, wrapper.shutdown(); this.wrapper = null; epochTerminate = true; termReason = c; wrapper.stopFit(); break; wrapper.shutdown(); this.wrapper = null;
// NOTE(review): fragment — the same three-step sequence appears twice (attach a StatsListener
// routed to remoteUIRouter, fit on an iterator, then persist the model including updater state via
// ModelSerializer.writeModel(..., true)), presumably from two separate example/run paths that were
// concatenated here.
wrapper.setListeners(remoteUIRouter, new StatsListener(null)); wrapper.fit(dataSetIterator); ModelSerializer.writeModel(model, new File(modelOutputPath), true); wrapper.setListeners(remoteUIRouter, new StatsListener(null)); wrapper.fit(iterator); ModelSerializer.writeModel(model, new File(modelOutputPath), true);
// NOTE(review): statement run, likely from a Builder.build() method — it writes package-private
// fields (averagingFrequency, reportScore, gradientsAccumulator) on the new ParallelWrapper
// directly rather than through setters, then initializes it and attaches the configured listeners.
// The init()-before-setListeners() ordering is preserved as seen; do not reorder without checking
// what init() reads.
ParallelWrapper wrapper = new ParallelWrapper(model, workers, prefetchSize); wrapper.averagingFrequency = this.averagingFrequency; wrapper.reportScore = this.reportScore; wrapper.gradientsAccumulator = this.accumulator; wrapper.init(); wrapper.setListeners(modelListeners);
/**
 * Set the listeners, along with a StatsStorageRouter that results will be routed to
 * (for any listeners implementing the {@link RoutingIterationListener} interface).
 *
 * @param statsStorage Stats storage router to place the results into
 * @param listeners    Listeners to set
 */
public void setListeners(StatsStorageRouter statsStorage, IterationListener... listeners) {
    // Delegate to the collection-based overload.
    Collection<IterationListener> listenerList = Arrays.asList(listeners);
    setListeners(statsStorage, listenerList);
}
@Override
public void iterationDone(Model model, int iteration) {
    // Accumulate the running score and watch for divergence (consecutive NaN scores).
    double scoreLocal = model.score();
    if (scoreLocal != scoreLocal) { // self-compare is false only for NaN (== Double.isNaN)
        numNanEncounteredConsecutively++;
    } else {
        numNanEncounteredConsecutively = 0;
        // BUG FIX: accumulate the model's current score. The original `score += score;`
        // merely doubled the accumulator and never incorporated scoreLocal, which was
        // otherwise computed and discarded.
        score += scoreLocal;
        n++;
    }
    if (numNanEncounteredConsecutively > 100) {
        // Over 100 consecutive NaN scores: training has diverged — stop fitting.
        wrapper.stopFit();
    }
}
};
/**
 * Creates a trainer bound to one worker thread of the given wrapper.
 * The gradients accumulator is taken from the wrapper at construction time.
 */
public SymmetricTrainer(@NonNull Model originalModel, int threadIdx, @NonNull WorkspaceMode mode,
                        @NonNull ParallelWrapper wrapper, boolean useMDS) {
    super();
    // Remember which model/thread this trainer serves and wire it to its parent wrapper.
    this.originalModel = originalModel;
    this.threadId = threadIdx;
    this.workspaceMode = mode;
    this.useMDS = useMDS;
    this.parallelWrapper = wrapper;
    this.accumulator = wrapper.getGradientsAccumulator();
}
/**
 * Stops all threads used for parallel training.
 */
public synchronized void shutdown() {
    try {
        close();
    } catch (Exception ex) {
        // close() may throw a checked exception; surface it unchecked, cause preserved.
        throw new RuntimeException(ex);
    }
}
/**
 * Post-initialization configuration of the replicated model: clones the wrapper's
 * listeners (routing-aware ones receive per-worker IDs) and attaches the clones
 * to this worker's replica.
 */
protected void postInit() {
    Collection<IterationListener> sourceListeners = new ArrayList<>();
    Collection<IterationListener> clonedListeners = new ArrayList<>();
    if (parallelWrapper.getListeners() != null) {
        sourceListeners.addAll(parallelWrapper.getListeners());
    }
    configureListeners(uuid, sourceListeners, clonedListeners);
    this.replicatedModel.setListeners(clonedListeners);
}
private void configureListeners(String workerUUID, Collection<IterationListener> oldListeners, Collection<IterationListener> replicatedListeners) { for (IterationListener listener : oldListeners) { IterationListener l = cloneListener(listener); if (l instanceof RoutingIterationListener) { RoutingIterationListener rl = (RoutingIterationListener) l; //We're assuming session ID is set by the original RoutingIterationListener constructor, which means // it will be synced across all cloned instances rl.setSessionID(((RoutingIterationListener) listener).getSessionID()); rl.setWorkerID(workerUUID); StatsStorageRouter currentRouter = ((RoutingIterationListener) listener).getStorageRouter(); if (currentRouter != null) { //User has set router on the listener/model, instead of via the // setListeners(StatsStorageRouter, ...) method rl.setStorageRouter(currentRouter); } else { rl.setStorageRouter(ParallelWrapper.this.storageRouter); } } replicatedListeners.add(l); } } }
// NOTE(review): mid-method fragment — braces do not balance because the opening `try` and the
// method signature are outside this view. Sequence: announce the workspace mode, clear the stop
// flag, create trainers on demand (createZooIfNeccessary carries a spelling typo in its real name),
// score and average updater state under `locker`, close, and wrap any exception in an unchecked
// RuntimeException keeping the cause.
log.info("Using workspaceMode {} for training", workspaceMode.name()); stopFit.set(false); createZooIfNeccessary(false); double score = getScore(locker); averageUpdatersState(locker, score); close(); } catch (Exception e) { throw new RuntimeException(e);
// NOTE(review): unbalanced fragment from an early-stopping training loop. It fits either the
// single-dataset or multi-dataset iterator, and on the termination path shuts down and clears the
// wrapper, marks the epoch as terminated with reason `c`, stops fitting, and breaks. The log.warn
// call shows two {} placeholders but only `epochCount` as a visible argument — the remaining
// argument(s) were presumably lost when this line was collapsed; verify against the original file.
wrapper.fit(train); } else wrapper.fit(trainMulti); } catch (Exception e) { log.warn("Early stopping training terminated due to exception at epoch {}, iteration {}", epochCount, wrapper.shutdown(); this.wrapper = null; epochTerminate = true; termReason = c; wrapper.stopFit(); break; wrapper.shutdown(); this.wrapper = null;
// NOTE(review): fragment repeating one pattern twice — register a StatsListener whose output is
// routed to remoteUIRouter, run training on an iterator, then write the model (with updater state,
// hence the `true` flag) to modelOutputPath. The two repetitions likely come from distinct code
// paths (DataSetIterator vs. another iterator) merged onto one line.
wrapper.setListeners(remoteUIRouter, new StatsListener(null)); wrapper.fit(dataSetIterator); ModelSerializer.writeModel(model, new File(modelOutputPath), true); wrapper.setListeners(remoteUIRouter, new StatsListener(null)); wrapper.fit(iterator); ModelSerializer.writeModel(model, new File(modelOutputPath), true);
// NOTE(review): builder-style statement run — constructs the ParallelWrapper and copies builder
// state into it via direct (package-private) field writes instead of setters, then runs init() and
// installs the model listeners. Keep init() ahead of setListeners() as written unless init() is
// confirmed not to depend on listener state.
ParallelWrapper wrapper = new ParallelWrapper(model, workers, prefetchSize); wrapper.averagingFrequency = this.averagingFrequency; wrapper.reportScore = this.reportScore; wrapper.gradientsAccumulator = this.accumulator; wrapper.init(); wrapper.setListeners(modelListeners);
/**
 * Sets listeners together with the {@link StatsStorageRouter} that results from any
 * {@link RoutingIterationListener} implementations will be routed to.
 *
 * @param statsStorage stats storage router to place the results into
 * @param listeners    listeners to set
 */
public void setListeners(StatsStorageRouter statsStorage, IterationListener... listeners) {
    // Wrap the varargs array and hand off to the collection-based overload.
    this.setListeners(statsStorage, Arrays.asList(listeners));
}
/**
 * Builds a trainer for a single worker thread, pulling the shared gradients
 * accumulator from the owning {@link ParallelWrapper}.
 */
public SymmetricTrainer(@NonNull Model originalModel, int threadIdx, @NonNull WorkspaceMode mode,
                        @NonNull ParallelWrapper wrapper, boolean useMDS) {
    super();
    this.parallelWrapper = wrapper;
    this.accumulator = wrapper.getGradientsAccumulator();
    this.originalModel = originalModel;
    this.workspaceMode = mode;
    this.threadId = threadIdx;
    this.useMDS = useMDS;
}
/**
 * Causes all threads used for parallel training to stop.
 */
public synchronized void shutdown() {
    try {
        close();
    } catch (Exception cause) {
        // Rethrow unchecked so callers are not forced to handle a checked exception here.
        throw new RuntimeException(cause);
    }
}
/**
 * Configures the replicated model after initialization. This is the place where the
 * wrapper's listeners are cloned per worker and installed on the replica.
 */
protected void postInit() {
    Collection<IterationListener> existing = new ArrayList<>();
    Collection<IterationListener> replicas = new ArrayList<>();
    Collection<IterationListener> wrapperListeners = parallelWrapper.getListeners();
    if (wrapperListeners != null) {
        existing.addAll(wrapperListeners);
    }
    configureListeners(uuid, existing, replicas);
    this.replicatedModel.setListeners(replicas);
}