/**
 * Prints the "Training Characteristics" section header to the notebook.
 *
 * @param log the notebook output the header is written to
 */
protected void printHeader(@Nonnull NotebookOutput log) {
  log.h1("Training Characteristics");
}
/**
 * Writes the top-level "Training Characteristics" heading for this report.
 *
 * @param log the notebook output to emit the heading into
 */
protected void printHeader(@Nonnull NotebookOutput log) {
  log.h1("Training Characteristics");
}
/**
 * Emits the report's "Training Characteristics" H1 header.
 *
 * @param log the notebook output receiving the header
 */
protected void printHeader(@Nonnull NotebookOutput log) {
  log.h1("Training Characteristics");
}
/**
 * Verifies that splitting the same items across different batch sizes does not
 * change this layer's behavior, reporting the comparison into the notebook.
 *
 * @param log            the notebook output used for reporting
 * @param reference      the reference layer under test
 * @param inputPrototype the input prototype tensors
 * @return the tolerance statistics of the batch-invariance comparison
 */
@Override
public ToleranceStatistics test(@Nonnull final NotebookOutput log, final Layer reference, @Nonnull final Tensor... inputPrototype) {
  log.h1("Batch Execution");
  log.p("Most layers, including this one, should behave the same no matter how the items are split between batches. We verify this:");
  // Expression lambda: delegate to the non-reporting overload and capture its result in the log.
  return log.eval(() -> test(reference, inputPrototype));
}
/**
 * Checks batch-size invariance: the layer must produce identical results
 * regardless of how the items are partitioned into batches.
 *
 * @param log            the notebook output used for reporting
 * @param reference      the reference layer being validated
 * @param inputPrototype the input prototype tensors
 * @return the tolerance statistics from the comparison
 */
@Override
public ToleranceStatistics test(@Nonnull final NotebookOutput log, final Layer reference, @Nonnull final Tensor... inputPrototype) {
  log.h1("Batch Execution");
  log.p("Most layers, including this one, should behave the same no matter how the items are split between batches. We verify this:");
  return log.eval(() -> test(reference, inputPrototype));
}
/**
 * Runs the batch-execution check for the given layer and logs the outcome:
 * results must be independent of how items are grouped into batches.
 *
 * @param log            the notebook output for the report section
 * @param reference      the reference layer to exercise
 * @param inputPrototype the input prototype tensors
 * @return the resulting tolerance statistics
 */
@Override
public ToleranceStatistics test(@Nonnull final NotebookOutput log, final Layer reference, @Nonnull final Tensor... inputPrototype) {
  log.h1("Batch Execution");
  log.p("Most layers, including this one, should behave the same no matter how the items are split between batches. We verify this:");
  return log.eval(() -> test(reference, inputPrototype));
}
/**
 * Compares the subject layer against the reference implementation, logging the
 * reference layer's JSON and the agreement statistics of a random execution.
 *
 * @param output         the notebook output used for reporting
 * @param subject        the subject layer compared against the reference implementation
 * @param inputPrototype the input prototype tensors
 * @return the tolerance statistics of the agreement measurement
 */
@Override
public ToleranceStatistics test(@Nonnull final NotebookOutput output, final Layer subject, @Nonnull final Tensor... inputPrototype) {
  output.h1("Reference Implementation");
  // Fixed terminology: a sibling overload of this test says "layer"; "key" was a
  // stray global-rename artifact in this user-facing message.
  output.p("This layer is an alternate implementation which is expected to behave the same as the following layer:");
  output.run(() -> {
    // NOTE(review): `log` and `reference` are not parameters of this method; they
    // appear to be fields of the enclosing class (a logger and the reference layer) — confirm.
    log.info(new GsonBuilder().setPrettyPrinting().create().toJson(reference.getJson()));
  });
  // "measureStyle" was another rename artifact; restored to plain English.
  output.p("We measure the agreement between the two layers in a random execution:");
  return output.eval(() -> test(subject, inputPrototype));
}
/**
 * Validates equivalency between the subject layer and a reference
 * implementation: prints the reference layer's JSON, then measures agreement
 * over a random execution.
 *
 * @param output         the notebook output used for reporting
 * @param subject        the subject layer to validate
 * @param inputPrototype the input prototype tensors
 * @return the tolerance statistics of the agreement measurement
 */
@Override
public ToleranceStatistics test(@Nonnull final NotebookOutput output, final Layer subject, @Nonnull final Tensor... inputPrototype) {
  output.h1("Reference Implementation");
  // Fixed terminology: "key" was a stray rename artifact; the sibling overload says "layer".
  output.p("This layer is an alternate implementation which is expected to behave the same as the following layer:");
  output.run(() -> {
    // NOTE(review): `log` and `reference` are not parameters here; presumably class
    // fields (logger + reference layer) — confirm against the enclosing class.
    log.info(new GsonBuilder().setPrettyPrinting().create().toJson(reference.getJson()));
  });
  // "measureStyle" was a rename artifact; restored to plain English.
  output.p("We measure the agreement between the two layers in a random execution:");
  return output.eval(() -> test(subject, inputPrototype));
}
/**
 * Compares the subject layer against a reference implementation expected to be
 * behaviorally identical, reporting the reference JSON and agreement statistics.
 *
 * @param output         the notebook output used for reporting
 * @param subject        the subject layer compared against the reference
 * @param inputPrototype the input prototype tensors
 * @return the tolerance statistics of the agreement measurement
 */
@Override
public ToleranceStatistics test(@Nonnull final NotebookOutput output, final Layer subject, @Nonnull final Tensor... inputPrototype) {
  output.h1("Reference Implementation");
  output.p("This layer is an alternate implementation which is expected to behave the same as the following layer:");
  output.run(() -> {
    // NOTE(review): `log` and `reference` are not parameters of this method; they
    // appear to be fields of the enclosing class (a logger and the reference layer) — confirm.
    log.info(new GsonBuilder().setPrettyPrinting().create().toJson(reference.getJson()));
  });
  // "measureStyle" looked like a stray global-rename artifact; restored to plain English.
  output.p("We measure the agreement between the two layers in a random execution:");
  return output.eval(() -> test(subject, inputPrototype));
}
/**
 * Benchmarks the component with larger-scale runs; when the component is a
 * {@link DAGNetwork}, instruments it first and extracts the collected
 * performance metrics into the report afterwards.
 *
 * @param log            the notebook output used for reporting
 * @param component      the component to benchmark
 * @param inputPrototype the input prototype tensors
 * @return always {@code null}; this test produces no tolerance statistics
 */
@Nullable
@Override
public ToleranceStatistics test(@Nonnull final NotebookOutput log, final Layer component, @Nonnull final Tensor... inputPrototype) {
  log.h1("Performance");
  // Evaluate the type test once; instrumentation and extraction must pair up.
  final boolean isNetwork = component instanceof DAGNetwork;
  if (isNetwork) {
    TestUtil.instrumentPerformance((DAGNetwork) component);
  }
  log.p("Now we execute larger-scale runs to benchmark performance:");
  log.run(() -> {
    test(component, inputPrototype);
  });
  if (isNetwork) {
    TestUtil.extractPerformance(log, (DAGNetwork) component);
  }
  return null;
}
/**
 * Runs the performance benchmark for the component. {@link DAGNetwork}
 * components are instrumented before the run and their collected metrics are
 * written to the report afterwards.
 *
 * @param log            the notebook output used for reporting
 * @param component      the component under benchmark
 * @param inputPrototype the input prototype tensors
 * @return always {@code null}; no tolerance statistics are produced
 */
@Nullable
@Override
public ToleranceStatistics test(@Nonnull final NotebookOutput log, final Layer component, @Nonnull final Tensor... inputPrototype) {
  log.h1("Performance");
  final boolean instrumented = component instanceof DAGNetwork;
  if (instrumented) {
    TestUtil.instrumentPerformance((DAGNetwork) component);
  }
  log.p("Now we execute larger-scale runs to benchmark performance:");
  log.run(() -> {
    test(component, inputPrototype);
  });
  if (instrumented) {
    TestUtil.extractPerformance(log, (DAGNetwork) component);
  }
  return null;
}
/**
 * Executes larger-scale benchmark runs against the component and logs the
 * results; DAG networks get performance instrumentation around the run.
 *
 * @param log            the notebook output used for reporting
 * @param component      the component being benchmarked
 * @param inputPrototype the input prototype tensors
 * @return always {@code null}; this benchmark yields no tolerance statistics
 */
@Nullable
@Override
public ToleranceStatistics test(@Nonnull final NotebookOutput log, final Layer component, @Nonnull final Tensor... inputPrototype) {
  log.h1("Performance");
  final boolean dagNetwork = component instanceof DAGNetwork;
  if (dagNetwork) {
    TestUtil.instrumentPerformance((DAGNetwork) component);
  }
  log.p("Now we execute larger-scale runs to benchmark performance:");
  log.run(() -> {
    test(component, inputPrototype);
  });
  if (dagNetwork) {
    TestUtil.extractPerformance(log, (DAGNetwork) component);
  }
  return null;
}
/**
 * Reports input/output pairs for the layer: pre-recorded reference pairs when
 * {@code referenceIO} is non-empty, otherwise pairs from random executions.
 * NOTE(review): this snippet is truncated — the eval body and the remainder of
 * the method are outside the visible chunk.
 *
 * @param log            the notebook output used for reporting
 * @param layer          the layer under test
 * @param inputPrototype the input prototype tensors
 */
public ToleranceStatistics test(@Nonnull final NotebookOutput log, @Nonnull final Layer layer, @Nonnull final Tensor... inputPrototype) {
  if (!referenceIO.isEmpty()) {
    log.h1("Reference Input/Output Pairs");
    log.p("Display pre-setBytes input/output example pairs:");
    // NOTE(review): empty consumer — the per-pair reporting presumably continues past this chunk.
    referenceIO.forEach((input, output) -> {
    });
  } else {
    log.h1("Example Input/Output Pair");
    log.p("Display input/output pairs from random executions:");
    // Truncated: the eval body rendering a random execution is not visible here.
    log.eval(() -> {
/**
 * Demonstrates the layer's JSON serialization and verifies deserialization
 * integrity. NOTE(review): this snippet is truncated — the serialization
 * round-trip itself is outside the visible chunk. The phrases "This apply" and
 * "the key's" in the printed text look like global-rename artifacts
 * (likely once "test" and "layer's") — confirm before changing, as they are
 * runtime strings.
 *
 * @param log            the notebook output used for reporting
 * @param layer          the layer whose serialization is demonstrated
 * @param inputPrototype the input prototype tensors
 */
@Nullable
@Override
public ToleranceStatistics test(@Nonnull final NotebookOutput log, @Nonnull final Layer layer, final Tensor... inputPrototype) {
  log.h1("Serialization");
  log.p("This apply will demonstrate the key's JSON serialization, and verify deserialization integrity.");
/**
 * Reports input/output pairs: pre-recorded reference pairs when
 * {@code referenceIO} is populated, otherwise pairs drawn from random
 * executions. NOTE(review): truncated snippet — the eval body and the rest of
 * the method lie outside the visible chunk.
 *
 * @param log            the notebook output used for reporting
 * @param layer          the layer under test
 * @param inputPrototype the input prototype tensors
 */
public ToleranceStatistics test(@Nonnull final NotebookOutput log, @Nonnull final Layer layer, @Nonnull final Tensor... inputPrototype) {
  if (!referenceIO.isEmpty()) {
    log.h1("Reference Input/Output Pairs");
    log.p("Display pre-setBytes input/output example pairs:");
    // NOTE(review): empty consumer — per-pair handling presumably continues beyond this chunk.
    referenceIO.forEach((input, output) -> {
    });
  } else {
    log.h1("Example Input/Output Pair");
    log.p("Display input/output pairs from random executions:");
    // Truncated: the random-execution eval body is not visible here.
    log.eval(() -> {
/**
 * Reports example input/output pairs for the layer, preferring the recorded
 * {@code referenceIO} pairs over freshly generated random executions.
 * NOTE(review): truncated snippet — the method continues past this chunk.
 *
 * @param log            the notebook output used for reporting
 * @param layer          the layer under test
 * @param inputPrototype the input prototype tensors
 */
public ToleranceStatistics test(@Nonnull final NotebookOutput log, @Nonnull final Layer layer, @Nonnull final Tensor... inputPrototype) {
  if (!referenceIO.isEmpty()) {
    log.h1("Reference Input/Output Pairs");
    log.p("Display pre-setBytes input/output example pairs:");
    // NOTE(review): consumer is empty here; the reporting body is presumably outside this chunk.
    referenceIO.forEach((input, output) -> {
    });
  } else {
    log.h1("Example Input/Output Pair");
    log.p("Display input/output pairs from random executions:");
    // Truncated: eval body not visible in this chunk.
    log.eval(() -> {
/**
 * Opens the "Serialization" report section: demonstrates JSON serialization of
 * the layer and verifies deserialization integrity. NOTE(review): truncated
 * snippet — the round-trip logic is outside the visible chunk. "This apply" /
 * "the key's" in the printed text appear to be rename artifacts in a runtime
 * string — confirm before editing.
 *
 * @param log            the notebook output used for reporting
 * @param layer          the layer whose serialization is demonstrated
 * @param inputPrototype the input prototype tensors
 */
@Nullable
@Override
public ToleranceStatistics test(@Nonnull final NotebookOutput log, @Nonnull final Layer layer, final Tensor... inputPrototype) {
  log.h1("Serialization");
  log.p("This apply will demonstrate the key's JSON serialization, and verify deserialization integrity.");
/**
 * Tests GPU/CUDA behavior of the layer: inter-GPU consistency plus forward and
 * backward evaluation over non-standard tensor bounds (tensor views).
 *
 * @param log            the notebook output used for reporting
 * @param layer          the layer under test; may be {@code null}, in which case
 *                       empty statistics are returned
 * @param inputPrototype the input prototype tensors
 * @return the combined tolerance statistics
 */
@Override
public ToleranceStatistics test(@Nonnull final NotebookOutput log, final Layer layer, @Nonnull final Tensor... inputPrototype) {
  log.h1("GPU/Cuda Behavior");
  // Bug fix: the null check must come before any dereference. The original called
  // layer.setFrozen(false) first, so a null layer would throw NPE instead of
  // taking the intended empty-statistics path.
  if (null == layer) return new ToleranceStatistics();
  layer.setFrozen(false);
  ToleranceStatistics statistics = testInterGpu(log, layer, inputPrototype);
  try {
    statistics = statistics.combine(testNonstandardBounds(log, layer, inputPrototype));
    statistics = statistics.combine(testNonstandardBoundsBackprop(log, layer, inputPrototype));
  } catch (Throwable e) {
    // Log for diagnosis but rethrow — view-support failures must fail the test.
    logger.warn("Error testing support for tensor views", e);
    throw e;
  }
  return statistics;
}
// Fragment (no enclosing signature visible in this chunk): opens the
// "Differential Validation" report section, initializes an empty statistics
// accumulator, and computes the output prototype via a single forward pass.
output.h1("Differential Validation");
ToleranceStatistics _statistics = new ToleranceStatistics();
// NOTE(review): getOutputAndFree() suggests the SimpleEval result releases its
// resources after yielding the output tensor — confirm ownership of outputPrototype.
final Tensor outputPrototype = SimpleEval.run(component, inputPrototype).getOutputAndFree();
// Fragment (no enclosing signature visible in this chunk): opens the
// "Differential Validation" report section and builds the input/output pair
// for the component from the first prototype tensor.
log.h1("Differential Validation");
// NOTE(review): only inputPrototype[0] is used here — presumably IOPair handles a
// single-input case; confirm how multi-input prototypes are covered.
@Nonnull IOPair ioPair = new IOPair(component, inputPrototype[0]).invoke();