/**
 * Build a {@link StageTreeClassifier} cascade from a parsed OpenCV haar
 * classifier description.
 *
 * @param root
 *            the root node of the parsed OpenCV cascade
 * @return the constructed cascade classifier
 * @throws IOException
 *             if an error occurs while building the stages
 */
private static StageTreeClassifier buildCascade(OCVHaarClassifierNode root) throws IOException {
	return new StageTreeClassifier(
			root.width, root.height,
			root.name,
			root.hasTiltedFeatures,
			buildStages(root.stages));
}
@Override
public void perform(IntRange range) {
	// Slide the detection window over the assigned band of rows,
	// classifying each candidate position against the cascade.
	for (int iy = range.start; iy < range.stop; iy += range.incr) {
		final int y = Math.round(iy * ystep);

		// NOTE(review): the x coordinate is also scaled by ystep —
		// presumably the step is uniform in both axes; confirm.
		for (int ix = startX, xstep = 0; ix < stopX; ix += xstep) {
			final int x = Math.round(ix * ystep);

			final int result = cascade.classify(sat, x, y);

			if (result > 0) {
				// results is shared between worker threads, so guard the add
				synchronized (results) {
					results.add(new Rectangle(x, y, windowWidth, windowHeight));
				}
			}

			// if there is no hint of detection, then increase the
			// step size
			xstep = result == 0 ? smallStep : bigStep;
		}
	}
}
}, threadPool);
/**
 * {@inheritDoc}
 */
@Override
public String toString() {
	final String cascadeName = detector.getClassifier().getName();

	return "HaarCascadeDetector[cascade=" + cascadeName + "]";
}
// Build a cascade over the 19x19 training window and check that it agrees
// with the raw AdaBoost ensemble on both a positive and a negative sample.
final StageTreeClassifier classifier = new StageTreeClassifier(19, 19, "test cascade", false, root);
classifier.setScale(1);

// Positive sample i: the cascade must agree with the ensemble.
if ((classifier.classify(positive.get(i), 0, 0) == 1) != AdaBoost.classify(data.getInstanceFeature(i), ensemble))
	System.out.println("ERROR");

// Negative sample i (offset past the positives in the feature data).
// BUG FIX: "ERROR2" was previously printed unconditionally because the
// if-body was a single statement; it now only fires on a mismatch.
if ((classifier.classify(negative.get(i), 0, 0) == 1) != AdaBoost.classify(
		data.getInstanceFeature(i + positive.size()), ensemble))
{
	System.out.println(classifier.classify(negative.get(i), 0, 0) + " " + AdaBoost.classify(
			data.getInstanceFeature(i + positive.size()), ensemble));
	System.out.println("ERROR2");
}
// Pre-cache the scale-dependent values in the cascade before scanning at this scale
cascade.setScale(factor);
/**
 * Walk the stage tree rooted at the given stage, refreshing each stage's
 * cached scale-dependent values so they match this classifier's current
 * scale.
 *
 * @param s
 *            the stage at which to start updating
 */
private void updateCaches(Stage s) {
	s.updateCaches(this);

	final Stage onSuccess = s.successStage;
	if (onSuccess != null)
		updateCaches(onSuccess);

	final Stage onFailure = s.failureStage;
	if (onFailure != null)
		updateCaches(onFailure);
}
// Variance-normalisation value for the window at (x, y) — presumably used to
// normalise feature responses against local contrast; confirm against callee.
final float wvNorm = computeWindowVarianceNorm(sat, x, y);
// Build a cascade over the 19x19 training window and check that it agrees
// with the raw AdaBoost ensemble on both a positive and a negative sample.
final StageTreeClassifier classifier = new StageTreeClassifier(19, 19, "test cascade", false, root);
classifier.setScale(1);

// Positive sample i: the cascade must agree with the ensemble.
if ((classifier.classify(positive.get(i), 0, 0) == 1) != AdaBoost.classify(data.getInstanceFeature(i), ensemble))
	System.out.println("ERROR");

// Negative sample i (offset past the positives in the feature data).
// BUG FIX: "ERROR2" was previously printed unconditionally because the
// if-body was a single statement; it now only fires on a mismatch.
if ((classifier.classify(negative.get(i), 0, 0) == 1) != AdaBoost.classify(
		data.getInstanceFeature(i + positive.size()), ensemble))
{
	System.out.println(classifier.classify(negative.get(i), 0, 0) + " " + AdaBoost.classify(
			data.getInstanceFeature(i + positive.size()), ensemble));
	System.out.println("ERROR2");
}
// Pre-cache the scale-dependent values in the cascade before scanning at this scale
cascade.setScale(factor);
/**
 * Walk the stage tree rooted at the given stage, refreshing each stage's
 * cached scale-dependent values so they match this classifier's current
 * scale.
 *
 * @param s
 *            the stage at which to start updating
 */
private void updateCaches(Stage s) {
	s.updateCaches(this);

	final Stage onSuccess = s.successStage;
	if (onSuccess != null)
		updateCaches(onSuccess);

	final Stage onFailure = s.failureStage;
	if (onFailure != null)
		updateCaches(onFailure);
}
// Variance-normalisation value for the window at (x, y) — presumably used to
// normalise feature responses against local contrast; confirm against callee.
final float wvNorm = computeWindowVarianceNorm(sat, x, y);
@Override
public void perform(IntRange range) {
	// Slide the detection window over the assigned band of rows,
	// classifying each candidate position against the cascade.
	for (int iy = range.start; iy < range.stop; iy += range.incr) {
		final int y = Math.round(iy * ystep);

		// NOTE(review): the x coordinate is also scaled by ystep —
		// presumably the step is uniform in both axes; confirm.
		for (int ix = startX, xstep = 0; ix < stopX; ix += xstep) {
			final int x = Math.round(ix * ystep);

			final int result = cascade.classify(sat, x, y);

			if (result > 0) {
				// results is shared between worker threads, so guard the add
				synchronized (results) {
					results.add(new Rectangle(x, y, windowWidth, windowHeight));
				}
			}

			// if there is no hint of detection, then increase the
			// step size
			xstep = result == 0 ? smallStep : bigStep;
		}
	}
}
}, threadPool);
/**
 * Build a {@link StageTreeClassifier} cascade from a parsed OpenCV haar
 * classifier description.
 *
 * @param root
 *            the root node of the parsed OpenCV cascade
 * @return the constructed cascade classifier
 * @throws IOException
 *             if an error occurs while building the stages
 */
private static StageTreeClassifier buildCascade(OCVHaarClassifierNode root) throws IOException {
	return new StageTreeClassifier(
			root.width, root.height,
			root.name,
			root.hasTiltedFeatures,
			buildStages(root.stages));
}
/** * Set the current detection scale. This must be called before calling * {@link #classify(SummedSqTiltAreaTable, int, int)}. * <p> * Internally, this goes through all the stages and their individual * classifiers and pre-caches information related to the current scale to * avoid lots of expensive recomputation of values that don't change for a * given scale. * * @param scale * the current scale */ public void setScale(float scale) { this.cachedScale = scale; // following the OCV code... -2 to make a slightly smaller box within // window cachedW = Math.round(scale * (width - 2)); cachedH = Math.round(scale * (height - 2)); cachedInvArea = 1.0f / (cachedW * cachedH); updateCaches(root); }
/**
 * {@inheritDoc}
 */
@Override
public int hashCode() {
	// Fold each field that participates in equality into the hash,
	// starting from the shared seed.
	int hash = HashCodeUtil.SEED;

	hash = HashCodeUtil.hash(hash, this.detector.getMinimumDetectionSize());
	hash = HashCodeUtil.hash(hash, this.detector.getScaleFactor());
	hash = HashCodeUtil.hash(hash, this.detector.getClassifier().getName());
	hash = HashCodeUtil.hash(hash, this.groupingFilter);
	hash = HashCodeUtil.hash(hash, this.histogramEqualize);

	return hash;
}
// NOTE(review): x is scaled by ystep — presumably the scan step is uniform
// in both axes; confirm.
final int x = Math.round(ix * ystep);
// Classify the window at (x, y) against the cascade at the current scale
final int result = cascade.classify(sat, x, y);
/** * Set the current detection scale. This must be called before calling * {@link #classify(SummedSqTiltAreaTable, int, int)}. * <p> * Internally, this goes through all the stages and their individual * classifiers and pre-caches information related to the current scale to * avoid lots of expensive recomputation of values that don't change for a * given scale. * * @param scale * the current scale */ public void setScale(float scale) { this.cachedScale = scale; // following the OCV code... -2 to make a slightly smaller box within // window cachedW = Math.round(scale * (width - 2)); cachedH = Math.round(scale * (height - 2)); cachedInvArea = 1.0f / (cachedW * cachedH); updateCaches(root); }
// NOTE(review): x is scaled by ystep — presumably the scan step is uniform
// in both axes; confirm.
final int x = Math.round(ix * ystep);
// Classify the window at (x, y) against the cascade at the current scale
final int result = cascade.classify(sat, x, y);