FDet() { try { this._cascade = OCVHaarLoader.read(OCVHaarLoader.class.getResourceAsStream("haarcascade_frontalface_alt2.xml")); } catch (final Exception e) { throw new RuntimeException(e); } }
/**
 * Read the cascade from an OpenCV xml serialisation. Currently this only
 * supports the old-style cascade xml.
 *
 * @param is
 *            the stream to read from
 * @return the cascade object
 * @throws IOException
 *             if the stream cannot be read or parsed
 */
public static StageTreeClassifier read(InputStream is) throws IOException {
    // Parse the raw XML into an intermediate node tree, then assemble
    // the runnable cascade from it.
    return buildCascade(readXPP(is));
}
/**
 * Build a classifier from a flattened tree representation; by convention
 * the root of the tree is the node at index zero.
 *
 * @param tree
 *            the flattened list of tree nodes
 * @return the assembled classifier rooted at the first node
 */
private static Classifier buildClassifier(final List<TreeNode> tree) {
    final TreeNode root = tree.get(0);
    return buildClassifier(tree, root);
}
final StageNode node = stageNodes.get(i); stages[i] = new Stage(node.threshold, buildClassifiers(node.trees), null, null); optimiseTree(root);
/**
 * Assemble a {@link StageTreeClassifier} from the parsed cascade
 * description, copying over its geometry, name and feature-tilt flag.
 *
 * @param root
 *            the parsed cascade description
 * @return the assembled stage-tree classifier
 * @throws IOException
 *             if the stage data cannot be built
 */
private static StageTreeClassifier buildCascade(OCVHaarClassifierNode root) throws IOException {
    return new StageTreeClassifier(
            root.width,
            root.height,
            root.name,
            root.hasTiltedFeatures,
            buildStages(root.stages));
}
checkNode(reader, OCV_STORAGE_NODE); checkNode(reader, SIZE_NODE); checkNode(reader, STAGES_NODE); checkNode(reader, ANONYMOUS_NODE); checkNode(reader, TREES_NODE); checkNode(reader, ANONYMOUS_NODE); checkNode(reader, ANONYMOUS_NODE); checkNode(reader, FEATURE_NODE); checkNode(reader, RECTS_NODE); checkNode(reader, ANONYMOUS_NODE); regions.add(WeightedRectangle.parse(reader.nextText())); checkNode(reader, TILTED_NODE); final boolean tilted = "1".equals(reader.nextText()); checkNode(reader, FEATURE_NODE); checkNode(reader, THRESHOLD_NODE); final float threshold = (float) Double.parseDouble(reader.nextText());
final StageNode node = stageNodes.get(i); stages[i] = new Stage(node.threshold, buildClassifiers(node.trees), null, null); optimiseTree(root);
/**
 * Assemble a {@link StageTreeClassifier} from the parsed cascade
 * description, copying over its geometry, name and feature-tilt flag.
 *
 * @param root
 *            the parsed cascade description
 * @return the assembled stage-tree classifier
 * @throws IOException
 *             if the stage data cannot be built
 */
private static StageTreeClassifier buildCascade(OCVHaarClassifierNode root) throws IOException {
    return new StageTreeClassifier(
            root.width,
            root.height,
            root.name,
            root.hasTiltedFeatures,
            buildStages(root.stages));
}
checkNode(reader, OCV_STORAGE_NODE); checkNode(reader, SIZE_NODE); checkNode(reader, STAGES_NODE); checkNode(reader, ANONYMOUS_NODE); checkNode(reader, TREES_NODE); checkNode(reader, ANONYMOUS_NODE); checkNode(reader, ANONYMOUS_NODE); checkNode(reader, FEATURE_NODE); checkNode(reader, RECTS_NODE); checkNode(reader, ANONYMOUS_NODE); regions.add(WeightedRectangle.parse(reader.nextText())); checkNode(reader, TILTED_NODE); final boolean tilted = "1".equals(reader.nextText()); checkNode(reader, FEATURE_NODE); checkNode(reader, THRESHOLD_NODE); final float threshold = (float) Double.parseDouble(reader.nextText());
FDet() { try { this._cascade = OCVHaarLoader.read(OCVHaarLoader.class.getResourceAsStream("haarcascade_frontalface_alt2.xml")); } catch (final Exception e) { throw new RuntimeException(e); } }
/**
 * Read the cascade from an OpenCV xml serialisation. Currently this only
 * supports the old-style cascade xml.
 *
 * @param is
 *            the stream to read from
 * @return the cascade object
 * @throws IOException
 *             if the stream cannot be read or parsed
 */
public static StageTreeClassifier read(InputStream is) throws IOException {
    // Parse the raw XML into an intermediate node tree, then assemble
    // the runnable cascade from it.
    return buildCascade(readXPP(is));
}
/**
 * Build a classifier from a flattened tree representation; by convention
 * the root of the tree is the node at index zero.
 *
 * @param tree
 *            the flattened list of tree nodes
 * @return the assembled classifier rooted at the first node
 */
private static Classifier buildClassifier(final List<TreeNode> tree) {
    final TreeNode root = tree.get(0);
    return buildClassifier(tree, root);
}
/**
 * Construct the detector, loading the cascade from the given file and
 * configuring the detection parameters.
 *
 * @param fname
 *            path of the OpenCV cascade xml file to load
 * @param img_scale
 *            image scaling factor used during detection
 * @param scale_factor
 *            per-level scale step of the sliding-window search
 * @param min_neighbours
 *            minimum number of neighbouring detections to accept a face
 * @param min_size
 *            minimum face size in pixels
 * @throws IOException
 *             if the cascade file cannot be read
 * @throws XMLStreamException
 *             if the cascade xml is malformed
 */
FDet(final String fname, final float img_scale, final float scale_factor,
        final int min_neighbours, final int min_size)
        throws IOException, XMLStreamException
{
    final FileInputStream fis = new FileInputStream(fname);
    try {
        this._cascade = OCVHaarLoader.read(fis);
    } finally {
        // Close in finally: the original only closed on the success path,
        // leaking the stream whenever read() threw.
        fis.close();
    }
    this._img_scale = img_scale;
    this._scale_factor = scale_factor;
    this._min_neighbours = min_neighbours;
    this._min_size = min_size;
    this.setupDetector();
}
/**
 * Build one classifier per tree in the given list.
 *
 * @param trees
 *            the flattened tree representations, one list per classifier
 * @return an array of classifiers in the same order as the input
 */
private static Classifier[] buildClassifiers(final List<List<TreeNode>> trees) {
    final Classifier[] result = new Classifier[trees.size()];
    int idx = 0;
    for (final List<TreeNode> tree : trees) {
        result[idx++] = buildClassifier(tree);
    }
    return result;
}
/**
 * Construct the detector, loading the cascade from the given file and
 * configuring the detection parameters.
 *
 * @param fname
 *            path of the OpenCV cascade xml file to load
 * @param img_scale
 *            image scaling factor used during detection
 * @param scale_factor
 *            per-level scale step of the sliding-window search
 * @param min_neighbours
 *            minimum number of neighbouring detections to accept a face
 * @param min_size
 *            minimum face size in pixels
 * @throws IOException
 *             if the cascade file cannot be read
 * @throws XMLStreamException
 *             if the cascade xml is malformed
 */
FDet(final String fname, final float img_scale, final float scale_factor,
        final int min_neighbours, final int min_size)
        throws IOException, XMLStreamException
{
    final FileInputStream fis = new FileInputStream(fname);
    try {
        this._cascade = OCVHaarLoader.read(fis);
    } finally {
        // Close in finally: the original only closed on the success path,
        // leaking the stream whenever read() threw.
        fis.close();
    }
    this._img_scale = img_scale;
    this._scale_factor = scale_factor;
    this._min_neighbours = min_neighbours;
    this._min_size = min_size;
    this.setupDetector();
}
/**
 * Build one classifier per tree in the given list.
 *
 * @param trees
 *            the flattened tree representations, one list per classifier
 * @return an array of classifiers in the same order as the input
 */
private static Classifier[] buildClassifiers(final List<List<TreeNode>> trees) {
    final Classifier[] result = new Classifier[trees.size()];
    int idx = 0;
    for (final List<TreeNode> tree : trees) {
        result[idx++] = buildClassifier(tree);
    }
    return result;
}
in = new FileInputStream(new File(cascadeResource)); final StageTreeClassifier cascade = OCVHaarLoader.read(in);
/**
 * Recursively assemble the classifier rooted at {@code current}. A child
 * index of -1 marks a leaf, which becomes a constant-valued classifier;
 * any other index is resolved through the flattened node list.
 *
 * @param tree
 *            the flattened list of tree nodes
 * @param current
 *            the node to build a classifier for
 * @return the classifier corresponding to {@code current}
 */
private static Classifier buildClassifier(final List<TreeNode> tree, TreeNode current) {
    final HaarFeatureClassifier fc =
            new HaarFeatureClassifier(current.feature, current.threshold, null, null);

    fc.left = (current.left_node == -1)
            ? new ValueClassifier(current.left_val)
            : buildClassifier(tree, tree.get(current.left_node));

    fc.right = (current.right_node == -1)
            ? new ValueClassifier(current.right_val)
            : buildClassifier(tree, tree.get(current.right_node));

    return fc;
}
public static void main(String[] args) throws IOException { OCVHaarLoader.read(StageTreeClassifier.class .getResourceAsStream("haarcascade_frontalface_alt.xml"));
/**
 * Recursively assemble the classifier rooted at {@code current}. A child
 * index of -1 marks a leaf, which becomes a constant-valued classifier;
 * any other index is resolved through the flattened node list.
 *
 * @param tree
 *            the flattened list of tree nodes
 * @param current
 *            the node to build a classifier for
 * @return the classifier corresponding to {@code current}
 */
private static Classifier buildClassifier(final List<TreeNode> tree, TreeNode current) {
    final HaarFeatureClassifier fc =
            new HaarFeatureClassifier(current.feature, current.threshold, null, null);

    fc.left = (current.left_node == -1)
            ? new ValueClassifier(current.left_val)
            : buildClassifier(tree, tree.get(current.left_node));

    fc.right = (current.right_node == -1)
            ? new ValueClassifier(current.right_val)
            : buildClassifier(tree, tree.get(current.right_node));

    return fc;
}