/**
 * Releases the camera source and clears the reference so the released
 * instance cannot be reused afterwards. Safe to call when no camera
 * source exists.
 */
public void release() {
    if (mCameraSource == null) {
        return;
    }
    mCameraSource.release();
    mCameraSource = null;
}
/**
 * Stops the camera preview if a camera source is currently active; a no-op
 * otherwise. The camera source itself is kept for a later restart.
 */
public void stop() {
    CameraSource source = mCameraSource;
    if (source != null) {
        source.stop();
    }
}
/**
 * Forwards the focus request to the wrapped detector.
 *
 * @param id identifier of the item to receive focus
 * @return whether the wrapped detector accepted the focus request
 */
@Override
public boolean setFocus(int id) {
    final boolean accepted = mDelegate.setFocus(id);
    return accepted;
}
/** * Creates a new frame based on the original frame, with additional width on the right to * increase the size to avoid the bug in the underlying face detector. */ private Frame padFrameRight(Frame originalFrame, int newWidth) { Frame.Metadata metadata = originalFrame.getMetadata(); int width = metadata.getWidth(); int height = metadata.getHeight(); Log.i(TAG, "Padded image from: " + width + "x" + height + " to " + newWidth + "x" + height); ByteBuffer origBuffer = originalFrame.getGrayscaleImageData(); int origOffset = origBuffer.arrayOffset(); byte[] origBytes = origBuffer.array(); // This can be changed to just .allocate in the future, when Frame supports non-direct // byte buffers. ByteBuffer paddedBuffer = ByteBuffer.allocateDirect(newWidth * height); int paddedOffset = paddedBuffer.arrayOffset(); byte[] paddedBytes = paddedBuffer.array(); Arrays.fill(paddedBytes, (byte) 0); for (int y = 0; y < height; ++y) { int origStride = origOffset + y * width; int paddedStride = paddedOffset + y * newWidth; System.arraycopy(origBytes, origStride, paddedBytes, paddedStride, width); } return new Frame.Builder() .setImageData(paddedBuffer, newWidth, height, ImageFormat.NV21) .setId(metadata.getId()) .setRotation(metadata.getRotation()) .setTimestampMillis(metadata.getTimestampMillis()) .build(); }
/** * Creates the face detector and the camera. */ private void createCameraSource() { Context context = getApplicationContext(); FaceDetector detector = createFaceDetector(context); int facing = CameraSource.CAMERA_FACING_FRONT; if (!mIsFrontFacing) { facing = CameraSource.CAMERA_FACING_BACK; } // The camera source is initialized to use either the front or rear facing camera. We use a // relatively low resolution for the camera preview, since this is sufficient for this app // and the face detector will run faster at lower camera resolutions. // // However, note that there is a speed/accuracy trade-off with respect to choosing the // camera resolution. The face detector will run faster with lower camera resolutions, // but may miss smaller faces, landmarks, or may not correctly detect eyes open/closed in // comparison to using higher camera resolutions. If you have any of these issues, you may // want to increase the resolution. mCameraSource = new CameraSource.Builder(context, detector) .setFacing(facing) .setRequestedPreviewSize(320, 240) .setRequestedFps(60.0f) .setAutoFocusEnabled(true) .build(); }
/**
 * Releases the wrapped detector's resources by delegating to it. After this
 * call the delegate should not be used for further detection.
 */
@Override public void release() { mDelegate.release(); }
// NOTE(review): this is the visible tail of a detect(...) method whose header lies
// outside this span. The constants kMinDimension/kDimensionLower and the width/height
// locals are never used before the return below — the frame is forwarded to the
// delegate unmodified. Presumably a dimension-based padding decision (see
// padFrameRight) was intended here; confirm against the complete method, or delete
// the dead locals.
final int kMinDimension = 147; final int kDimensionLower = 640; int width = frame.getMetadata().getWidth(); int height = frame.getMetadata().getHeight(); return mDelegate.detect(frame);
/**
 * Reports whether the wrapped detector is ready to use, by delegating to it.
 *
 * @return whether the wrapped detector is operational
 */
@Override
public boolean isOperational() {
    final boolean operational = mDelegate.isOperational();
    return operational;
}
// NOTE(review): fragment of a frame-padding method — paddedBuffer and newHeight are
// defined earlier, outside this visible span, and origOffset/origBytes are computed
// but never used here. The row-copy loop that should consume them appears to be
// outside the span as well; verify against the complete method body. The builder
// reuses the original frame's id, rotation, and timestamp with the new image data.
Frame.Metadata metadata = originalFrame.getMetadata(); int width = metadata.getWidth(); int height = metadata.getHeight(); ByteBuffer origBuffer = originalFrame.getGrayscaleImageData(); int origOffset = origBuffer.arrayOffset(); byte[] origBytes = origBuffer.array(); return new Frame.Builder() .setImageData(paddedBuffer, width, newHeight, ImageFormat.NV21) .setId(metadata.getId()) .setRotation(metadata.getRotation()) .setTimestampMillis(metadata.getTimestampMillis()) .build();
/**
 * Releases the underlying camera source, if any, and drops the reference so a
 * released instance is never touched again.
 */
public void release() {
    if (mCameraSource == null) {
        return;
    }
    mCameraSource.release();
    mCameraSource = null;
}
/**
 * Releases the underlying receiver. This is only safe to do after the associated thread
 * has completed, which is managed in camera source's release method above.
 */
@SuppressLint("Assert")
void release() {
    // The processing thread must be fully terminated before the detector is torn down;
    // otherwise a frame could still be mid-detection when mDetector goes away.
    assert (mProcessingThread.getState() == State.TERMINATED);
    mDetector.release();
    // Null out so any late call fails fast instead of using a released detector.
    mDetector = null;
}
/**
 * Stops the camera preview when a camera source exists; does nothing
 * otherwise. The source remains available for a later restart.
 */
public void stop() {
    CameraSource source = mCameraSource;
    if (source != null) {
        source.stop();
    }
}
/**
 * Tears down the camera source (if one exists) and clears the field so the
 * released instance is unreachable.
 */
public void release() {
    if (mCameraSource == null) {
        return;
    }
    mCameraSource.release();
    mCameraSource = null;
}
/**
 * Releases the underlying receiver. This is only safe to do after the associated thread
 * has completed, which is managed in camera source's release method above.
 */
@SuppressLint("Assert")
void release() {
    // Releasing while the processing thread is still alive could free the detector
    // under an in-flight frame, hence the terminated-state assertion.
    assert (mProcessingThread.getState() == State.TERMINATED);
    mDetector.release();
    // Clear the reference so any use-after-release fails fast.
    mDetector = null;
}
/**
 * Halts the camera preview if a camera source is active; otherwise a no-op.
 */
public void stop() {
    CameraSource source = mCameraSource;
    if (source != null) {
        source.stop();
    }
}
/**
 * Toggles between the front-facing and rear-facing camera. The existing camera
 * source must be fully released before a replacement is created, so the
 * teardown-then-rebuild order below is significant.
 */
public void onClick(View v) {
    // Flip the facing flag first; createCameraSource() reads it when choosing
    // which camera to open.
    mIsFrontFacing = !mIsFrontFacing;
    if (mCameraSource != null) {
        mCameraSource.release();
        mCameraSource = null;
    }
    createCameraSource();
    startCameraSource();
}
}; // closes the anonymous listener whose declaration begins outside this span
/**
 * Releases the underlying receiver. This is only safe to do after the associated thread
 * has completed, which is managed in camera source's release method above.
 */
@SuppressLint("Assert")
void release() {
    // The thread must have terminated before the detector is freed, so no frame is
    // still being processed when mDetector goes away.
    // NOTE(review): the field is named processingThread here but mProcessingThread in
    // the sibling copies of this method — confirm the naming divergence is intentional.
    assert (processingThread.getState() == State.TERMINATED);
    mDetector.release();
    // Null out so any late call fails fast instead of using a released detector.
    mDetector = null;
}
/**
 * Releases the resources associated with the camera source, the associated detector, and the
 * rest of the processing pipeline.
 */
@Override
protected void onDestroy() {
    super.onDestroy();
    CameraSource source = mCameraSource;
    if (source != null) {
        source.release();
    }
}
/**
 * Releases the resources associated with the camera source, the associated detector, and the
 * rest of the processing pipeline.
 */
@Override
protected void onDestroy() {
    super.onDestroy();
    CameraSource source = mCameraSource;
    if (source != null) {
        source.release();
    }
}
/**
 * Releases the resources associated with the camera source, the associated detectors, and the
 * rest of the processing pipeline.
 */
@Override
protected void onDestroy() {
    super.onDestroy();
    CameraSource source = mCameraSource;
    if (source != null) {
        source.release();
    }
}