import android.graphics.YuvImage;

/**
 * Saves each camera preview frame as a JPEG ("out.jpg") on external storage.
 *
 * Fixes vs. original:
 *  - the FileOutputStream was never closed (file-descriptor leak on every
 *    frame) -> try-with-resources;
 *  - Toast.makeText was called with a raw duration of 1000, which is not a
 *    valid duration constant -> Toast.LENGTH_LONG.
 *
 * @param data   raw frame bytes in the camera's preview format
 * @param camera the camera that produced the frame
 */
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
    Camera.Parameters parameters = camera.getParameters();
    Size size = parameters.getPreviewSize();
    YuvImage image = new YuvImage(data, parameters.getPreviewFormat(),
            size.width, size.height, null);
    File file = new File(Environment.getExternalStorageDirectory().getPath() + "/out.jpg");
    try (FileOutputStream filecon = new FileOutputStream(file)) {
        image.compressToJpeg(
                new Rect(0, 0, image.getWidth(), image.getHeight()), 90, filecon);
    } catch (IOException e) {
        // IOException covers the original FileNotFoundException plus any
        // failure on close(); surface it instead of silently dropping the frame.
        Toast.makeText(getBaseContext(), e.getMessage(), Toast.LENGTH_LONG).show();
    }
}
/** * Reads, one time, values from the camera that are needed by the app. */ void initFromCameraParameters(Camera camera) { Camera.Parameters parameters = camera.getParameters(); previewFormat = parameters.getPreviewFormat(); previewFormatString = parameters.get("preview-format"); Log.d(TAG, "Default preview format: " + previewFormat + '/' + previewFormatString); WindowManager manager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE); Display display = manager.getDefaultDisplay(); screenResolution = new Point(display.getWidth(), display.getHeight()); Log.d(TAG, "Screen resolution: " + screenResolution); Point screenResolutionForCamera = new Point(); screenResolutionForCamera.x = screenResolution.x; screenResolutionForCamera.y = screenResolution.y; // preview size is always something like 480*320, other 320*480 if (screenResolution.x < screenResolution.y) { screenResolutionForCamera.x = screenResolution.y; screenResolutionForCamera.y = screenResolution.x; } Log.i("#########", "screenX:" + screenResolutionForCamera.x + " screenY:" + screenResolutionForCamera.y); cameraResolution = getCameraResolution(parameters, screenResolutionForCamera); // cameraResolution = getCameraResolution(parameters, screenResolution); Log.d(TAG, "Camera resolution: " + screenResolution); }
/** * Reads, one time, values from the camera that are needed by the app. */ void initFromCameraParameters(Camera camera) { Camera.Parameters parameters = camera.getParameters(); previewFormat = parameters.getPreviewFormat(); previewFormatString = parameters.get("preview-format"); Log.d(TAG, "Default preview format: " + previewFormat + '/' + previewFormatString); WindowManager manager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE); Display display = manager.getDefaultDisplay(); screenResolution = new Point(display.getWidth(), display.getHeight()); Log.d(TAG, "Screen resolution: " + screenResolution); // //Lemon add 扫描框修改,解决拉伸但导致成像模糊识别率很低。<<<<<<<<<<<<<<<<<<<<<<<<<<<< // Point screenResolutionForCamera = new Point(); // screenResolutionForCamera.x = screenResolution.x; // screenResolutionForCamera.y = screenResolution.y; // // preview size is always something like 480*320, other 320*480 // if (screenResolution.x < screenResolution.y) { // screenResolutionForCamera.x = screenResolution.y; // screenResolutionForCamera.y = screenResolution.x; // } //Lemon add 扫描框修改,解决拉伸>>>>>>>>>>>>>>>>>>>>>>>>>>>> //Lemon 扫描框修改,解决拉伸但导致成像模糊识别率很低 screenResolution改为screenResolutionForCamera);<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< cameraResolution = getCameraResolution(parameters, screenResolution); Log.d(TAG, "Camera resolution: " + screenResolution); //Lemon 扫描框修改,解决拉伸但导致成像模糊识别率很低 screenResolution改为screenResolutionForCamera);>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> }
// Convert an NV21 preview frame into a JPEG byte array so it can be streamed.
Camera.Parameters parameters = camera.getParameters();
if (parameters.getPreviewFormat() == ImageFormat.NV21) {
    int frameWidth = parameters.getPreviewSize().width;
    int frameHeight = parameters.getPreviewSize().height;
    Rect rect = new Rect(0, 0, frameWidth, frameHeight);
    YuvImage yuvimage = new YuvImage(data, ImageFormat.NV21, frameWidth, frameHeight, null);
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    yuvimage.compressToJpeg(rect, 75, os);
    byte[] videoFrame = os.toByteArray();
    // send the video frame to receiver
}
// Configure the camera to use the first size in its supported-preview-size list.
Camera.Parameters cameraParam = mCamera.getParameters();
List<Size> sizes = cameraParam.getSupportedPreviewSizes();
Camera.Size previewSize = cameraParam.getPreviewSize();
previewFormat = cameraParam.getPreviewFormat();
Size chosen = sizes.get(0);
cameraParam.setPreviewSize(chosen.width, chosen.height);
mCamera.setParameters(cameraParam);
// Save a YUV preview frame as "YuvImage.jpg" in the public Pictures directory.
Camera.Parameters parameters = camera.getParameters();
Camera.Size size = parameters.getPreviewSize();
YuvImage image = new YuvImage(data, parameters.getPreviewFormat(), size.width, size.height, null);
File file = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES), "YuvImage.jpg");
// Fix: "FileOutptStream" typo did not compile; also close the stream when done
// instead of leaking the file descriptor.
try (FileOutputStream filecon = new FileOutputStream(file)) {
    image.compressToJpeg(new Rect(0, 0, image.getWidth(), image.getHeight()), 90, filecon);
}
/**
 * Creates the texture view and registers for surface lifecycle callbacks.
 *
 * Fix: the original called getParameters().getPreviewFormat() and discarded
 * the result (a pure no-op), and queried getParameters() an extra time.
 *
 * @param context the hosting context
 * @param camera  an opened camera whose preview will be rendered
 */
public LivePickerTextureView(Context context, Camera camera) {
    super(context);
    this.camera = camera;
    // Get notified when the underlying surface is created and destroyed.
    this.setSurfaceTextureListener(this);
    this.previewSize = camera.getParameters().getPreviewSize();
    this.selectedColor = new int[3];
}
/**
 * Hands the camera three reusable preview buffers (triple buffering),
 * registers this object as the buffered preview callback, and starts
 * the preview.
 *
 * @return always true
 */
public boolean startPreview() {
    Size frameSize = mCamera.getParameters().getPreviewSize();
    int bitsPerPixel = ImageFormat.getBitsPerPixel(mCamera.getParameters().getPreviewFormat());
    int rowBytes = frameSize.width * bitsPerPixel / 8;
    mCamera.setPreviewCallbackWithBuffer(this);
    for (int i = 0; i < 3; i++) {
        mCamera.addCallbackBuffer(new byte[rowBytes * frameSize.height]);
    }
    mCamera.startPreview();
    return true;
}
// Decode the current YUV preview frame into a Bitmap via an in-memory JPEG.
Camera.Parameters parameters = camera.getParameters();
int frameWidth = parameters.getPreviewSize().width;
int frameHeight = parameters.getPreviewSize().height;
ByteArrayOutputStream out = new ByteArrayOutputStream();
YuvImage yuvImage = new YuvImage(data, parameters.getPreviewFormat(), frameWidth, frameHeight, null);
yuvImage.compressToJpeg(new Rect(0, 0, frameWidth, frameHeight), 90, out);
byte[] imageBytes = out.toByteArray();
Bitmap bitmap = BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.length);
out.flush();
out.close();
/**
 * Converts each preview frame to a Bitmap (via an in-memory JPEG at
 * quality 50) and displays it in the loopback ImageView on the UI thread.
 */
@Override
public void onPreviewFrame(final byte[] data, Camera camera) {
    Camera.Parameters parameters = camera.getParameters();
    int width = parameters.getPreviewSize().width;
    int height = parameters.getPreviewSize().height;
    ByteArrayOutputStream jpegStream = new ByteArrayOutputStream();
    YuvImage yuv = new YuvImage(data, parameters.getPreviewFormat(), width, height, null);
    yuv.compressToJpeg(new Rect(0, 0, width, height), 50, jpegStream);
    byte[] jpegBytes = jpegStream.toByteArray();
    final Bitmap bitmap = BitmapFactory.decodeByteArray(jpegBytes, 0, jpegBytes.length);
    MyActivity.this.runOnUiThread(new Runnable() {
        @Override
        public void run() {
            ((ImageView) findViewById(R.id.loopback)).setImageBitmap(bitmap);
        }
    });
}
// Preview buffer size in bytes: width padded with (w | 0xf) + 1 — the next
// multiple of 16 strictly greater than w (a width already on a 16 boundary
// still gains 16) — times height, times bits-per-pixel of the preview
// format, divided by 8.
// NOTE(review): the extra 16 pixels for aligned widths look like deliberate
// headroom for the encoder's alignment requirement — confirm.
int bufferSize = (((videoWidth | 0xf) + 1) * videoHeight * ImageFormat.getBitsPerPixel(parameters.getPreviewFormat())) / 8;
@Override public void onPreviewFrame(byte[] data, Camera camera) { Size cameraResolution = resolution; PreviewCallback callback = this.callback; if (cameraResolution != null && callback != null) { try { if(data == null) { throw new NullPointerException("No preview data received"); } int format = camera.getParameters().getPreviewFormat(); SourceData source = new SourceData(data, cameraResolution.width, cameraResolution.height, format, getCameraRotation()); callback.onPreview(source); } catch (RuntimeException e) { // Could be: // java.lang.RuntimeException: getParameters failed (empty parameters) // IllegalArgumentException: Image data does not match the resolution Log.e(TAG, "Camera preview failed", e); callback.onPreviewError(e); } } else { Log.d(TAG, "Got preview callback, but no handler or resolution available"); if(callback != null) { // Should generally not happen callback.onPreviewError(new Exception("No resolution available")); } } } }
// Preview buffer size in bytes: (w|0xf)+1 pads the width to the next
// multiple of 16 strictly greater than w, then multiplies by height and
// bits-per-pixel of the preview format, and divides by 8.
int bufferSize = (((videoWidth|0xf)+1) * videoHeight * ImageFormat.getBitsPerPixel(parameters.getPreviewFormat())) / 8;
// Push-stream buffer size in bytes: (w|0xf)+1 pads the width to the next
// multiple of 16 strictly greater than w, then multiplies by height and
// bits-per-pixel of the preview format, and divides by 8.
int bufferSize = (((pushVideoWidth|0xf)+1) * pushVideoHeight * ImageFormat.getBitsPerPixel(parameters.getPreviewFormat())) / 8;
/** * Reads, one time, values from the camera that are needed by the app. */ void initFromCameraParameters(Camera camera) { Camera.Parameters parameters = camera.getParameters(); previewFormat = parameters.getPreviewFormat(); previewFormatString = parameters.get("preview-format"); Log.d(TAG, "Default preview format: " + previewFormat + '/' + previewFormatString); WindowManager manager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE); Display display = manager.getDefaultDisplay(); screenResolution = new Point(display.getWidth(), display.getHeight()); Log.d(TAG, "Screen resolution: " + screenResolution); // //Lemon add 扫描框修改,解决拉伸但导致成像模糊识别率很低。<<<<<<<<<<<<<<<<<<<<<<<<<<<< // Point screenResolutionForCamera = new Point(); // screenResolutionForCamera.x = screenResolution.x; // screenResolutionForCamera.y = screenResolution.y; // // preview size is always something like 480*320, other 320*480 // if (screenResolution.x < screenResolution.y) { // screenResolutionForCamera.x = screenResolution.y; // screenResolutionForCamera.y = screenResolution.x; // } //Lemon add 扫描框修改,解决拉伸>>>>>>>>>>>>>>>>>>>>>>>>>>>> //Lemon 扫描框修改,解决拉伸但导致成像模糊识别率很低 screenResolution改为screenResolutionForCamera);<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< cameraResolution = getCameraResolution(parameters, screenResolution); Log.d(TAG, "Camera resolution: " + screenResolution); //Lemon 扫描框修改,解决拉伸但导致成像模糊识别率很低 screenResolution改为screenResolutionForCamera);>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> }
// Preview callback for the push/record pipeline: feeds each frame to the
// native publisher and recycles the buffer back to the camera.
@Override public void onPreviewFrame(byte[] data, Camera camera) {
    frameCount++;
    // NOTE(review): an explicit System.gc() every 3000 frames is unusual;
    // presumably a workaround for allocation pressure — confirm it is still needed.
    if ( frameCount % 3000 == 0 ) {
        Log.i("OnPre", "gc+");
        System.gc();
        Log.i("OnPre", "gc-");
    }
    if (data == null) {
        // No buffer was available for this frame: allocate a replacement
        // sized for the preview format, with the width padded via
        // (w|0x1f)+1 to the next multiple of 32 strictly greater than w.
        Parameters params = camera.getParameters();
        Size size = params.getPreviewSize();
        int bufferSize = (((size.width|0x1f)+1) * size.height * ImageFormat.getBitsPerPixel(params.getPreviewFormat())) / 8;
        camera.addCallbackBuffer(new byte[bufferSize]);
    } else {
        if(isPushing || isRecording) {
            // Hand the raw frame to the native encoder/publisher.
            libPublisher.SmartPublisherOnCaptureVideoData(publisherHandle, data, data.length, currentCameraType, currentOrigentation);
        }
        // Return the buffer to the camera for reuse.
        camera.addCallbackBuffer(data);
    }
}
}
// Preview callback for the push pipeline: feeds each frame to the native
// publisher while a push is active and recycles the buffer back to the camera.
@Override public void onPreviewFrame(byte[] data, Camera camera) {
    pushFrameCount++;
    // NOTE(review): an explicit System.gc() every 3000 frames is unusual;
    // presumably a workaround for allocation pressure — confirm it is still needed.
    if ( pushFrameCount % 3000 == 0 ) {
        Log.i("OnPre", "gc+");
        System.gc();
        Log.i("OnPre", "gc-");
    }
    if (data == null) {
        // No buffer was available for this frame: allocate a replacement
        // sized for the preview format, with the width padded via
        // (w|0x1f)+1 to the next multiple of 32 strictly greater than w.
        Parameters params = camera.getParameters();
        Size size = params.getPreviewSize();
        int bufferSize = (((size.width|0x1f)+1) * size.height * ImageFormat.getBitsPerPixel(params.getPreviewFormat())) / 8;
        camera.addCallbackBuffer(new byte[bufferSize]);
    } else {
        if( isPushStart ) {
            // Hand the raw frame to the native encoder/publisher.
            libPublisher.SmartPublisherOnCaptureVideoData(publisherHandle, data, data.length, pushCurrentCameraType, currentPushOrigentation);
        }
        // Return the buffer to the camera for reuse.
        camera.addCallbackBuffer(data);
    }
}
// Preview callback shared by the RTSP/RTMP push and recording pipelines:
// feeds each frame to the native publisher while any sink is active and
// recycles the buffer back to the camera.
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
    frameCount++;
    // NOTE(review): an explicit System.gc() every 3000 frames is unusual;
    // presumably a workaround for allocation pressure — confirm it is still needed.
    if (frameCount % 3000 == 0) {
        Log.i("OnPre", "gc+");
        System.gc();
        Log.i("OnPre", "gc-");
    }
    if (data == null) {
        // No buffer was available for this frame: allocate a replacement
        // sized for the preview format, with the width padded via
        // (w|0x1f)+1 to the next multiple of 32 strictly greater than w.
        Parameters params = camera.getParameters();
        Size size = params.getPreviewSize();
        int bufferSize = (((size.width | 0x1f) + 1) * size.height * ImageFormat.getBitsPerPixel(params.getPreviewFormat())) / 8;
        camera.addCallbackBuffer(new byte[bufferSize]);
    } else {
        if (isRTSPPublisherRunning || isPushingRtmp || isRecording || isPushingRtsp) {
            // Hand the raw frame to the native encoder/publisher.
            libPublisher.SmartPublisherOnCaptureVideoData(publisherHandle, data, data.length, currentCameraType, currentOrigentation);
        }
        // Return the buffer to the camera for reuse.
        camera.addCallbackBuffer(data);
    }
}
/** The parameters default to NV21 and track a subsequent setPreviewFormat. */
@Test
public void testPreviewFormat() throws Exception {
    int initialFormat = parameters.getPreviewFormat();
    assertThat(initialFormat).isEqualTo(ImageFormat.NV21);

    parameters.setPreviewFormat(ImageFormat.JPEG);

    int updatedFormat = parameters.getPreviewFormat();
    assertThat(updatedFormat).isEqualTo(ImageFormat.JPEG);
}
/** setParameters must persist a changed preview format back onto the camera. */
@Test
public void testSetParameters() throws Exception {
    Camera.Parameters params = camera.getParameters();
    assertThat(params.getPreviewFormat()).isEqualTo(ImageFormat.NV21);

    params.setPreviewFormat(ImageFormat.JPEG);
    camera.setParameters(params);

    int persistedFormat = camera.getParameters().getPreviewFormat();
    assertThat(persistedFormat).isEqualTo(ImageFormat.JPEG);
}