} catch (Exception e) { System.out.println("Exception in the TryLoad !" + e); e.printStackTrace(); throw new Exception("FATAL error: OpenKinect2 camera: driver could not load."); throw new Exception("FATAL error: OpenKinect2: no device connected!");
err = dc1394_video_set_iso_speed(camera, DC1394_ISO_SPEED_400); if (err != DC1394_SUCCESS) { throw new Exception("dc1394_video_set_iso_speed() Error " + err + ": Could not set maximum iso speed."); throw new Exception("dc1394_video_set_mode() Error " + err + ": Could not set video mode."); DC1394_QUERY_FROM_CAMERA, DC1394_QUERY_FROM_CAMERA); if (err != DC1394_SUCCESS) { throw new Exception("dc1394_format7_set_roi() Error " + err + ": Could not set format7 mode."); throw new Exception("dc1394_video_set_framerate() Error " + err + ": Could not set framerate."); throw new Exception("dc1394_capture_setup() Error " + err + ": Could not setup camera-\n" + "make sure that the video mode and framerate are\nsupported by your camera."); err = dc1394_feature_set_absolute_value(camera, DC1394_FEATURE_GAMMA, (float)gamma); if (err != DC1394_SUCCESS) { throw new Exception("dc1394_feature_set_absolute_value() Error " + err + ": Could not set gamma."); err = dc1394_video_set_transmission(camera, DC1394_ON); if (err != DC1394_SUCCESS) { throw new Exception("dc1394_video_set_transmission() Error " + err + ": Could not start camera iso transmission."); throw new Exception("dc1394_reset_bus() Error: Could not reset bus and try to start again.", ex);
if (format != null && format.length() > 0) { if ((f = av_find_input_format(format)) == null) { throw new Exception("av_find_input_format() error: Could not find input format \"" + format + "\"."); av_dict_set(options, "pixel_format", null, 0); if ((ret = avformat_open_input(oc, filename, f, options)) < 0) { throw new Exception("avformat_open_input() error " + ret + ": Could not open input \"" + filename + "\". (Has setFormat() been called?)"); throw new Exception("avformat_find_stream_info() error " + ret + ": Could not find stream information."); throw new Exception("Did not find a video or audio stream inside \"" + filename + "\" for videoStream == " + videoStream + " and audioStream == " + audioStream + "."); throw new Exception("avcodec_find_decoder() error: Unsupported video format or codec not found: " + video_par.codec_id() + "."); throw new Exception("avcodec_alloc_context3() error: Could not allocate video decoding context."); throw new Exception("avcodec_parameters_to_context() error: Could not copy the video stream parameters."); throw new Exception("avcodec_open2() error " + ret + ": Could not open video codec."); throw new Exception("av_frame_alloc() error: Could not allocate raw picture frame."); throw new Exception("av_frame_alloc() error: Could not allocate RGB picture frame."); throw new Exception("avcodec_find_decoder() error: Unsupported audio format or codec not found: " + audio_par.codec_id() + "."); throw new Exception("avcodec_alloc_context3() error: Could not allocate audio decoding context.");
fds.events(POLLIN); if (poll(fds, 1, timeout) == 0) { throw new Exception("poll() Error: Timeout occured. (Has start() been called?)"); int err = dc1394_capture_dequeue(camera, DC1394_CAPTURE_POLICY_WAIT, raw_image[i]); if (err != DC1394_SUCCESS) { throw new Exception("dc1394_capture_dequeue(WAIT) Error " + err + ": Could not capture a frame. (Has start() been called?)"); err = dc1394_capture_dequeue(camera, DC1394_CAPTURE_POLICY_POLL, raw_image[i]); if (err != DC1394_SUCCESS) { throw new Exception("dc1394_capture_dequeue(POLL) Error " + err + ": Could not capture a frame."); frame.color_filter(c); if (err != DC1394_SUCCESS) { throw new Exception("dc1394_debayer_frames() Error " + err + ": Could not debayer frame."); throw new Exception("dc1394_convert_frames() Error " + err + ": Could not convert frame.");
if (error.notEquals(PGRERROR_OK)) { PrintError(error); throw new FrameGrabber.Exception("GetTriggerMode() Error " + error.GetDescription()); if (error.notEquals(PGRERROR_OK)) { PrintError(error); throw new FrameGrabber.Exception("SetTriggerMode() Error " + error.GetDescription()); if (error.notEquals(PGRERROR_OK)) { PrintError(error); throw new FrameGrabber.Exception("ReadRegister(IMAGE_DATA_FORMAT, regOut) Error " + error.GetDescription()); if (error.notEquals(PGRERROR_OK)) { PrintError(error); throw new FrameGrabber.Exception("WriteRegister(IMAGE_DATA_FORMAT, reg) Error " + error.GetDescription()); error = camera.GetProperty(gammaProp); if (error.notEquals(PGRERROR_OK)) { throw new FrameGrabber.Exception("GetProperty(gammaProp) Error " + error.GetDescription()); if (error.notEquals(PGRERROR_OK)) { PrintError(error); throw new FrameGrabber.Exception("SetProperty(gammaProp) Error " + error.GetDescription()); if (error.notEquals(PGRERROR_OK)) { PrintError(error); throw new FrameGrabber.Exception("StartCapture() Error " + error.GetDescription()); if (error.notEquals(PGRERROR_OK)) {
public Frame grabFrame(boolean doAudio, boolean doVideo, boolean doProcessing, boolean keyFrames) throws Exception { if (oc == null || oc.isNull()) { throw new Exception("Could not grab: No AVFormatContext. (Has start() been called?)"); } else if ((!doVideo || video_st == null) && (!doAudio || audio_st == null)) { return null;
throw new Exception("avformat_seek_file() error " + ret + ": Could not seek file to timestamp " + timestamp + ".");
public Frame grab() throws FrameGrabber.Exception { Error error = camera.RetrieveBuffer(raw_image); if (error.notEquals(PGRERROR_OK)) { throw new FrameGrabber.Exception("flycaptureGrabImage2() Error " + error + " (Has start() been called?)"); error = camera.ReadRegister(IMAGE_DATA_FORMAT, regOut); if (error.notEquals(PGRERROR_OK)) { throw new FrameGrabber.Exception("flycaptureGetCameraRegister() Error " + error); throw new FrameGrabber.Exception("raw_image.Convert Error " + error);
throw new Exception("Error on InputStream.close(): ", ex); } finally { inputStreams.remove(oc);
av_get_default_channel_layout(audio_c.channels()), audio_c.sample_fmt(), audio_c.sample_rate(), 0, null); if (samples_convert_ctx == null) { throw new Exception("swr_alloc_set_opts() error: Cannot allocate the conversion context."); } else if ((ret = swr_init(samples_convert_ctx)) < 0) { throw new Exception("swr_init() error " + ret + ": Cannot initialize the conversion context."); throw new Exception("swr_convert() error " + ret + ": Cannot convert audio samples.");
throw new Exception("FATAL error: Realsense camera: " + deviceNumber + " not connected/found");
throw new FrameGrabber.Exception(null, ex);
throw (FrameGrabber.Exception)t; } else { throw new FrameGrabber.Exception("Failed to create " + settings.getFrameGrabber(), t);
throw new Exception("freenect_sync_get_video() Error " + err + ": Failed to get video synchronously.");
throw new Exception("freenect_sync_get_depth() Error " + err + ": Failed to get depth synchronously.");
imageHeight > 0 ? imageHeight : 480, connection)) { myVideoInput = null; throw new Exception("videoInput.setupDevice() Error: Could not setup device."); format.equals("VI_NTSC_433") ? VI_NTSC_433 : -1; if (f >= 0 && !myVideoInput.setFormat(deviceNumber, f)) { throw new Exception("videoInput.setFormat() Error: Could not set format " + format + ".");
/**
 * Stops and releases both the video and the audio grabber, then shuts the
 * shared executor down. The two grabbers are stopped concurrently so that a
 * slow one does not delay the other.
 *
 * <p>Fix over the previous version: when the waiting thread is interrupted,
 * the interrupt status is restored via {@code Thread.currentThread().interrupt()}
 * before the failure is rethrown, so callers further up the stack can still
 * observe the interruption.
 *
 * @throws IllegalStateException if the grabber was never started
 * @throws FrameGrabber.Exception if stopping either grabber fails or the wait
 *         is interrupted
 */
@Override public void stop() throws FrameGrabber.Exception {
    if (!started) {
        throw new IllegalStateException();
    }
    try {
        // Stop/release video and audio in parallel on the dual executor.
        Future<Void> vf = dualExecutor.submit(() -> {
            videoGrabber.stop();
            videoGrabber.release();
            return null;
        });
        Future<Void> af = dualExecutor.submit(() -> {
            audioGrabber.stop();
            audioGrabber.release();
            return null;
        });
        try {
            vf.get();
            af.get();
        } catch (InterruptedException ex) {
            // Preserve the interrupt status for callers before wrapping.
            Thread.currentThread().interrupt();
            throw new FrameGrabber.Exception(null, ex);
        } catch (ExecutionException ex) {
            throw new FrameGrabber.Exception(null, ex);
        }
        started = false;
    } finally {
        // Always tear the executor down, even when stopping failed.
        dualExecutor.shutdown();
        dualExecutor = null;
    }
} }
/** Grab one frame; * the caller have to make a copy of returned image before processing. * * It will throw null pointer exception if not started before grabbing. * @return "read-only" RGB, 4-channel or GRAY/1-channel image, it throws exception if no image is available */ @Override public Frame grab() throws Exception { IplImage img = null; switch (triggered) { case NO_TRIGGER: img = grab_RGB4(); break; case HAS_FRAME: triggered = Triggered.NO_TRIGGER; img = makeImage(ps3_frame); break; case NO_FRAME: triggered = Triggered.NO_TRIGGER; return null; default: // just schizophrenia - for future enhancement throw new Exception("Int. error - unknown triggering state"); } if ((img != null) && (imageMode == ImageMode.GRAY)) { cvCvtColor(img, image_1ch, CV_RGB2GRAY); img = image_1ch; } return converter.convert(img); }