/**
 * Adapts a raw column value to an {@link InputStream}: SQL NULL passes
 * through as {@code null}, a {@link Blob} is unwrapped to its binary
 * stream, and anything else is cast directly.
 *
 * @param o the raw value ({@code null}, a {@code Blob}, or an {@code InputStream})
 * @return the adapted stream, or {@code null} for a null input
 * @throws SQLException if the Blob's binary stream cannot be opened
 */
private static InputStream asInputStream(Object o) throws SQLException {
    if (o == null) {
        return null;
    }
    if (o instanceof Blob) {
        Blob blob = (Blob) o;
        return blob.getBinaryStream();
    }
    // anything else is assumed to already be a stream — cast as-is
    return (InputStream) o;
}
/**
 * Retrieves the BLOB at the given column as a binary stream.
 *
 * @param rs the result set positioned on the current row
 * @param columnIndex the 1-based column index
 * @return the BLOB's binary stream, or {@code null} when the column is SQL NULL
 * @throws SQLException if the column cannot be read
 */
public InputStream getBlobAsBinaryStream(ResultSet rs, int columnIndex) throws SQLException {
    Blob blob = rs.getBlob(columnIndex);
    return (blob == null) ? null : blob.getBinaryStream();
}
/**
 * Retrieves the BLOB at the given column as a binary stream.
 * <p>
 * When {@code wrapAsLob} is set, the value is fetched via the driver's
 * {@link Blob} locator; otherwise the plain binary-stream accessor is used.
 *
 * @param rs the result set positioned on the current row
 * @param columnIndex the 1-based column index
 * @return the binary stream, or {@code null} when the column is SQL NULL
 * @throws SQLException if the column cannot be read
 */
@Override
@Nullable
public InputStream getBlobAsBinaryStream(ResultSet rs, int columnIndex) throws SQLException {
    logger.debug("Returning BLOB as binary stream");
    if (this.wrapAsLob) {
        Blob blob = rs.getBlob(columnIndex);
        // Fix: getBlob returns null for SQL NULL; previously this threw an
        // NPE even though the method is declared @Nullable.
        return (blob != null ? blob.getBinaryStream() : null);
    }
    else {
        return rs.getBinaryStream(columnIndex);
    }
}
/**
 * Adapts a raw column value to an {@link InputStream}.
 * SQL NULL maps to {@code null}, a {@link Blob} yields its binary stream,
 * and any other value is cast directly to {@code InputStream}.
 *
 * @param o the raw value ({@code null}, a {@code Blob}, or an {@code InputStream})
 * @return the adapted stream, or {@code null} for a null input
 * @throws SQLException if the Blob's binary stream cannot be opened
 */
private static InputStream asInputStream(Object o) throws SQLException {
    return (o == null)
            ? null
            : (o instanceof Blob)
                    ? ((Blob) o).getBinaryStream()
                    : (InputStream) o;
}
/**
 * Converts a {@link Blob} to a byte array.
 *
 * @param blob the BLOB to read (must not be {@code null})
 * @return the BLOB's full contents, or {@code null} if reading fails
 */
private static byte[] blobToBytes(Blob blob) {
    try (InputStream is = blob.getBinaryStream()) {
        byte[] b = new byte[(int) blob.length()];
        // Fix: a single InputStream.read(b) may return fewer bytes than
        // requested; loop until the buffer is full or the stream ends.
        int off = 0;
        while (off < b.length) {
            int n = is.read(b, off, b.length - off);
            if (n < 0) {
                break; // stream ended early; remaining bytes stay zero
            }
            off += n;
        }
        return b;
    } catch (IOException | SQLException e) {
        logger.error(e.getMessage());
        return null;
    }
}
/**
 * Binds a {@link Blob} value to the given statement parameter.
 * A {@code null} value is bound as SQL NULL with the VARBINARY type;
 * otherwise the BLOB's binary stream and length are passed through.
 *
 * @param statement the prepared statement to bind into
 * @param index the 1-based parameter index
 * @param value the BLOB value, possibly {@code null}
 * @throws SQLException if binding fails or the BLOB cannot be read
 */
@Override
public void write(PreparedStatement statement, int index, Blob value) throws SQLException {
    if (value == null) {
        statement.setNull(index, Types.VARBINARY);
        return;
    }
    statement.setBinaryStream(index, value.getBinaryStream(), value.length());
}
}
/**
 * Binds a BLOB parameter: {@code null} becomes SQL NULL of type BLOB,
 * any other value is cast to {@link Blob} and bound via its binary stream.
 *
 * @param stat the prepared statement to bind into
 * @param obj the value to bind ({@code null} or a {@code Blob})
 * @param i the 1-based parameter index
 * @throws SQLException if binding fails or the BLOB cannot be read
 */
public void set(PreparedStatement stat, Object obj, int i) throws SQLException {
    if (obj == null) {
        stat.setNull(i, Types.BLOB);
        return;
    }
    Blob value = (Blob) obj;
    stat.setBinaryStream(i, value.getBinaryStream(), value.length());
}
private InputStream getInputStream(String resPath, ResultSet rs) throws SQLException, IOException { if (rs == null) { return null; } Blob blob = rs.getBlob(META_TABLE_CONTENT); if (blob == null || blob.length() == 0) { return openPushdown(resPath); // empty bytes is pushdown indicator } else { return blob.getBinaryStream(); } }
/**
 * Load a complex ndarray from a blob
 *
 * @param blob the blob to load from
 * @return the complex ndarray
 */
@Override
public IComplexNDArray loadComplex(Blob blob) throws SQLException, IOException {
    // wrap the BLOB's binary stream and let Nd4j deserialize it directly
    return Nd4j.readComplex(new DataInputStream(blob.getBinaryStream()));
}
/**
 * Converts a {@link Blob} into its binary {@link InputStream}, translating
 * any {@link SQLException} into the converter's unchecked exception type.
 *
 * @param original the BLOB to read
 * @return the BLOB's binary stream
 */
@Override
public InputStream convert(Blob original) {
    try {
        return original.getBinaryStream();
    } catch (SQLException sqle) {
        throw new CannotConvertBetweenTypesException("Error while attempting to read data from Blob", sqle);
    }
}
}
/**
 * Prints one column of the current row: raw BLOB bytes when raw-blob mode
 * is on and the column is a BLOB, otherwise the (possibly quoted) string
 * value.
 *
 * @param rs the result set positioned on the current row
 * @param col the 1-based column index
 * @param out the destination stream
 * @throws SQLException if the column cannot be read
 */
private void printValue(ResultSet rs, int col, PrintStream out) throws SQLException {
    if (rawBlobs && rs.getMetaData().getColumnType(col) == Types.BLOB) {
        Blob blob = rs.getBlob(col);
        if (blob != null) {
            // Fix: reuse the Blob already fetched instead of calling
            // rs.getBlob(col) a second time — some drivers only allow a
            // column to be read once per row.
            new StreamPumper(blob.getBinaryStream(), out).run();
        }
    } else {
        out.print(maybeQuote(rs.getString(col)));
    }
}
/**
 * Reads the serialized job data from the named BLOB column.
 * <p>
 * When properties mode is enabled ({@code canUseProperties()}), returns the
 * raw binary stream of the BLOB (or {@code null} when the BLOB is absent or
 * empty); otherwise delegates to {@code getObjectFromBlob} for full object
 * deserialization.
 *
 * @param rs the result set positioned on the current row
 * @param colName the BLOB column name
 * @return an {@code InputStream} over the BLOB data, {@code null}, or the
 *         deserialized object from {@code getObjectFromBlob}
 */
@Override
protected Object getJobDataFromBlob(ResultSet rs, String colName) throws ClassNotFoundException, IOException, SQLException {
    if (canUseProperties()) {
        Blob blobLocator = rs.getBlob(colName);
        InputStream binaryInput = null;
        try {
            // only open a stream when there is actual content
            if (null != blobLocator && blobLocator.length() > 0) {
                binaryInput = blobLocator.getBinaryStream();
            }
        } catch (Exception ignore) {
            // best-effort: any driver error reading the locator yields null
            // job data. NOTE(review): consider logging instead of swallowing.
        }
        return binaryInput;
    }
    return getObjectFromBlob(rs, colName);
}
}
/**
 * Extracts a BLOB column value, choosing between locator-based and
 * materialized representations based on the configured prefetch threshold.
 */
@Override
protected X doExtract(ResultSet rs, String name, WrapperOptions options) throws SQLException {
    Blob rsBlob = rs.getBlob( name );
    // Null or small BLOBs (below the prefetch threshold) are wrapped
    // directly around the driver's locator.
    if ( rsBlob == null || rsBlob.length() < HANAStreamBlobTypeDescriptor.this.maxLobPrefetchSize ) {
        return javaTypeDescriptor.wrap( rsBlob, options );
    }
    // Larger BLOBs are fully materialized into memory up front —
    // presumably so the data survives past the result set's lifetime;
    // TODO confirm against the HANA dialect's LOB handling.
    Blob blob = new MaterializedBlob( DataHelper.extractBytes( rsBlob.getBinaryStream() ) );
    return javaTypeDescriptor.wrap( blob, options );
}
/**
 * Extracts a BLOB column value; BLOBs at or above the prefetch threshold
 * are materialized into memory, smaller ones keep the driver's locator.
 */
@Override
protected X doExtract(ResultSet rs, String name, WrapperOptions options) throws SQLException {
    Blob rsBlob = rs.getBlob( name );
    // Null or small BLOBs are wrapped directly around the locator.
    if ( rsBlob == null || rsBlob.length() < HANABlobTypeDescriptor.this.maxLobPrefetchSize ) {
        return javaTypeDescriptor.wrap( rsBlob, options );
    }
    // Larger BLOBs are copied fully into a MaterializedBlob so the bytes
    // remain usable independently of the result set.
    Blob blob = new MaterializedBlob( DataHelper.extractBytes( rsBlob.getBinaryStream() ) );
    return javaTypeDescriptor.wrap( blob, options );
}
/**
 * Reads the BLOB at the given column index, spools its contents into a
 * temporary file, and returns a file-backed blob wrapper.
 *
 * @param rs the result set positioned on the current row
 * @param columnIndex the 1-based column index
 * @return a {@code SimpleBlob} over the temp file, or {@code null} for SQL NULL
 * @throws SQLException if the column cannot be read
 */
public Object get(ResultSet rs, int columnIndex) throws SQLException {
    Blob source = rs.getBlob(columnIndex);
    if (source == null) {
        return null;
    }
    File tempFile = this.createTempFile();
    Files.write(tempFile, source.getBinaryStream());
    return new SimpleBlob(tempFile);
}
/**
 * Reads the BLOB in the named column, spools its contents into a
 * temporary file, and returns a file-backed blob wrapper.
 *
 * @param rs the result set positioned on the current row
 * @param colName the column name
 * @return a {@code SimpleBlob} over the temp file, or {@code null} for SQL NULL
 * @throws SQLException if the column cannot be read
 */
public Object get(ResultSet rs, String colName) throws SQLException {
    Blob source = rs.getBlob(colName);
    if (source == null) {
        return null;
    }
    File tempFile = this.createTempFile();
    Files.write(tempFile, source.getBinaryStream());
    return new SimpleBlob(tempFile);
}
/**
 * Renders a {@link Blob} as a string by extracting its bytes and
 * delegating to the primitive byte-array descriptor's formatting.
 *
 * @param value the BLOB to render
 * @return the string form of the BLOB's bytes
 */
@Override
public String toString(Blob value) {
    byte[] extracted;
    try {
        extracted = DataHelper.extractBytes( value.getBinaryStream() );
    } catch ( SQLException e ) {
        throw new HibernateException( "Unable to access blob stream", e );
    }
    return PrimitiveByteArrayTypeDescriptor.INSTANCE.toString( extracted );
}
/**
 * Merges detached BLOB state into the managed target by streaming the
 * detached bytes over the connected locator. When detached and managed
 * references are the same instance, falls back to the new-locator strategy.
 */
@Override
public Blob mergeBlob(Blob original, Blob target, SharedSessionContractImplementor session) {
    if ( original != target ) {
        try {
            // the BLOB just read during the load phase of merge
            final OutputStream connectedStream = target.setBinaryStream( 1L );
            // the BLOB from the detached state
            final InputStream detachedStream = original.getBinaryStream();
            StreamCopier.copy( detachedStream, connectedStream );
            return target;
        }
        catch (SQLException e ) {
            // translate into the dialect-aware JDBC exception hierarchy
            throw session.getFactory().getSQLExceptionHelper().convert( e, "unable to merge BLOB data" );
        }
    }
    else {
        // same instance on both sides — nothing to copy; delegate so a
        // fresh locator is created instead
        return NEW_LOCATOR_LOB_MERGE_STRATEGY.mergeBlob( original, target, session );
    }
}
/**
 * Merges BLOB state by creating a brand-new locator-backed BLOB from the
 * detached contents via the session's {@code LobCreator}. A {@code null}
 * detached value yields an empty BLOB (unless the target is also
 * {@code null}, in which case {@code null} is returned).
 */
@Override
public Blob mergeBlob(Blob original, Blob target, SharedSessionContractImplementor session) {
    if ( original == null && target == null ) {
        return null;
    }
    try {
        final LobCreator lobCreator = session.getFactory().getServiceRegistry().getService( JdbcServices.class ).getLobCreator( session );
        // null detached state is normalized to an empty BLOB so the managed
        // side always ends up with a usable locator
        return original == null
                ? lobCreator.createBlob( ArrayHelper.EMPTY_BYTE_ARRAY )
                : lobCreator.createBlob( original.getBinaryStream(), original.length() );
    }
    catch (SQLException e) {
        // translate into the dialect-aware JDBC exception hierarchy
        throw session.getFactory().getSQLExceptionHelper().convert( e, "unable to merge BLOB data" );
    }
}
@Test @Override public void testExternalization() { // blobs of the same internal value are not really comparable String externalized = BlobTypeDescriptor.INSTANCE.toString( original ); Blob consumed = BlobTypeDescriptor.INSTANCE.fromString( externalized ); try { PrimitiveByteArrayTypeDescriptor.INSTANCE.areEqual( DataHelper.extractBytes( original.getBinaryStream() ), DataHelper.extractBytes( consumed.getBinaryStream() ) ); } catch ( SQLException e ) { fail( "SQLException accessing blob : " + e.getMessage() ); } }