/** Returns a stream over the proxied bucket's data; the proxy owns the bytes. */
@Override
public InputStream getInputStream() throws IOException {
    // Pure delegation: no state is tracked here.
    InputStream stream = proxy.getInputStream();
    return stream;
}
/**
 * Opens the underlying bucket's stream.
 *
 * @param buffer if true use the bucket's buffered stream, otherwise the unbuffered one
 * @throws IOException if the bucket cannot be opened
 */
ReaderBucketInputStream(boolean buffer) throws IOException {
    if (buffer) {
        is = bucket.getInputStream();
    } else {
        is = bucket.getInputStreamUnbuffered();
    }
}
/**
 * Returns a fresh stream over the bucket's data.
 *
 * @throws IOException if this bucket was already migrated or freed
 */
@Override
public InputStream getInputStream() throws IOException {
    synchronized (this) {
        // Fail fast if ownership of the data has moved on. Check order matters:
        // "migrated" is reported before "freed", as before.
        if (migrated)
            throw new IOException("Already migrated to a RandomAccessBucket");
        if (freed)
            throw new IOException("Already freed");
    }
    // Open outside the lock; the underlying bucket manages its own safety.
    return bucket.getInputStream();
}
/** Wraps the underlying bucket's stream in this class's MyInputStream decorator. */
@Override
public InputStream getInputStream() throws IOException {
    InputStream raw = underlying.getInputStream();
    return new MyInputStream(raw);
}
/**
 * Creates a stream that frees {@code data} once it has been read.
 *
 * @param data the bucket to read and then free
 * @throws IOException if the bucket cannot be opened
 */
public static InputStream create(Bucket data) throws IOException {
    InputStream in = data.getInputStream();
    return new ReadBucketAndFreeInputStream(in, data);
}
/**
 * Parse a bucket of data into a Metadata structure.
 *
 * @param data the bucket containing serialized metadata
 * @return the parsed Metadata
 * @throws MetadataParseException If the parsing failed because of invalid metadata.
 * @throws IOException If we could not read the metadata from the bucket.
 */
public static Metadata construct(Bucket data) throws MetadataParseException, IOException {
    // try-with-resources guarantees the stream is closed even if parsing throws,
    // matching the manual finally of the old code but more concisely.
    try (InputStream is = data.getInputStream()) {
        DataInputStream dis = new DataInputStream(is);
        return new Metadata(dis, data.size());
    }
}
private byte[] getPartAsLimitedBytes(Bucket part, int maxLength) { InputStream is = null; DataInputStream dis = null; try { is = part.getInputStream(); dis = new DataInputStream(is); byte[] buf = new byte[(int)Math.min(part.size(), maxLength)]; dis.readFully(buf, 0, buf.length); return buf; } catch (IOException ioe) { Logger.error(this, "Caught IOE:" + ioe.getMessage()); return new byte[0]; } finally { Closer.close(dis); if(dis == null) Closer.close(is); // DataInputStream.close() does this for us normally } }
/**
 * Streams this bucket's contents into {@code os}, then frees the bucket.
 * NOTE(review): this closes the caller-supplied OutputStream and always
 * frees/closes the bucket, even on failure — the caller must not reuse either.
 */
@Override public void writeTo(OutputStream os, ClientContext context) throws IOException {
    try{
        if(logMINOR) Logger.minor(this, "Generating Stream", new Exception("debug"));
        InputStream data = bucket.getInputStream();
        try {
            // -1 length means copy until EOF.
            FileUtil.copy(data, os, -1);
        } finally {
            data.close();
        }
        os.close();
        bucket.free();
        if(logMINOR) Logger.minor(this, "Stream completely generated", new Exception("debug"));
    } finally {
        // Best-effort cleanup on all paths; Closer.close() swallows secondary
        // failures, and closing/freeing twice is assumed harmless here.
        Closer.close(bucket);
        Closer.close(os);
    }
}
@Override public Bucket compress(Bucket data, BucketFactory bf, long maxReadLength, long maxWriteLength) throws IOException, CompressionOutputSizeException { Bucket output = bf.makeBucket(maxWriteLength); InputStream is = null; OutputStream os = null; try { is = data.getInputStream(); os = output.getOutputStream(); compress(is, os, maxReadLength, maxWriteLength); // It is essential that the close()'s throw if there is any problem. is.close(); is = null; os.close(); os = null; } finally { Closer.close(is); Closer.close(os); } return output; }
/**
 * Opens streams over the two buckets and delegates to the stream-based filter.
 *
 * @return the filter status reported by the underlying filter
 * @throws UnsafeContentTypeException if the content is rejected
 */
private FilterStatus applyFilter(Bucket input, Bucket output, String mimeType,
        FilterOperation operation, NodeClientCore core)
        throws UnsafeContentTypeException, IOException {
    InputStream in = null;
    OutputStream out = null;
    try {
        in = input.getInputStream();
        out = output.getOutputStream();
        return applyFilter(in, out, mimeType, operation, core);
    } finally {
        // Closer.close() swallows close() failures; the filter's result
        // (or its exception) takes precedence.
        Closer.close(in);
        Closer.close(out);
    }
}
/** Builds a Fetched around a mocked Bucket that yields {@code inputStream}. */
private Fetched createFetchResult(FreenetURI uri, InputStream inputStream) throws IOException {
    Bucket bucket = mock(Bucket.class);
    when(bucket.getInputStream()).thenReturn(inputStream);
    ClientMetadata meta = new ClientMetadata("application/xml");
    return new Fetched(uri, new FetchResult(meta, bucket));
}
/**
 * Reads the dependencies manifest from the fetched result and parses it.
 * All errors are logged and swallowed: a broken manifest must not abort the update.
 *
 * @param result the fetched data containing the manifest
 * @param build the build number the manifest belongs to
 */
protected void parseDependencies(FetchResult result, int build) {
    InputStream is = null;
    try {
        is = result.asBucket().getInputStream();
        parseDependencies(parseProperties(is, DEPENDENCIES_FILE), build);
    } catch (IOException e) {
        // Pass the exception so the cause and stack trace reach the log,
        // consistent with the Throwable branch below.
        Logger.error(this, "IOException trying to read manifest on update", e);
    } catch (Throwable t) {
        Logger.error(this, "Failed to parse update manifest: "+t, t);
    } finally {
        Closer.close(is);
    }
}
/**
 * GZIP-compresses {@code uncompressedData} and returns the compressed bytes.
 *
 * @param uncompressedData the raw input
 * @return the complete compressed output
 * @throws IOException if compression or reading back fails
 */
private byte[] doCompress(byte[] uncompressedData) throws IOException {
    Bucket inBucket = new ArrayBucket(uncompressedData);
    BucketFactory factory = new ArrayBucketFactory();
    Bucket outBucket = Compressor.COMPRESSOR_TYPE.GZIP.compress(inBucket, factory, 32768, 32768);
    byte[] outBuf = new byte[(int) outBucket.size()];
    InputStream in = outBucket.getInputStream();
    try {
        // A single read() may return fewer bytes than requested; loop until
        // the whole buffer is filled so no compressed data is silently dropped.
        int off = 0;
        while (off < outBuf.length) {
            int n = in.read(outBuf, off, outBuf.length - off);
            if (n < 0) throw new IOException("Unexpected end of compressed stream");
            off += n;
        }
    } finally {
        in.close();
    }
    return outBuf;
}
}
/**
 * BZIP2-compresses {@code uncompressedData} and returns the compressed bytes.
 *
 * @param uncompressedData the raw input
 * @return the complete compressed output
 * @throws IOException if compression or reading back fails
 */
private byte[] doCompress(byte[] uncompressedData) throws IOException {
    Bucket inBucket = new ArrayBucket(uncompressedData);
    BucketFactory factory = new ArrayBucketFactory();
    Bucket outBucket = Compressor.COMPRESSOR_TYPE.BZIP2.compress(inBucket, factory, 32768, 32768);
    byte[] outBuf = new byte[(int) outBucket.size()];
    InputStream in = outBucket.getInputStream();
    try {
        // A single read() may return fewer bytes than requested; loop until
        // the whole buffer is filled so no compressed data is silently dropped.
        int off = 0;
        while (off < outBuf.length) {
            int n = in.read(outBuf, off, outBuf.length - off);
            if (n < 0) throw new IOException("Unexpected end of compressed stream");
            off += n;
        }
    } finally {
        in.close();
    }
    return outBuf;
}
}
/**
 * LZMA-compresses {@code uncompressedData} and returns the compressed bytes.
 *
 * @param uncompressedData the raw input
 * @return the complete compressed output
 * @throws IOException if compression or reading back fails
 */
private byte[] doCompress(byte[] uncompressedData) throws IOException {
    Bucket inBucket = new ArrayBucket(uncompressedData);
    BucketFactory factory = new ArrayBucketFactory();
    // Output limit allows for LZMA header/expansion on incompressible input.
    Bucket outBucket = Compressor.COMPRESSOR_TYPE.LZMA_NEW.compress(
            inBucket, factory, uncompressedData.length, uncompressedData.length * 2 + 64);
    byte[] outBuf = new byte[(int) outBucket.size()];
    InputStream in = outBucket.getInputStream();
    try {
        // A single read() may return fewer bytes than requested; loop until
        // the whole buffer is filled so no compressed data is silently dropped.
        int off = 0;
        while (off < outBuf.length) {
            int n = in.read(outBuf, off, outBuf.length - off);
            if (n < 0) throw new IOException("Unexpected end of compressed stream");
            off += n;
        }
    } finally {
        in.close();
    }
    return outBuf;
}
}
public void testReadEmpty() throws IOException { Bucket bucket = makeBucket(3); try { assertEquals("Size-0", 0, bucket.size()); OutputStream os = bucket.getOutputStream(); os.close(); // Read byte[] InputStream is = bucket.getInputStream(); byte[] data = new byte[10]; int read = is.read(data, 0, 10); is.close(); assertEquals("Read-Empty", -1, read); } finally { freeBucket(bucket); } }
/**
 * Runs a Bucket through the GIF content filter.
 *
 * @param input the bucket to filter
 * @return a fresh bucket holding the filtered output
 * @throws AssertionError on failure
 */
private static Bucket filterGIF(Bucket input) throws IOException {
    Bucket output = new ArrayBucket();
    ContentDataFilter filter = new GIFFilter();
    try (InputStream in = input.getInputStream();
            OutputStream out = output.getOutputStream()) {
        filter.readFilter(in, out, "", null, null);
    }
    return output;
}
public void testReject() throws IOException { for (String reject : REJECT) { try (InputStream inStream = resourceToBucket(reject).getInputStream(); NullOutputStream outStream = new NullOutputStream()) { ContentDataFilter filter = new GIFFilter(); try { filter.readFilter(inStream, outStream, "", null, null); fail("Filter did not fail on reject sample " + reject); } catch (DataFilterException e) { // Expected. } } } }
/**
 * Encrypts {@code input} with a random AES key, decrypts it back, and asserts
 * the round trip is lossless.
 *
 * @param keysize AES key length in bytes
 * @param random source of key material and nonces
 */
public void checkSuccessfulRoundTrip(int keysize, Random random, Bucket input, Bucket output, Bucket decoded) throws IOException {
    byte[] key = new byte[keysize];
    random.nextBytes(key);
    OutputStream os = output.getOutputStream();
    AEADOutputStream cos = AEADOutputStream.innerCreateAES(os, key, random);
    BucketTools.copyTo(input, cos, -1);
    cos.close();
    // AEAD adds nonce/tag overhead, so the ciphertext must be strictly larger.
    assertTrue(output.size() > input.size());
    InputStream is = output.getInputStream();
    AEADInputStream cis = AEADInputStream.createAES(is, key);
    BucketTools.copyFrom(decoded, cis, -1);
    // Close the decrypting stream to release it; close() is idempotent if
    // copyFrom already closed it.
    cis.close();
    // JUnit convention: expected value first.
    assertEquals(input.size(), decoded.size());
    assertTrue(BucketTools.equalBuckets(decoded, input));
}
/**
 * Same round trip as {@code checkSuccessfulRoundTrip}, but writes and reads
 * through wrappers that split the data into random-sized chunks, exercising
 * short-read/short-write handling in the AEAD streams.
 *
 * @param keysize AES key length in bytes
 * @param random source of key material, nonces and split sizes
 */
public void checkSuccessfulRoundTripRandomSplits(int keysize, Random random, Bucket input, Bucket output, Bucket decoded) throws IOException {
    byte[] key = new byte[keysize];
    random.nextBytes(key);
    OutputStream os = output.getOutputStream();
    AEADOutputStream cos = AEADOutputStream.innerCreateAES(os, key, random);
    BucketTools.copyTo(input, new RandomShortWriteOutputStream(cos, random), -1);
    cos.close();
    // AEAD adds nonce/tag overhead, so the ciphertext must be strictly larger.
    assertTrue(output.size() > input.size());
    InputStream is = output.getInputStream();
    AEADInputStream cis = AEADInputStream.createAES(is, key);
    BucketTools.copyFrom(decoded, new RandomShortReadInputStream(cis, random), -1);
    // Close the decrypting stream to release it; close() is idempotent if
    // copyFrom already closed it.
    cis.close();
    // JUnit convention: expected value first.
    assertEquals(input.size(), decoded.size());
    assertTrue(BucketTools.equalBuckets(decoded, input));
}