/** * Constructs file output stream. * * @param ggfsCtx GGFS context. * @param path Path to stored file. * @param fileInfo File info to write binary data to. * @param prefetchBlocks Number of blocks to prefetch. * @param seqReadsBeforePrefetch Amount of sequential reads before prefetch is triggered. * @param secReader Optional secondary file system reader. * @param metrics Local GGFS metrics. */ GridGgfsInputStreamImpl(GridGgfsContext ggfsCtx, GridGgfsPath path, GridGgfsFileInfo fileInfo, int prefetchBlocks, int seqReadsBeforePrefetch, @Nullable GridGgfsReader secReader, GridGgfsLocalMetrics metrics) { assert ggfsCtx != null; assert path != null; assert fileInfo != null; assert metrics != null; this.path = path; this.fileInfo = fileInfo; this.prefetchBlocks = prefetchBlocks; this.seqReadsBeforePrefetch = seqReadsBeforePrefetch; this.secReader = secReader; this.metrics = metrics; meta = ggfsCtx.meta(); data = ggfsCtx.data(); log = ggfsCtx.kernalContext().log(GridGgfsInputStream.class); maxLocCacheSize = (prefetchBlocks > 0 ? prefetchBlocks : 1) * 3 / 2; locCache = new LinkedHashMap<>(maxLocCacheSize, 1.0f); pendingFuts = new GridConcurrentHashSet<>(prefetchBlocks > 0 ? prefetchBlocks : 1); }
/** * Constructs file output stream. * * @param ggfsCtx GGFS context. * @param path Path to stored file. * @param fileInfo File info to write binary data to. * @param prefetchBlocks Number of blocks to prefetch. * @param seqReadsBeforePrefetch Amount of sequential reads before prefetch is triggered. * @param secReader Optional secondary file system reader. * @param metrics Local GGFS metrics. */ GridGgfsInputStreamImpl(GridGgfsContext ggfsCtx, GridGgfsPath path, GridGgfsFileInfo fileInfo, int prefetchBlocks, int seqReadsBeforePrefetch, @Nullable GridGgfsReader secReader, GridGgfsLocalMetrics metrics) { assert ggfsCtx != null; assert path != null; assert fileInfo != null; assert metrics != null; this.path = path; this.fileInfo = fileInfo; this.prefetchBlocks = prefetchBlocks; this.seqReadsBeforePrefetch = seqReadsBeforePrefetch; this.secReader = secReader; this.metrics = metrics; meta = ggfsCtx.meta(); data = ggfsCtx.data(); log = ggfsCtx.kernalContext().log(GridGgfsInputStream.class); maxLocCacheSize = (prefetchBlocks > 0 ? prefetchBlocks : 1) * 3 / 2; locCache = new LinkedHashMap<>(maxLocCacheSize, 1.0f); pendingFuts = new GridConcurrentHashSet<>(prefetchBlocks > 0 ? prefetchBlocks : 1); }
// Constructor fragment (enclosing definition is outside this view).
evts = ggfsCtx.kernalContext().event(); // Event manager from kernal context.
meta = ggfsCtx.meta(); // Meta cache manager.
data = ggfsCtx.data(); // Data cache manager.
// NOTE(review): 'cfg' is declared outside this fragment — presumably the GGFS configuration; confirm.
secondaryFs = cfg.getSecondaryFileSystem();
// Constructor fragment (enclosing definition is outside this view).
evts = ggfsCtx.kernalContext().event(); // Event manager from kernal context.
meta = ggfsCtx.meta(); // Meta cache manager.
data = ggfsCtx.data(); // Data cache manager.
// NOTE(review): 'cfg' is declared outside this fragment — presumably the GGFS configuration; confirm.
secondaryFs = cfg.getSecondaryFileSystem();
// Data cache manager from GGFS context (fragment; enclosing definition not visible here).
data = ggfsCtx.data();
// Data cache manager from GGFS context (fragment; enclosing definition not visible here).
data = ggfsCtx.data();
/**
 * Prepares an append to a file in the secondary file system: opens the secondary
 * output stream, pulls the file's trailing partial block into the primary data
 * cache, then locks the file info and stores it in the meta cache.
 *
 * @param infos Resolved path-to-info map; expected to contain {@code path} and its parent.
 * @return Descriptor carrying the parent ID, the locked file info and the opened secondary stream.
 * @throws Exception If the path points to a directory, or on any secondary FS / cache failure.
 */
@Override public GridGgfsSecondaryOutputStreamDescriptor onSuccess(Map<GridGgfsPath, GridGgfsFileInfo> infos) throws Exception {
    GridGgfsFileInfo info = infos.get(path);

    // NOTE(review): no null check on 'info' — assumes the caller guarantees the file exists; confirm.
    if (info.isDirectory())
        throw new GridGgfsException("Failed to open output stream to the file in the " +
            "secondary file system because the path points to a directory: " + path);

    // Open the secondary stream first. NOTE(review): if a later step throws, this stream
    // is not closed here — verify the caller cleans it up on failure.
    out = fs.append(path, bufSize, false, null);

    // Synchronize file ending.
    long len = info.length();
    int blockSize = info.blockSize();

    int remainder = (int)(len % blockSize);

    if (remainder > 0) {
        // Last block is partial: read it back from the secondary FS so the primary
        // data cache holds the up-to-date tail before appending continues.
        int blockIdx = (int)(len / blockSize);

        GridGgfsReader reader = fs.open(path, bufSize);

        try {
            ggfsCtx.data().dataBlock(info, path, blockIdx, reader).get();
        }
        finally {
            reader.close();
        }
    }

    // Set lock and return.
    info = lockInfo(info);

    metaCache.putx(info.id(), info);

    // NOTE(review): 'infos.get(path.parent())' would NPE for a root-level path with no parent entry — confirm callers always supply it.
    return new GridGgfsSecondaryOutputStreamDescriptor(infos.get(path.parent()).id(), info, out);
}
/**
 * Prepares an append to a file in the secondary file system: opens the secondary
 * output stream, pulls the file's trailing partial block into the primary data
 * cache, then locks the file info and stores it in the meta cache.
 *
 * @param infos Resolved path-to-info map; expected to contain {@code path} and its parent.
 * @return Descriptor carrying the parent ID, the locked file info and the opened secondary stream.
 * @throws Exception If the path points to a directory, or on any secondary FS / cache failure.
 */
@Override public GridGgfsSecondaryOutputStreamDescriptor onSuccess(Map<GridGgfsPath, GridGgfsFileInfo> infos) throws Exception {
    GridGgfsFileInfo info = infos.get(path);

    // NOTE(review): no null check on 'info' — assumes the caller guarantees the file exists; confirm.
    if (info.isDirectory())
        throw new GridGgfsException("Failed to open output stream to the file in the " +
            "secondary file system because the path points to a directory: " + path);

    // Open the secondary stream first. NOTE(review): if a later step throws, this stream
    // is not closed here — verify the caller cleans it up on failure.
    out = fs.append(path, bufSize, false, null);

    // Synchronize file ending.
    long len = info.length();
    int blockSize = info.blockSize();

    int remainder = (int)(len % blockSize);

    if (remainder > 0) {
        // Last block is partial: read it back from the secondary FS so the primary
        // data cache holds the up-to-date tail before appending continues.
        int blockIdx = (int)(len / blockSize);

        GridGgfsReader reader = fs.open(path, bufSize);

        try {
            ggfsCtx.data().dataBlock(info, path, blockIdx, reader).get();
        }
        finally {
            reader.close();
        }
    }

    // Set lock and return.
    info = lockInfo(info);

    metaCache.putx(info.id(), info);

    // NOTE(review): 'infos.get(path.parent())' would NPE for a root-level path with no parent entry — confirm callers always supply it.
    return new GridGgfsSecondaryOutputStreamDescriptor(infos.get(path.parent()).id(), info, out);
}
/** * Constructor. * * @param ggfsCtx GGFS context. */ GridGgfsDeleteWorker(GridGgfsContext ggfsCtx) { super("ggfs-delete-worker%" + ggfsCtx.ggfs().name() + "%" + ggfsCtx.kernalContext().localNodeId() + "%"); this.ggfsCtx = ggfsCtx; meta = ggfsCtx.meta(); data = ggfsCtx.data(); evts = ggfsCtx.kernalContext().event(); String ggfsName = ggfsCtx.ggfs().name(); topic = F.isEmpty(ggfsName) ? TOPIC_GGFS : TOPIC_GGFS.topic(ggfsName); assert meta != null; assert data != null; log = ggfsCtx.kernalContext().log(GridGgfsDeleteWorker.class); }
/** * Constructor. * * @param ggfsCtx GGFS context. */ GridGgfsDeleteWorker(GridGgfsContext ggfsCtx) { super("ggfs-delete-worker%" + ggfsCtx.ggfs().name() + "%" + ggfsCtx.kernalContext().localNodeId() + "%"); assert ggfsCtx != null; this.ggfsCtx = ggfsCtx; meta = ggfsCtx.meta(); data = ggfsCtx.data(); evts = ggfsCtx.kernalContext().event(); String ggfsName = ggfsCtx.ggfs().name(); topic = F.isEmpty(ggfsName) ? TOPIC_GGFS : TOPIC_GGFS.topic(ggfsName); assert meta != null; assert data != null; log = ggfsCtx.kernalContext().log(GridGgfsDeleteWorker.class); }
// Continuation of a listing-update invocation (the statement begins outside this view):
// installs a fresh listing entry for 'path' built from the new file info.
new UpdateListing(path.name(), new GridGgfsListingEntry(newInfo), false));

// Schedule asynchronous removal of the replaced file's data blocks.
GridFuture<?> delFut = ggfsCtx.data().delete(oldInfo);
// Continuation of a listing-update invocation (the statement begins outside this view):
// installs a fresh listing entry for 'path' built from the new file info.
new UpdateListing(path.name(), new GridGgfsListingEntry(newInfo), false));

// Schedule asynchronous removal of the replaced file's data blocks.
GridFuture<?> delFut = ggfsCtx.data().delete(oldInfo);