/**
 * Create a {@link RegionInfo} from the serialized version on-disk.
 * @param fs {@link FileSystem} that contains the Region Info file
 * @param regionDir {@link Path} to the Region Directory that contains the Info file
 * @return A {@link RegionInfo} instance read from the Region Info file.
 * @throws IOException if an error occurred during the file open/read operation.
 */
public static RegionInfo loadRegionInfoFileContent(final FileSystem fs, final Path regionDir)
    throws IOException {
  FSDataInputStream in = fs.open(new Path(regionDir, REGION_INFO_FILE));
  try {
    return RegionInfo.parseFrom(in);
  } finally {
    in.close();
  }
}
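A minimal usage sketch for the method above, assuming an HBase client Configuration; the region directory path is a hypothetical example, not taken from the snippet:

// Hypothetical caller: load the RegionInfo for one region directory on HDFS.
Configuration conf = HBaseConfiguration.create();
FileSystem fs = FileSystem.get(conf);
Path regionDir = new Path("/hbase/data/default/mytable/1588230740"); // assumed layout
RegionInfo info = loadRegionInfoFileContent(fs, regionDir);
System.out.println(info.getRegionNameAsString());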
@Test(enabled = false)
public void testOpen2() throws IOException, URISyntaxException {
  HDFSRoot hdfsRoot = new HDFSRoot("/tmp/Open");
  MetricsFileSystemInstrumentation fs = (MetricsFileSystemInstrumentation)
      FileSystem.get(new URI(instrumentedURI), new Configuration());
  Path newFile = new Path(hdfsRoot.getRootPath(), new Path("file8.ext"));
  FSDataInputStream fstream = fs.open(newFile, 100);
  Assert.assertEquals(fs.openTimer.getCount(), 1);
  fstream.close();
  hdfsRoot.cleanupRoot();
}
protected static Schema getSchemaFromFS(String schemaFSUrl, Configuration conf)
    throws IOException, URISyntaxException {
  FSDataInputStream in = null;
  FileSystem fs = null;
  try {
    fs = FileSystem.get(new URI(schemaFSUrl), conf);
  } catch (IOException ioe) {
    // return null only if the file system in schema is not recognized
    if (LOG.isDebugEnabled()) {
      String msg = "Failed to open file system for uri " + schemaFSUrl
          + " assuming it is not a FileSystem url";
      LOG.debug(msg, ioe);
    }
    return null;
  }
  try {
    in = fs.open(new Path(schemaFSUrl));
    Schema s = AvroSerdeUtils.getSchemaFor(in);
    return s;
  } finally {
    if (in != null) {
      in.close();
    }
  }
}
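A hedged usage sketch for getSchemaFromFS; the HDFS URL below is illustrative, not taken from the snippet:

// Hypothetical caller: fetch an Avro reader schema stored on HDFS.
Configuration conf = new Configuration();
Schema schema = getSchemaFromFS("hdfs://namenode:8020/schemas/events.avsc", conf);
if (schema == null) {
  // The URL did not resolve to a recognized FileSystem; fall back to another source.
}

Note that a null return only signals an unrecognized file system; a missing file still surfaces as an IOException from fs.open.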
@Test(enabled = false)
public void testOpen1() throws IOException, URISyntaxException {
  HDFSRoot hdfsRoot = new HDFSRoot("/tmp/Open");
  MetricsFileSystemInstrumentation fs = (MetricsFileSystemInstrumentation)
      FileSystem.get(new URI(instrumentedURI), new Configuration());
  Path newFile = new Path(hdfsRoot.getRootPath(), new Path("file8.ext"));
  FSDataInputStream fstream = fs.open(newFile);
  Assert.assertEquals(fs.openTimer.getCount(), 1);
  fstream.close();
  hdfsRoot.cleanupRoot();
}
private TxnRecord readTxnRecord(Path path) throws IOException {
  FSDataInputStream inputStream = null;
  try {
    inputStream = this.options.fs.open(path);
    BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
    String line;
    if ((line = reader.readLine()) != null) {
      String[] fields = line.split(",");
      return new TxnRecord(Long.valueOf(fields[0]), fields[1], Long.valueOf(fields[2]));
    }
  } finally {
    if (inputStream != null) {
      inputStream.close();
    }
  }
  return new TxnRecord(0, options.currentFile.toString(), 0);
}
private Schema loadFromUrl(String schemaUrl) throws IOException {
  Configuration conf = new Configuration();
  Schema.Parser parser = new Schema.Parser();
  if (schemaUrl.toLowerCase(Locale.ENGLISH).startsWith("hdfs://")) {
    FileSystem fs = FileSystem.get(conf);
    FSDataInputStream input = null;
    try {
      input = fs.open(new Path(schemaUrl));
      return parser.parse(input);
    } finally {
      if (input != null) {
        input.close();
      }
    }
  } else {
    InputStream is = null;
    try {
      is = new URL(schemaUrl).openStream();
      return parser.parse(is);
    } finally {
      if (is != null) {
        is.close();
      }
    }
  }
}
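A usage sketch; both URLs are placeholders:

// Hypothetical calls: the same loader handles HDFS and plain-URL schema locations.
Schema fromHdfs = loadFromUrl("hdfs://namenode:8020/schemas/event.avsc");
Schema fromHttp = loadFromUrl("http://schema-repo.example.com/event.avsc");

One caveat worth noting: the hdfs:// branch calls FileSystem.get(conf) with a default Configuration, so the URL's authority is not used to pick the cluster; the path must be resolvable against fs.defaultFS.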
/** @throws Exception If failed. */
@Test
public void testOpenIfPathIsAlreadyOpened() throws Exception {
  Path fsHome = new Path(primaryFsUri);
  Path file = new Path(fsHome, "someFile");
  FSDataOutputStream os = fs.create(file, EnumSet.noneOf(CreateFlag.class),
      Options.CreateOpts.perms(FsPermission.getDefault()));
  os.close();
  FSDataInputStream is1 = fs.open(file);
  FSDataInputStream is2 = fs.open(file);
  is1.close();
  is2.close();
}
public static String readVertify(FileSystem fs, Path file) throws IOException {
  Path vertify = new Path(file, "vertify");
  StringBuffer rtn = new StringBuffer();
  if (fs.exists(vertify)) {
    int bytesRead = 0;
    int size = 10240;
    int maxsize = 1024 * 1024;
    byte[] buff = new byte[size];
    FSDataInputStream in = fs.open(vertify);
    while (true) {
      int num = in.read(buff, 0, size);
      if (num < 0) {
        break;
      }
      bytesRead += num;
      rtn.append(new String(buff, 0, num, ENCODE_NAME));
      if (bytesRead >= maxsize) {
        break;
      }
    }
    in.close();
  }
  return rtn.toString().trim();
}
@Override
public FSDataInputStream open(Path file, int bufferSize) throws IOException {
  FTPClient client = connect();
  Path workDir = new Path(client.printWorkingDirectory());
  Path absolute = makeAbsolute(workDir, file);
  FileStatus fileStat = getFileStatus(client, absolute);
  if (fileStat.isDirectory()) {
    disconnect(client);
    throw new FileNotFoundException("Path " + file + " is a directory.");
  }
  // Change to the parent directory on the server, then read the file by
  // opening an InputStream on it; the FTP connection is closed when close()
  // is called on the returned stream.
  client.changeWorkingDirectory(absolute.getParent().toUri().getPath());
  InputStream is = client.retrieveFileStream(file.getName());
  FSDataInputStream fis = new FSDataInputStream(new FTPInputStream(is, client, statistics));
  if (!FTPReply.isPositivePreliminary(client.getReplyCode())) {
    fis.close();
    throw new IOException("Unable to open file: " + file + ", Aborting");
  }
  return fis;
}
private String getSha(final Path localFile) throws IOException, IllegalArgumentException {
  FileSystem localFs = FileSystem.getLocal(conf);
  FileStatus fileStatus = localFs.getFileStatus(localFile);
  String key = getKey(fileStatus);
  String sha256 = shaCache.getIfPresent(key);
  if (sha256 == null) {
    FSDataInputStream is = null;
    try {
      is = localFs.open(localFile);
      long start = System.currentTimeMillis();
      sha256 = DigestUtils.sha256Hex(is);
      long end = System.currentTimeMillis();
      LOG.info("Computed sha: {} for file: {} of length: {} in {} ms", sha256, localFile,
          LlapUtil.humanReadableByteCount(fileStatus.getLen()), end - start);
      shaCache.put(key, sha256);
    } finally {
      if (is != null) {
        is.close();
      }
    }
  }
  return sha256;
}
/**
 * This constructor is there primarily to serve the sort routine that
 * generates a single output file with an associated index file.
 */
public SegmentContainer(Path inName, Path indexIn) throws IOException {
  // get the segments from indexIn
  FSDataInputStream fsIndexIn = fs.open(indexIn);
  long end = fs.getFileStatus(indexIn).getLen();
  while (fsIndexIn.getPos() < end) {
    long segmentOffset = WritableUtils.readVLong(fsIndexIn);
    long segmentLength = WritableUtils.readVLong(fsIndexIn);
    Path segmentName = inName;
    segments.add(new LinkedSegmentsDescriptor(segmentOffset, segmentLength, segmentName, this));
  }
  fsIndexIn.close();
  fs.delete(indexIn, true);
  numSegmentsContained = segments.size();
  this.inName = inName;
}
public static Long readReadTimeTs(FileSystem fs, Path file) throws IOException {
  Path vertify = new Path(file, "realtime_ts");
  StringBuffer rtn = new StringBuffer();
  int bytesRead = 0;
  int size = 10240;
  int maxsize = 1024 * 1024;
  byte[] buff = new byte[size];
  FSDataInputStream in = fs.open(vertify);
  while (true) {
    int num = in.read(buff, 0, size);
    if (num < 0) {
      break;
    }
    bytesRead += num;
    rtn.append(new String(buff, 0, num, ENCODE_NAME));
    if (bytesRead >= maxsize) {
      break;
    }
  }
  in.close();
  LOG.info("readReadTimeTs:" + rtn.toString() + ":" + file.toString());
  return Long.valueOf(rtn.toString().trim());
}
private static TableDescriptor readTableDescriptor(FileSystem fs, FileStatus status)
    throws IOException {
  int len = Ints.checkedCast(status.getLen());
  byte[] content = new byte[len];
  FSDataInputStream fsDataInputStream = fs.open(status.getPath());
  try {
    fsDataInputStream.readFully(content);
  } finally {
    fsDataInputStream.close();
  }
  TableDescriptor htd = null;
  try {
    htd = TableDescriptorBuilder.parseFrom(content);
  } catch (DeserializationException e) {
    throw new IOException("content=" + Bytes.toShort(content), e);
  }
  return htd;
}
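The same read-fully pattern can be written more compactly with try-with-resources (Java 7+); a behavior-equivalent sketch:

// Sketch: close the stream automatically instead of an explicit finally block.
int len = Ints.checkedCast(status.getLen());
byte[] content = new byte[len];
try (FSDataInputStream in = fs.open(status.getPath())) {
  in.readFully(content);
}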
/**
 * Read in the {@link org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription}
 * stored for the snapshot in the passed directory.
 * @param fs filesystem where the snapshot was taken
 * @param snapshotDir directory where the snapshot was stored
 * @return the stored snapshot description
 * @throws CorruptedSnapshotException if the snapshot cannot be read
 */
public static SnapshotDescription readSnapshotInfo(FileSystem fs, Path snapshotDir)
    throws CorruptedSnapshotException {
  Path snapshotInfo = new Path(snapshotDir, SNAPSHOTINFO_FILE);
  try {
    FSDataInputStream in = null;
    try {
      in = fs.open(snapshotInfo);
      SnapshotDescription desc = SnapshotDescription.parseFrom(in);
      return desc;
    } finally {
      if (in != null) {
        in.close();
      }
    }
  } catch (IOException e) {
    throw new CorruptedSnapshotException("Couldn't read snapshot info from:" + snapshotInfo, e);
  }
}
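A hedged usage sketch; the snapshot directory below is a hypothetical example of the usual .hbase-snapshot layout:

// Hypothetical caller: read the descriptor of a completed snapshot.
FileSystem fs = FileSystem.get(conf);
Path snapshotDir = new Path("/hbase/.hbase-snapshot/my_snapshot"); // assumed path
SnapshotDescription desc = readSnapshotInfo(fs, snapshotDir);
System.out.println(desc.getName());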