in = fs.open(idPath);
try {
    cid = in.readUTF();
    clusterId = new ClusterId(cid);
} catch (EOFException eof) {
@Override
public GlobalDictMetadata readIndexFile(Path dir) throws IOException {
    Path indexFile = new Path(dir, V2_INDEX_NAME);
    try (FSDataInputStream in = fs.open(indexFile)) {
        byte minorVersion = in.readByte(); // include a header to allow minor format changes
        if (minorVersion != MINOR_VERSION_V1) {
            throw new RuntimeException("Unsupported minor version " + minorVersion);
        }
        int baseId = in.readInt();
        int maxId = in.readInt();
        int maxValueLength = in.readInt();
        int nValues = in.readInt();
        String converterName = in.readUTF();
        BytesConverter converter;
        try {
            converter = ClassUtil.forName(converterName, BytesConverter.class).getDeclaredConstructor().newInstance();
        } catch (Exception e) {
            throw new RuntimeException("Fail to instantiate BytesConverter: " + converterName, e);
        }
        int nSlices = in.readInt();
        TreeMap<AppendDictSliceKey, String> sliceFileMap = new TreeMap<>();
        for (int i = 0; i < nSlices; i++) {
            AppendDictSliceKey key = new AppendDictSliceKey();
            key.readFields(in);
            String sliceFileName = in.readUTF();
            sliceFileMap.put(key, sliceFileName);
        }
        return new GlobalDictMetadata(baseId, maxId, maxValueLength, nValues, converter, sliceFileMap);
    }
}
@Override
public GlobalDictMetadata readIndexFile(Path dir) throws IOException {
    Path indexFile = new Path(dir, V1_INDEX_NAME);
    try (FSDataInputStream in = fs.open(indexFile)) {
        int baseId = in.readInt();
        int maxId = in.readInt();
        int maxValueLength = in.readInt();
        int nValues = in.readInt();
        String converterName = in.readUTF();
        BytesConverter converter;
        try {
            converter = ClassUtil.forName(converterName, BytesConverter.class).getDeclaredConstructor().newInstance();
        } catch (Exception e) {
            throw new RuntimeException("Fail to instantiate BytesConverter: " + converterName, e);
        }
        int nSlices = in.readInt();
        TreeMap<AppendDictSliceKey, String> sliceFileMap = new TreeMap<>();
        for (int i = 0; i < nSlices; i++) {
            AppendDictSliceKey key = new AppendDictSliceKey();
            key.readFields(in);
            sliceFileMap.put(key, sliceFileName(key));
        }
        // make sure first key is always ""
        String firstFile = sliceFileMap.remove(sliceFileMap.firstKey());
        sliceFileMap.put(AppendDictSliceKey.START_KEY, firstFile);
        return new GlobalDictMetadata(baseId, maxId, maxValueLength, nValues, converter, sliceFileMap);
    }
}
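Both index readers depend on the DataInput/DataOutput contract: readUTF() can only parse values that were written by writeUTF(), which stores a two-byte length prefix followed by the string in modified UTF-8 (capping a single value at 65,535 encoded bytes). A self-contained round-trip sketch of that framing (illustration only; the class and values are made up):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class Utf8RoundTrip {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(buf)) {
            out.writeInt(42);                            // fixed-width field, like the index headers above
            out.writeUTF("org.example.SomeConverter");   // 2-byte length prefix + modified UTF-8 bytes
        }
        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(buf.toByteArray()))) {
            int id = in.readInt();
            String name = in.readUTF();                  // reads the length prefix, then exactly that many bytes
            System.out.println(id + " -> " + name);
        }
    }
}

The reads must mirror the writes field for field; a single out-of-order read desynchronizes every field that follows it.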
int fileCount = mdis.readInt();
for (int i = 0; i < fileCount; ++i) {
    String nextFile = mdis.readUTF();
    Utilities.FILE_OP_LOGGER.trace("Looking at committed file: {}", nextFile);
    Path path = fs.makeQualified(new Path(nextFile));
String absolutePathString = targetFile.getPath().toString();
String relativePath = absolutePathString.substring(workDirPrefixLength);
targetFiles.put(relativePath, in.readUTF());
/**
 * @see java.io.DataInput#readUTF()
 */
@Override
public String readUTF() throws IOException {
    return this.fis.readUTF();
}
public final String readUTF() throws IOException {
    return in.readUTF();
}
public static String readString(FileSystem fs, String path) {
    // try-with-resources closes the stream even when readUTF() throws
    try (FSDataInputStream in = fs.open(new Path(path))) {
        return in.readUTF();
    } catch (Exception e) {
        // keep the original exception as the cause instead of swallowing it
        throw new RuntimeException("Unable to read property at " + path, e);
    }
}
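For round-trip symmetry, a plausible write-side counterpart (a hypothetical helper, not part of the source; assumes the usual org.apache.hadoop.fs imports): FSDataOutputStream extends java.io.DataOutputStream, so writeUTF() emits exactly the length-prefixed record that readUTF() parses back.

// Hypothetical counterpart to readString; assumes the value encodes to
// at most 65,535 bytes, the hard limit of writeUTF's 2-byte length prefix.
public static void writeString(FileSystem fs, String path, String val) {
    try (FSDataOutputStream out = fs.create(new Path(path), true)) { // true = overwrite
        out.writeUTF(val);
    } catch (Exception e) {
        throw new RuntimeException("Unable to write property at " + path, e);
    }
}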
@Test
public void testHBaseStoreWithLargeCell() throws Exception {
    String path = "/cube/_test_large_cell.json";
    String largeContent = "THIS_IS_A_LARGE_CELL";
    StringEntity content = new StringEntity(largeContent);
    KylinConfig config = KylinConfig.getInstanceFromEnv();
    int origSize = config.getHBaseKeyValueSize();
    ResourceStore store = ResourceStore.getStore(KylinConfig.getInstanceFromEnv());
    try {
        config.setProperty("kylin.hbase.client.keyvalue.maxsize", String.valueOf(largeContent.length() - 1));
        store.deleteResource(path);
        store.putResource(path, content, StringEntity.serializer);
        assertTrue(store.exists(path));
        StringEntity t = store.getResource(path, StringEntity.class, StringEntity.serializer);
        assertEquals(content, t);
        Path redirectPath = ((HBaseResourceStore) store).bigCellHDFSPath(path);
        Configuration hconf = HadoopUtil.getCurrentConfiguration();
        FileSystem fileSystem = FileSystem.get(hconf);
        assertTrue(fileSystem.exists(redirectPath));
        FSDataInputStream in = fileSystem.open(redirectPath);
        assertEquals(largeContent, in.readUTF());
        in.close();
        store.deleteResource(path);
    } finally {
        config.setProperty("kylin.hbase.client.keyvalue.maxsize", "" + origSize);
        store.deleteResource(path);
    }
}
private String getJobSummary(FileContext fc, Path path) throws IOException {
    Path qPath = fc.makeQualified(path);
    FSDataInputStream in = null;
    String jobSummaryString = null;
    try {
        in = fc.open(qPath);
        jobSummaryString = in.readUTF();
    } finally {
        if (in != null) {
            in.close();
        }
    }
    return jobSummaryString;
}
private void loadCounts() throws IOException {
    counts = new HashMap<Counts, Long>();
    FSDataInputStream in = getHdfs().open(getCountsPath());
    while (in.available() > 0) {
        String c = in.readUTF();
        Long count = in.readLong();
        counts.put(Counts.valueOf(c), count);
    }
    in.close();
}
private void loadUnforwardedCounts() throws IOException {
    unforwardedCounts = new HashMap<Unforwarded, Long>();
    FSDataInputStream in = getHdfs().open(getUnforwardedCountsPath());
    while (in.available() > 0) {
        String u = in.readUTF();
        Long count = in.readLong();
        unforwardedCounts.put(Unforwarded.valueOf(u), count);
    }
    in.close();
}
private void loadCounts() throws IOException {
    counts = new HashMap<Counts, Long>();
    FSDataInputStream in = getHdfs().open(getCountsPath());
    while (in.available() > 0) {
        String c = in.readUTF();
        Long count = in.readLong();
        counts.put(Counts.valueOf(c), count);
    }
    in.close();
}
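A caveat on the loaders above: InputStream.available() estimates the bytes readable without blocking, not the bytes remaining in the file, and some FileSystem implementations can legitimately return 0 before the stream is exhausted. A more defensive variant (a sketch under the same assumed fields and helpers, not from the source; needs java.io.EOFException imported) treats EOFException as the end-of-data signal:

private void loadCounts() throws IOException {
    counts = new HashMap<Counts, Long>();
    try (FSDataInputStream in = getHdfs().open(getCountsPath())) {
        while (true) {
            String c;
            try {
                c = in.readUTF();       // throws EOFException once the stream is exhausted
            } catch (EOFException eof) {
                break;                  // clean end of file
            }
            long count = in.readLong(); // a truncated record here still surfaces as EOFException
            counts.put(Counts.valueOf(c), count);
        }
    }
}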
@Override
public Boolean get() {
    try {
        FSDataInputStream in = fs.open(metricsPath);
        String metricsOutput = in.readUTF();
        in.close();
        // Just assert that there is some metrics content in there
        assertTrue(metricsOutput.contains("JvmMetrics"));
        return true;
    } catch (IOException ioe) {
        return false;
    }
}
}, 3000, 60000);