/**
 * Encodes a String as UTF-8 bytes.
 *
 * @param str the String to encode; may be null
 * @return the UTF-8 encoding of {@code str}, or an empty array when str is null
 */
private static byte[] encodeUtf8(String str) {
  if (str == null) {
    return new byte[0];
  }
  // Use the Charset overload: StandardCharsets.UTF_8 is guaranteed present on
  // every JVM, which removes the impossible UnsupportedEncodingException path
  // (whose old message also said "decode" for what is an encode operation).
  return str.getBytes(java.nio.charset.StandardCharsets.UTF_8);
}
/**
 * Resolves the {@link FileSystem} that owns {@code dataPath}.
 *
 * @param dataPath the path whose FileSystem is wanted
 * @param conf the Configuration used for the lookup
 * @return the FileSystem backing {@code dataPath}
 * @throws DatasetIOException if the FileSystem lookup fails
 */
private static FileSystem fsForPath(Path dataPath, Configuration conf) {
  try {
    return dataPath.getFileSystem(conf);
  } catch (IOException ioe) {
    // surface the checked IOException as the project's unchecked wrapper
    throw new DatasetIOException("Cannot get FileSystem for descriptor", ioe);
  }
}
/**
 * Resolves the {@link FileSystem} that owns {@code path}.
 *
 * @param conf the Configuration used for the lookup
 * @param path the path whose FileSystem is wanted
 * @return the FileSystem backing {@code path}
 * @throws DatasetIOException if the FileSystem lookup fails
 */
static FileSystem fsForPath(Configuration conf, Path path) {
  try {
    return path.getFileSystem(conf);
  } catch (IOException ex) {
    // space after the colon so the URI is not fused with the label in the message
    throw new DatasetIOException("Cannot access FileSystem for uri: " + path, ex);
  }
}
public HiveExternalMetadataProvider(Configuration conf, Path rootDirectory) { super(conf); Preconditions.checkNotNull(rootDirectory, "Root cannot be null"); try { this.rootFileSystem = rootDirectory.getFileSystem(conf); this.rootDirectory = rootFileSystem.makeQualified(rootDirectory); } catch (IOException ex) { throw new DatasetIOException("Could not get FileSystem for root path", ex); } }
/**
 * Reads the next record into {@code this.next}.
 *
 * @return true when a record was read, false when the reader is exhausted
 * @throws DatasetIOException if the underlying read fails
 */
private boolean advance() {
  try {
    this.next = reader.readNext();
  } catch (IOException ioe) {
    throw new DatasetIOException("Could not read record", ioe);
  }
  // readNext signals end-of-data with null
  return this.next != null;
}
/**
 * Advances to the next record from the underlying reader, storing it in
 * {@code this.next}. A null {@code next} afterwards signals end of data.
 */
private void advance() {
  try {
    this.next = reader.read();
  } catch (EOFException e) {
    // end of file is expected: mark exhaustion instead of failing.
    // NOTE: this catch must precede the IOException catch below, since
    // EOFException is a subclass of IOException.
    this.next = null;
  } catch (IOException e) {
    // any other read failure is fatal: flag the reader before rethrowing
    // so later calls can see it is unusable
    this.state = ReaderWriterState.ERROR;
    throw new DatasetIOException("Unable to read next record from: " + path, e);
  }
}
/**
 * Creates a provider rooted at {@code rootDirectory}, with the path qualified
 * against its own FileSystem.
 *
 * @param conf the Configuration used to resolve the root FileSystem
 * @param rootDirectory the metadata root directory; must not be null
 * @throws DatasetIOException if the root FileSystem cannot be obtained
 */
public HiveExternalMetadataProvider(Configuration conf, Path rootDirectory) {
  super(conf);
  Preconditions.checkNotNull(rootDirectory, "Root cannot be null");
  try {
    this.rootFileSystem = rootDirectory.getFileSystem(conf);
    this.rootDirectory = this.rootFileSystem.makeQualified(rootDirectory);
  } catch (IOException cause) {
    throw new DatasetIOException("Could not get FileSystem for root path", cause);
  }
}
/**
 * Parses a JSON String into an instance of {@code returnType}.
 *
 * @param json the JSON text to parse
 * @param returnType the class to map the JSON onto
 * @return a new instance of {@code returnType} populated from the JSON
 * @throws ValidationException if the JSON is malformed or does not match the type
 * @throws DatasetIOException for any other read failure
 */
public static <T> T parse(String json, Class<T> returnType) {
  ObjectMapper mapper = new ObjectMapper();
  try {
    return mapper.readValue(json, returnType);
  } catch (JsonParseException | JsonMappingException e) {
    // both mean the caller supplied bad JSON: collapse the duplicate handlers
    throw new ValidationException("Invalid JSON", e);
  } catch (IOException e) {
    throw new DatasetIOException("Cannot initialize JSON parser", e);
  }
}
public FileSystemMetadataProvider(Configuration conf, Path rootDirectory) { Preconditions.checkNotNull(conf, "Configuration cannot be null"); Preconditions.checkNotNull(rootDirectory, "Root directory cannot be null"); this.conf = conf; try { this.rootFileSystem = rootDirectory.getFileSystem(conf); this.rootDirectory = rootFileSystem.makeQualified(rootDirectory); } catch (IOException ex) { throw new DatasetIOException("Cannot get FileSystem for root path", ex); } }
/**
 * Creates the directory for a newly added partition, then delegates to the
 * superclass notification.
 *
 * @param namespace the dataset namespace
 * @param name the dataset name
 * @param path the partition path, relative to the dataset directory
 * @throws DatasetIOException if the partition directory cannot be created
 */
@Override
public void partitionAdded(String namespace, String name, String path) {
  Path partitionPath = new Path(pathForDataset(namespace, name), path);
  try {
    // FileSystem.mkdirs can signal failure by returning false instead of
    // throwing; treat that as a failure too rather than continuing silently
    if (!rootFileSystem.mkdirs(partitionPath)) {
      throw new IOException("mkdirs returned false for " + partitionPath);
    }
  } catch (IOException ex) {
    throw new DatasetIOException(
        "Unable to create partition directory " + partitionPath, ex);
  }
  super.partitionAdded(namespace, name, path);
}
/**
 * Creates a Builder with an empty property map, the process-default
 * Configuration, and the default FileSystem's URI.
 *
 * @throws DatasetIOException if the default FileSystem cannot be obtained
 */
public Builder() {
  this.properties = Maps.newHashMap();
  this.conf = DefaultConfiguration.get();
  try {
    this.defaultFS = FileSystem.get(this.conf).getUri();
  } catch (IOException ioe) {
    throw new DatasetIOException("Cannot get the default FS", ioe);
  }
}
/**
 * Builds an iterator of JsonNode values read lazily from the given stream.
 *
 * @param stream the InputStream to read JSON from
 * @return an Iterator over the top-level JSON values in the stream
 * @throws DatasetIOException if the parser cannot be created over the stream
 */
public static Iterator<JsonNode> parser(final InputStream stream) {
  try {
    final JsonParser jsonParser = FACTORY.createParser(stream);
    // a codec is required before readValuesAs can bind trees
    jsonParser.setCodec(new ObjectMapper());
    return jsonParser.readValuesAs(JsonNode.class);
  } catch (IOException ioe) {
    throw new DatasetIOException("Cannot read from stream", ioe);
  }
}
/**
 * Parses a JSON file into an instance of {@code returnType}.
 *
 * @param file the file containing JSON to parse
 * @param returnType the class to map the JSON onto
 * @return a new instance of {@code returnType} populated from the JSON
 * @throws ValidationException if the JSON is malformed or does not match the type
 * @throws DatasetIOException for any other read failure
 */
public static <T> T parse(File file, Class<T> returnType) {
  ObjectMapper mapper = new ObjectMapper();
  try {
    return mapper.readValue(file, returnType);
  } catch (JsonParseException | JsonMappingException e) {
    // both mean the file holds bad JSON: collapse the duplicate handlers
    throw new ValidationException("Invalid JSON", e);
  } catch (IOException e) {
    throw new DatasetIOException("Cannot initialize JSON parser", e);
  }
}
/**
 * Parses JSON from an InputStream into an instance of {@code returnType}.
 *
 * @param in the stream containing JSON to parse
 * @param returnType the class to map the JSON onto
 * @return a new instance of {@code returnType} populated from the JSON
 * @throws ValidationException if the JSON is malformed or does not match the type
 * @throws DatasetIOException for any other read failure
 */
public static <T> T parse(InputStream in, Class<T> returnType) {
  ObjectMapper mapper = new ObjectMapper();
  try {
    return mapper.readValue(in, returnType);
  } catch (JsonParseException | JsonMappingException e) {
    // both mean the stream holds bad JSON: collapse the duplicate handlers
    throw new ValidationException("Invalid JSON", e);
  } catch (IOException e) {
    throw new DatasetIOException("Cannot initialize JSON parser", e);
  }
}
/**
 * Creates the directory for a newly added partition, then delegates to the
 * superclass notification.
 *
 * @param namespace the dataset namespace
 * @param name the dataset name
 * @param path the partition path, relative to the dataset directory
 * @throws DatasetIOException if the partition directory cannot be created
 */
@Override
public void partitionAdded(String namespace, String name, String path) {
  Path partitionPath = new Path(pathForDataset(namespace, name), path);
  try {
    // FileSystem.mkdirs can signal failure by returning false instead of
    // throwing; treat that as a failure too rather than continuing silently
    if (!rootFileSystem.mkdirs(partitionPath)) {
      throw new IOException("mkdirs returned false for " + partitionPath);
    }
  } catch (IOException ex) {
    throw new DatasetIOException(
        "Unable to create partition directory " + partitionPath, ex);
  }
  super.partitionAdded(namespace, name, path);
}
/**
 * Parses a single text line into a record, optionally reusing an instance.
 *
 * @param line the raw line to parse
 * @param reuse an existing record to populate, or null to allocate a new one
 * @return the parsed record
 * @throws DatasetIOException if the line cannot be parsed
 */
public E read(String line, @Nullable E reuse) {
  try {
    return builder.makeRecord(parser.parseLine(line), reuse);
  } catch (IOException cause) {
    throw new DatasetIOException("Cannot parse line: " + line, cause);
  }
}
/**
 * Creates an in-memory metadata provider backed by the default FileSystem.
 *
 * @param conf the Configuration used to resolve the FileSystem; must not be null
 * @throws DatasetIOException if the default FileSystem cannot be obtained
 */
public MemoryMetadataProvider(Configuration conf) {
  Preconditions.checkNotNull(conf, "Configuration cannot be null");
  this.conf = conf;
  try {
    this.fs = FileSystem.get(conf);
  } catch (IOException ioe) {
    throw new DatasetIOException("Could not get default FileSystem", ioe);
  }
}
/**
 * Completes a staged replacement by renaming every staged pair in order.
 *
 * @param fs the FileSystem performing the renames
 * @param staged pairs of (source, destination) paths to rename
 * @throws DatasetIOException if any rename fails; note that earlier renames
 *     in the list will already have taken effect
 */
static void finishMove(FileSystem fs, List<Pair<Path, Path>> staged) {
  try {
    for (Pair<Path, Path> move : staged) {
      // rename reports failure via its return value, so convert false to an
      // exception here and let the catch below wrap it
      if (!fs.rename(move.first(), move.second())) {
        throw new IOException(
            "Failed to rename " + move.first() + " to " + move.second());
      }
    }
  } catch (IOException ioe) {
    throw new DatasetIOException("Could not finish replacement", ioe);
  }
}
/**
 * Flushes buffered writes to the table.
 *
 * @throws IllegalStateException if the writer is not open
 * @throws DatasetIOException if flushing the table fails
 */
@Override
public void flush() {
  Preconditions.checkState(state.equals(ReaderWriterState.OPEN),
      "Attempt to flush a writer in state:%s", state);
  try {
    table.flushCommits();
  } catch (IOException ioe) {
    throw new DatasetIOException(
        "Error flushing commits for table [" + table + "]", ioe);
  }
}
/**
 * Returns the next record, reading into a freshly created instance.
 *
 * @return the next record from the underlying reader
 * @throws IllegalStateException if the reader is not open
 * @throws DatasetIOException if the underlying read fails
 */
@Override
public E next() {
  Preconditions.checkState(state.equals(ReaderWriterState.OPEN),
      "Attempt to read from a file in state:%s", state);
  // allocate the target record up front so the reader can fill it in
  final E record = DataModelUtil.createRecord(type, schema);
  try {
    return reader.next(record);
  } catch (IOException ioe) {
    throw new DatasetIOException("Cannot advance reader", ioe);
  }
}