/**
 * Returns the transaction ID.
 * @return the transaction ID
 * @throws CommandConfigurationException if no transaction ID was specified
 */
public String getId() {
    if (transactionId == null) {
        throw new CommandConfigurationException(
                "transaction ID (execution ID of the target jobflow) must be specified");
    }
    return transactionId;
}
}
/**
 * Reports a configuration error by logging its message (without a stack trace).
 * @param e the configuration exception to report
 */
private static void handle(CommandConfigurationException e) {
    String message = e.getMessage();
    LOG.error("{}", message);
}
/**
 * Obtains the file status of the given Hadoop path.
 * @param path the target Hadoop path
 * @return the file status, or empty if the path does not exist
 * @throws CommandConfigurationException if an I/O error occurred while resolving the path
 */
private Optional<org.apache.hadoop.fs.FileStatus> stat(org.apache.hadoop.fs.Path path) {
    // resolve the file system first; it may itself raise a configuration error
    org.apache.hadoop.fs.FileSystem fs = dataSourceParameter.getHadoopFileSystem(path);
    try {
        return Optional.of(fs.getFileStatus(path));
    } catch (FileNotFoundException e) {
        // a missing path is an expected outcome, not an error
        LOG.trace("not found: {}", path, e);
        return Optional.empty();
    } catch (IOException e) {
        throw new CommandConfigurationException(MessageFormat.format(
                "error occurred while resolving Hadoop path: {0}", path), e);
    }
}
/**
 * Program entry.
 * @param args command line tokens
 */
public static void main(String... args) {
    // map each failure category to a distinct process exit code
    int exitCode = 0;
    try {
        exec(args);
    } catch (CommandExecutionException e) {
        // the command itself failed while running
        LOG.error("error occurred while executing command", e);
        exitCode = 1;
    } catch (CommandConfigurationException e) {
        // invalid configuration: message only; full detail at debug level
        LOG.error("{}", e.getMessage());
        LOG.debug("configuration error detail: {}", Arrays.toString(args), e);
        exitCode = 2;
    } catch (ParameterException e) {
        // command line arguments could not be parsed
        LOG.error("cannot recognize arguments: {}", Arrays.toString(args), e);
        exitCode = 3;
    }
    if (exitCode != 0) {
        System.exit(exitCode);
    }
}
/**
 * Obtains the file status of the given Hadoop path.
 * @param path the target Hadoop path
 * @return the file status, or empty if the path does not exist
 * @throws CommandConfigurationException if an I/O error occurred while resolving the path
 */
private Optional<FileStatus> stat(Path path) {
    try {
        FileStatus status = dataSourceParameter.getHadoopFileSystem(path).getFileStatus(path);
        return Optional.of(status);
    } catch (FileNotFoundException e) {
        // a missing path is an expected outcome, not an error
        LOG.trace("not found: {}", path, e);
        return Optional.empty();
    } catch (IOException e) {
        throw new CommandConfigurationException(MessageFormat.format(
                "error occurred while resolving Hadoop path: {0}", path), e);
    }
}
/**
 * Program entry.
 * @param args command line tokens
 */
public static void main(String... args) {
    // translate each failure category into a distinct process exit status
    int status = 0;
    try {
        exec(args);
    } catch (CommandExecutionException e) {
        // execution failure: log with stack trace
        LOG.error("error occurred while executing command", e);
        status = 1;
    } catch (CommandConfigurationException e) {
        // configuration failure: message only; keep the detail at debug level
        LOG.error("{}", e.getMessage());
        LOG.debug("configuration error detail: {}", Arrays.toString(args), e);
        status = 2;
    } catch (ParameterException e) {
        // argument parsing failure
        LOG.error("cannot recognize arguments: {}", Arrays.toString(args), e);
        status = 3;
    }
    if (status != 0) {
        System.exit(status);
    }
}
/**
 * Returns the path.
 * @return the path
 * @throws CommandConfigurationException if there is no available batch application base directory
 */
public Path getPath() {
    if (path == null) {
        throw new CommandConfigurationException(MessageFormat.format(
                "please set environment variable \"{0}\", or specify {1} /path/to/batchapps to command line",
                ENV_ASAKUSA_HOME, OPT_BATCHAPPS));
    }
    Path candidate = LocalPath.of(path);
    // the base directory must actually exist as a directory
    if (!Files.isDirectory(candidate)) {
        throw new CommandConfigurationException(MessageFormat.format(
                "batch applications directory is not found: {0}", candidate));
    }
    return candidate;
}
/** * Program entry. * @param args command line tokens */ public static void main(String... args) { try { exec(args); } catch (CommandExecutionException e) { LOG.error("error occurred while executing command", e); System.exit(1); } catch (CommandConfigurationException e) { LOG.error("{}", e.getMessage()); LOG.debug("configuration error detail: {}", Arrays.toString(args), e); System.exit(2); } catch (ParameterException e) { JCommanderWrapper.handle(e, s -> LOG.error("{}", s)); //$NON-NLS-1$ System.exit(3); } }
/**
 * Returns the Hadoop file system for the path.
 * @param path the target path
 * @return the corresponded file system object
 * @throws CommandConfigurationException if the file system could not be resolved
 */
public org.apache.hadoop.fs.FileSystem getHadoopFileSystem(org.apache.hadoop.fs.Path path) {
    try {
        return path.getFileSystem(getConfiguration());
    } catch (IOException e) {
        // some IOExceptions carry no message; fall back to the full toString form
        String detail = e.getMessage();
        if (detail == null) {
            detail = e.toString();
        }
        throw new CommandConfigurationException(MessageFormat.format(
                "error occurred while resolving Hadoop path: {0} ({1})",
                path, detail), e);
    }
}
/**
 * Returns the target jobflow, and must be it is unique in the current context.
 * @return the target jobflow
 * @throws CommandConfigurationException if there are no available jobflows, or they are ambiguous
 */
public JobflowInfo getUniqueJobflow() {
    List<JobflowInfo> candidates = getJobflows();
    // report the empty case explicitly: the "ambiguous" message with an empty
    // ID list would be misleading when there is no jobflow at all
    if (candidates.isEmpty()) {
        throw new CommandConfigurationException("there are no available jobflows");
    }
    if (candidates.size() != 1) {
        throw new CommandConfigurationException(MessageFormat.format(
                "target jobflow is ambiguous, please specify \"--jobflow <flow-ID>\": '{'{0}'}'",
                candidates.stream()
                        .map(JobflowInfo::getId)
                        .collect(Collectors.joining(", "))));
    }
    return candidates.get(0);
}
}
/**
 * Resolves the destination local path (the last command line path token).
 * @return the resolved destination path
 * @throws CommandConfigurationException if the destination already exists and overwriting is disabled
 */
private java.nio.file.Path getDestination() {
    String lastToken = paths.get(paths.size() - 1);
    java.nio.file.Path destination = localPathParameter.resolve(lastToken);
    // only probe the file system when overwriting is not allowed
    if (!overwriteParameter.isEnabled() && Files.isRegularFile(destination)) {
        throw new CommandConfigurationException(MessageFormat.format(
                "destination file already exists: {0}", destination));
    }
    return destination;
}
/** * Returns a local path. * @param path the path string * @param defaultWorkingDirectory the default working directory (nullable) * @return the local path * @throws CommandConfigurationException if the path cannot be resolved */ public static Path of(String path, Path defaultWorkingDirectory) { Path candidate = Paths.get(path); if (candidate.isAbsolute()) { return candidate; } else if (WORKING_DIRECTORY != null) { if (WORKING_DIRECTORY.isAbsolute() == false) { throw new CommandConfigurationException(MessageFormat.format( "custom working dierctory path must be absolute: {0}", WORKING_DIRECTORY)); } Path result = WORKING_DIRECTORY.resolve(path); LOG.debug("resolve local path: {} -> {}", path, result); //$NON-NLS-1$ return result; } else if (defaultWorkingDirectory != null) { return defaultWorkingDirectory.resolve(path).toAbsolutePath(); } else { throw new CommandConfigurationException(MessageFormat.format( "local file path must be absolute: {0}", path)); } } }
/**
 * Resolves the candidates of logical locations into physical one.
 * @param sources candidates of logical locations
 * @return resolved physical location
 * @throws CommandConfigurationException if the sources resolve to two or more distinct locations
 */
public Optional<String> resolve(Collection<? extends LocationInfo> sources) {
    List<Optional<String>> resolved = sources.stream()
            .map(this::resolve)
            .distinct()
            .collect(Collectors.toList());
    // more than one distinct resolution means the table location is ambiguous
    if (resolved.size() > 1) {
        String listing = resolved.stream()
                .map(it -> it.orElse("(N/A)"))
                .collect(Collectors.joining(", "));
        throw new CommandConfigurationException(MessageFormat.format(
                "ambiguous location of table: {0}", listing));
    }
    return resolved.isEmpty() ? Optional.empty() : resolved.get(0);
}
}
private static Pattern parseSegment(String pattern) { StringBuilder buf = new StringBuilder(); int start = 0; while (true) { int next = pattern.indexOf('*', start); if (next < 0) { break; } if (start < next) { buf.append(Pattern.quote(pattern.substring(start, next))); } buf.append(".*"); //$NON-NLS-1$ start = next + 1; } if (start < pattern.length()) { buf.append(Pattern.quote(pattern.substring(start))); } try { return Pattern.compile(buf.toString()); } catch (PatternSyntaxException e) { throw new CommandConfigurationException(MessageFormat.format( "cannot recognize pattern: {0}", pattern), e); } }
/**
 * Executes this command: creates each of the given paths as a directory
 * on the corresponding Hadoop file system.
 * @throws CommandConfigurationException if no target paths are specified
 */
@Override
public void run() {
    LOG.debug("starting {}", getClass().getSimpleName());
    if (paths.isEmpty()) {
        throw new CommandConfigurationException("no target paths are specified");
    }
    List<Path> targets = paths.stream()
            .map(dataSourceParameter::resolveAsHadoopPath)
            .collect(Collectors.toList());
    try (PrintWriter writer = outputParameter.open()) {
        // one mkdir task per target path, run under the configured executor
        List<Task> tasks = targets.stream()
                .map(target -> (Task) context -> mkdir(writer, target))
                .collect(Collectors.toList());
        executorParameter.execute(tasks);
    }
}
/**
 * Returns a list of available batch application names on the current context.
 * @param context the current context
 * @return the available application names
 * @throws CommandConfigurationException if an error occurred while traversing the application directory
 */
public static List<String> getAvailableApplications(ExecutionContext context) {
    return TaskExecutors.findApplicationHome(context)
            .filter(Files::isDirectory)
            .map(it -> {
                // Files.list() keeps a directory handle open until the stream is
                // closed; materialize the listing inside try-with-resources to
                // avoid leaking the handle
                try (Stream<Path> entries = Files.list(it)) {
                    return entries.collect(Collectors.toList()).stream();
                } catch (IOException e) {
                    throw new CommandConfigurationException(MessageFormat.format(
                            "exception occurred while traversing directory: {0}", it), e);
                }
            })
            .orElseGet(Stream::empty)
            // an application is one that contains a workflow definition file
            .filter(it -> Files.exists(it.resolve(TaskExecutors.LOCATION_APPLICATION_WORKFLOW_DEFINITION)))
            .map(Path::getFileName)
            .filter(it -> it != null)
            .map(Path::toString)
            .sorted()
            .collect(Collectors.toList());
}
}
/**
 * Returns workflow information.
 * @param context the current context
 * @return workflow information
 * @throws CommandConfigurationException if there is no available workflow file
 */
public BatchInfo getBatchInfo(ExecutionContext context) {
    if (workflow == null) {
        throw new CommandConfigurationException(MessageFormat.format(
                "target batch ID must be specified ({0})",
                getAvailableApplicationsMessage(context)));
    }
    Path path = resolvePath(context)
            .orElseThrow(() -> new CommandConfigurationException(MessageFormat.format(
                    // fixed message: missing space between the quoted ID and "is not found"
                    "batch application \"{0}\" is not found ({1})",
                    workflow,
                    getAvailableApplicationsMessage(context))));
    LOG.debug("loading workflow definition: {}", path);
    File file = path.toFile();
    ObjectMapper mapper = new ObjectMapper();
    try {
        return mapper.readValue(file, BatchInfo.class);
    } catch (IOException e) {
        throw new CommandConfigurationException(MessageFormat.format(
                "error occurred while loading workflow definition: {0}", file), e);
    }
}
/** * Opens the command output. * @return the command output */ public PrintWriter open() { try { if (Objects.equals(output, DEFAULT_OUTPUT)) { return new PrintWriter(new OutputStreamWriter(System.out, encoding), true) { @Override public void close() { // NOTE: never close stdout flush(); } }; } else { Path file = LocalPath.of(output); Path parent = file.getParent(); if (parent != null) { Files.createDirectories(parent); } return new PrintWriter(Files.newBufferedWriter(file, Charset.forName(encoding)), true); } } catch (IOException e) { throw new CommandConfigurationException(MessageFormat.format( "error occurred while configuring the command output: output={0}, encoding={1}", output, encoding), e); } } }
/**
 * Executes this command: prints the detail of the target transaction.
 * @throws CommandConfigurationException if the target transaction is not found
 */
@Override
public void run() {
    LOG.debug("starting {}", getClass().getSimpleName());
    String targetId = transactionIdParameter.getId();
    // a missing transaction usually means it was already completed and removed
    TransactionInfo spec = transactionEditorParameter.find(targetId)
            .orElseThrow(() -> new CommandConfigurationException(MessageFormat.format(
                    "transaction \"{0}\" is not found (may be already completed)", targetId)));
    try (PrintWriter writer = outputParameter.open()) {
        print(writer, spec, 0);
    }
}
/**
 * Loads the given information file.
 * @param path the target information file path
 * @return the loaded information
 * @throws CommandConfigurationException if error occurred while loading the information file
 */
public static BatchInfo load(Path path) {
    try {
        // deserialize the JSON workflow definition into a BatchInfo
        return new ObjectMapper().readValue(path.toFile(), BatchInfo.class);
    } catch (IOException e) {
        throw new CommandConfigurationException(MessageFormat.format(
                "failed to load DSL information file: {0}", path), e);
    }
}