// NOTE(review): truncated fragment — appears to be the body of HadoopScript.resolve(...):
// it rejects a null handler, short-circuits when already resolved, resolves each Hadoop
// property and environment variable through `resolver`, then rebuilds the script with the
// resolved maps. The method header and several closing braces fall outside this view
// (the `if` and `for` bodies are visibly unbalanced), so the code is left byte-for-byte
// untouched rather than reconstructed.
throw new IllegalArgumentException("handler must not be null"); //$NON-NLS-1$ if (isResolved()) { return this; for (Map.Entry<String, String> entry : getHadoopProperties().entrySet()) { resolvedProperties.put(entry.getKey(), resolver.resolve(entry.getValue())); for (Map.Entry<String, String> entry : getEnvironmentVariables().entrySet()) { resolvedEnvironments.put(entry.getKey(), resolver.resolve(entry.getValue())); return new HadoopScript( getId(), getBlockerIds(), className, resolvedProperties,
/**
 * Returns a diagnostic string with this script's ID, blockers, main class,
 * Hadoop properties, and environment variables.
 * @return a string representation of this script
 */
@Override
public String toString() {
    // single quotes are doubled in MessageFormat patterns, hence '{' / '}'
    String pattern = "Hadoop'{'id={0}, blockers={1}, class={2}, properties={3}, environment={4}'}'";
    Object[] arguments = {
            getId(),
            getBlockerIds(),
            getClassName(),
            getHadoopProperties(),
            getEnvironmentVariables(),
    };
    return MessageFormat.format(pattern, arguments);
}
// NOTE(review): truncated fragment of a script-serialization routine — it writes the
// script's main class name under `scriptPrefix + KEY_CLASS_NAME`, then each Hadoop
// property under `scriptPrefix + KEY_PROP_PREFIX + <key>`. The enclosing method header
// and the loop's closing brace are outside this view, so the code is left untouched.
properties.setProperty(scriptPrefix + KEY_CLASS_NAME, s.getClassName()); String propPrefix = scriptPrefix + KEY_PROP_PREFIX; for (Map.Entry<String, String> entry : s.getHadoopProperties().entrySet()) { properties.setProperty(propPrefix + entry.getKey(), entry.getValue());
/**
 * Runs the cleanup stage for the current phase.
 * A synthetic {@code HadoopScript} is built around {@code CLEANUP_STAGE_CLASS};
 * it uses the phase symbol as its stage ID and needs no blockers, Hadoop
 * properties, or environment variables.
 * @param monitor the progress monitor of this operation
 * @param context the current execution context
 * @throws InterruptedException if this operation is interrupted
 * @throws IOException if failed to execute the cleanup stage
 */
@Override
public void cleanUp(
        ExecutionMonitor monitor,
        ExecutionContext context) throws InterruptedException, IOException {
    run(monitor, context, new HadoopScript(
            context.getPhase().getSymbol(),
            Collections.<String>emptySet(),
            CLEANUP_STAGE_CLASS,
            Collections.<String, String>emptyMap(),
            Collections.<String, String>emptyMap()));
}
/**
 * Computes the Hadoop properties for executing the target script.
 * Handler-level properties are inserted first and may be overridden by
 * script-level properties; the tracking ID is always set last so nothing
 * can clobber it. A {@code TreeMap} keeps the result sorted by key.
 * @param context the current execution context
 * @param script the target script
 * @return the merged Hadoop properties
 * @throws InterruptedException if this operation is interrupted
 * @throws IOException if failed to compute the properties
 */
private Map<String, String> buildHadoopProperties(
        ExecutionContext context,
        HadoopScript script) throws InterruptedException, IOException {
    assert context != null;
    assert script != null;
    Map<String, String> results = new TreeMap<>();
    results.putAll(getProperties(context, script));
    results.putAll(script.getHadoopProperties());
    results.put(HadoopScriptUtil.PROP_TRACKING_ID, Job.computeTrackingId(context, script));
    return results;
}
/**
 * Builds the command line for executing the target script:
 * the launcher command, the main class name, the batch/flow/execution IDs,
 * the serialized batch arguments, and finally one {@code -D key=value}
 * pair per Hadoop property.
 * @param context the current execution context
 * @param script the target script
 * @return the command line tokens in execution order
 * @throws IOException if failed to build the command
 * @throws InterruptedException if this operation is interrupted
 */
private List<String> buildExecutionCommand(
        ExecutionContext context,
        HadoopScript script) throws IOException, InterruptedException {
    assert context != null;
    assert script != null;
    List<String> results = new ArrayList<>();
    results.add(getCommand(context, PATH_EXECUTE, script));
    results.add(script.getClassName());
    results.add(context.getBatchId());
    results.add(context.getFlowId());
    results.add(context.getExecutionId());
    results.add(context.getArgumentsAsString());
    // each Hadoop property becomes a separate "-D" flag followed by "key=value"
    for (Map.Entry<String, String> property : buildHadoopProperties(context, script).entrySet()) {
        results.add("-D");
        results.add(MessageFormat.format("{0}={1}", property.getKey(), property.getValue()));
    }
    return results;
}
// NOTE(review): two-statement fragment — presumably one branch of a switch (the trailing
// `break`) that resolves a script via `exec.resolve(...)` and registers it as a
// ScriptJob bound to `hadoopHandler`. The enclosing method and switch are not visible,
// so the code is left untouched.
results.add(new ScriptJob<>(exec.resolve(context, hadoopHandler), hadoopHandler)); break;
// NOTE(review): truncated fragment of a script-deserialization routine — it reads the
// main class name and the Hadoop property map from `contents`, then starts constructing
// a HadoopScript whose argument list is cut off mid-call in this view; left untouched.
// NOTE(review): `createPrefixMap(contents, KEY_PROP_PREFIX)` uses the bare key prefix
// while the matching store side (elsewhere) writes under `scriptPrefix + KEY_PROP_PREFIX`
// — presumably `contents` is already scoped to the script prefix; TODO confirm against
// the caller.
String className = extract(contents, prefix, KEY_CLASS_NAME); Map<String, String> properties = PropertiesUtil.createPrefixMap(contents, KEY_PROP_PREFIX); script = new HadoopScript( scriptId, blockers, className, properties, environmentVariables,
private JobScript convert(ExecutionContext context, HadoopScript script) throws InterruptedException, IOException { assert context != null; assert script != null; JobScript result = new JobScript(); result.setBatchId(context.getBatchId()); result.setFlowId(context.getFlowId()); result.setExecutionId(context.getExecutionId()); result.setPhase(context.getPhase()); result.setArguments(new HashMap<>(context.getArguments())); result.setStageId(script.getId()); result.setMainClassName(script.getClassName()); Map<String, String> props = new HashMap<>(); props.putAll(getProperties(context, script)); props.putAll(script.getHadoopProperties()); props.put(HadoopScriptUtil.PROP_TRACKING_ID, Job.computeTrackingId(context, script)); result.setProperties(props); Map<String, String> env = new HashMap<>(); // NOTE: Handler has only dummy environment variables // env.putAll(getEnvironmentVariables(context, script)); env.putAll(context.getEnvironmentVariables()); env.putAll(script.getEnvironmentVariables()); result.setEnvironmentVariables(env); return result; }
/**
 * Runs the cleanup stage for the current phase if cleanup is enabled for
 * this handler; otherwise only logs that cleanup was skipped.
 * The progress monitor is opened for a single unit of work and is always
 * closed, even when execution fails.
 * @param monitor the progress monitor of this operation
 * @param context the current execution context
 * @throws InterruptedException if this operation is interrupted
 * @throws IOException if failed to execute the cleanup stage
 */
@Override
public void cleanUp(
        ExecutionMonitor monitor,
        ExecutionContext context) throws InterruptedException, IOException {
    monitor.open(1);
    try {
        if (!cleanup) {
            // cleanup disabled for this handler: record the skip and return
            YSLOG.info("I51002",
                    context.getBatchId(), context.getFlowId(), context.getExecutionId(),
                    getHandlerId());
        } else {
            YSLOG.info("I51001",
                    context.getBatchId(), context.getFlowId(), context.getExecutionId(),
                    getHandlerId());
            // synthetic script: phase symbol as stage ID, no blockers,
            // no Hadoop properties, no environment variables
            execute0(monitor, context, new HadoopScript(
                    context.getPhase().getSymbol(),
                    Collections.<String>emptySet(),
                    CLEANUP_STAGE_CLASS,
                    Collections.<String, String>emptyMap(),
                    Collections.<String, String>emptyMap()));
        }
    } finally {
        monitor.close();
    }
}