public DefaultTwillRunnableSpecification(String className, TwillRunnableSpecification other) { this.className = className; this.name = other.getName(); this.arguments = Collections.unmodifiableMap(new HashMap<String, String>(other.getConfigs())); }
/**
 * Copy-style constructor: adopts the name and configs of the given
 * specification while overriding the runnable class name.
 *
 * @param className fully-qualified class name to use for this spec
 * @param other existing specification to copy name/configs from
 */
public DefaultTwillRunnableSpecification(String className, TwillRunnableSpecification other) {
  this.className = className;
  this.name = other.getName();
  // Snapshot the configs and expose them immutably.
  this.arguments = Collections.unmodifiableMap(
    new HashMap<String, String>(other.getConfigs()));
}
/**
 * Initializes this runnable with its Twill context, then delegates to the
 * subclass hook {@code doInitialize} with the first application argument
 * interpreted as a local file path.
 *
 * <p>Any failure is logged and rethrown as an unchecked exception.
 *
 * @param context the Twill runtime context for this runnable
 */
@Override
public final void initialize(TwillContext context) {
  this.context = context;
  // Fall back to the spec-declared name when no name was set earlier.
  if (name == null) {
    name = context.getSpecification().getName();
  }
  // Fix: use parameterized SLF4J logging instead of string concatenation,
  // consistent with the "Runnable initialized" call below.
  LOG.info("Initializing runnable: {}", name);
  try {
    doInitialize(new File(context.getApplicationArguments()[0]));
    LOG.info("Runnable initialized: {}", name);
  } catch (Throwable t) {
    LOG.error(t.getMessage(), t);
    throw Throwables.propagate(t);
  }
}
/**
 * Captures the Twill context, resolves the runnable name, and invokes the
 * subclass initialization hook {@code doInitialize} with a {@link File}
 * built from the first application argument.
 *
 * @param context the Twill runtime context for this runnable
 */
@Override
public final void initialize(TwillContext context) {
  this.context = context;
  if (name == null) {
    // No explicit name: take it from the runnable specification.
    name = context.getSpecification().getName();
  }
  // Fix: parameterized logging (was string concatenation), matching the
  // sibling log statement below.
  LOG.info("Initializing runnable: {}", name);
  try {
    doInitialize(new File(context.getApplicationArguments()[0]));
    LOG.info("Runnable initialized: {}", name);
  } catch (Throwable t) {
    LOG.error(t.getMessage(), t);
    throw Throwables.propagate(t);
  }
}
/**
 * Serializes a {@link TwillRunnableSpecification} into a JSON object of the
 * form {@code {"classname": ..., "name": ..., "arguments": {...}}}.
 */
@Override
public JsonElement serialize(TwillRunnableSpecification src, Type typeOfSrc, JsonSerializationContext context) {
  // Resolve the generic Map<String, String> type once for the configs field.
  Type configsType = new TypeToken<Map<String, String>>() { }.getType();
  JsonObject result = new JsonObject();
  result.addProperty("classname", src.getClassName());
  result.addProperty("name", src.getName());
  result.add("arguments", context.serialize(src.getConfigs(), configsType));
  return result;
}
/**
 * Gson serializer: turns a runnable specification into JSON with the keys
 * {@code classname}, {@code name} and {@code arguments}.
 */
@Override
public JsonElement serialize(TwillRunnableSpecification src, Type typeOfSrc, JsonSerializationContext context) {
  JsonObject json = new JsonObject();
  json.addProperty("classname", src.getClassName());
  json.addProperty("name", src.getName());
  // Configs are serialized through the context so nested adapters apply.
  JsonElement configsJson =
    context.serialize(src.getConfigs(), new TypeToken<Map<String, String>>() { }.getType());
  json.add("arguments", configsJson);
  return json;
}
/**
 * Adds a runnable with the given resource specification, using the name the
 * runnable declares for itself via {@code configure()}.
 */
@Override
public RuntimeSpecificationAdder add(TwillRunnable runnable, ResourceSpecification resourceSpec) {
  String declaredName = runnable.configure().getName();
  return add(declaredName, runnable, resourceSpec);
}
/**
 * Convenience overload: delegates to the named {@code add}, taking the name
 * from the runnable's own specification.
 */
@Override
public RuntimeSpecificationAdder add(TwillRunnable runnable, ResourceSpecification resourceSpec) {
  // The runnable names itself through configure().
  return add(runnable.configure().getName(), runnable, resourceSpec);
}
/**
 * Adds a runnable under the name it declares for itself via
 * {@code configure()}, with default resources.
 */
@Override
public RuntimeSpecificationAdder add(TwillRunnable runnable) {
  String declaredName = runnable.configure().getName();
  return add(declaredName, runnable);
}
/**
 * Convenience overload: delegates to the named {@code add}, resolving the
 * name from the runnable's own specification.
 */
@Override
public RuntimeSpecificationAdder add(TwillRunnable runnable) {
  // Self-declared name from the runnable's configuration.
  return add(runnable.configure().getName(), runnable);
}
/**
 * Initializes the runnable: loads the Hadoop and CDAP configurations from
 * the file paths given in the spec's configs (keys {@code "hConf"} and
 * {@code "cConf"}), runs the subclass hook {@code doInit}, then collects the
 * services this runnable manages.
 *
 * <p>Statement order matters here: UGI must see the Hadoop configuration
 * before anything else touches security, and {@code doInit} must complete
 * before services are gathered. Any failure is rethrown unchecked.
 */
@Override public final void initialize(TwillContext context) {
  super.initialize(context);
  name = context.getSpecification().getName();
  Map<String, String> configs = context.getSpecification().getConfigs();
  try {
    // Load configuration: start from a cleared Configuration so ONLY the
    // shipped hConf file contributes settings (no host-local defaults).
    hConf = new Configuration();
    hConf.clear();
    hConf.addResource(new File(configs.get("hConf")).toURI().toURL());
    // Hadoop security must be configured before any filesystem/RPC use.
    UserGroupInformation.setConfiguration(hConf);
    cConf = CConfiguration.create();
    cConf.clear();
    cConf.addResource(new File(configs.get("cConf")).toURI().toURL());
    LOG.debug("{} cConf {}", name, cConf);
    LOG.debug("{} HBase conf {}", name, hConf);
    // Subclass initialization, then collect the services it wants run.
    doInit(context);
    services = Lists.newArrayList();
    getServices(services);
    Preconditions.checkArgument(!services.isEmpty(), "Should have at least one service");
  } catch (Throwable t) {
    throw Throwables.propagate(t);
  }
}
/**
 * Returns the metric context for the given program run.
 *
 * <p>The returned string has the form
 * {@code {programName}.{runnableName}.{instanceId}} — note the instance id
 * suffix, taken from the Twill context.
 */
private String getMetricContext(Program program, TwillContext context) {
  String metricContext = program.getName();
  metricContext += "." + context.getSpecification().getName() + "." + context.getInstanceId();
  return metricContext;
}
}
/**
 * Builds the metric tags for a program run: namespace, run id, application,
 * a program-type-specific tag for the program name, and — for flows only —
 * the flowlet name taken from the runnable specification.
 */
private static Map<String, String> getMetricContext(ProgramId programId, TwillContext context) {
  Map<String, String> metricTags = new HashMap<>();
  metricTags.put(Constants.Metrics.Tag.NAMESPACE, programId.getNamespace());
  metricTags.put(Constants.Metrics.Tag.APP, programId.getApplication());
  metricTags.put(Constants.Metrics.Tag.RUN_ID, context.getRunId().getId());
  // The tag name for the program itself depends on the program type
  // (e.g. "f" for flows, "b" for mapreduce).
  metricTags.put(ProgramTypeMetricTag.getTagName(programId.getType()), programId.getProgram());
  if (programId.getType() == ProgramType.FLOW) {
    // Flows additionally identify the individual flowlet.
    metricTags.put(Constants.Metrics.Tag.FLOWLET, context.getSpecification().getName());
  }
  return metricTags;
}
}
@Override public void run() { String runnableName = getContext().getSpecification().getName(); String serviceName = System.getProperty("service.name"); LOG.info("Announcing with name {} for runnable {}", serviceName, runnableName); // Compute a unique port name based on runnable name (running names are r[0-9]+) getContext().announce(serviceName, 12345 + Integer.parseInt(runnableName.substring(1))); try { stopLatch.await(); } catch (InterruptedException e) { LOG.warn("Run thread interrupted", e); } }
/**
 * Subclass initialization hook: resolves the runnable name and builds the
 * Guice injector for this run, then registers a JVM shutdown hook that
 * stops this runnable.
 *
 * @return the injector created for this runnable
 */
@Override protected Injector doInit(TwillContext context) {
  name = context.getSpecification().getName();
  injector = createGuiceInjector(getCConfiguration(), getConfiguration(), context);
  // Register shutdown hook to stop Log Saver before Hadoop Filesystem shuts down.
  // Priority is one above the FileSystem hook so this hook runs first and can
  // still flush through HDFS on the way down.
  ShutdownHookManager.get().addShutdownHook(new Runnable() {
    @Override
    public void run() {
      LOG.info("Shutdown hook triggered.");
      stop();
    }
  }, FileSystem.SHUTDOWN_HOOK_PRIORITY + 1);
  return injector;
}