/**
 * Creates an AuthorizerInstantiator.
 *
 * @param cConf CDAP configuration used to read the security flags
 * @param authorizationContextFactory factory used to build contexts for authorizer extensions
 */
@Inject
@VisibleForTesting
public AuthorizerInstantiator(CConfiguration cConf, AuthorizationContextFactory authorizationContextFactory) {
  this.cConf = cConf;
  this.authorizationContextFactory = authorizationContextFactory;
  // Snapshot the security flags once, so later checks don't re-read configuration.
  this.authenticationEnabled = cConf.getBoolean(Constants.Security.ENABLED);
  this.authorizationEnabled = cConf.getBoolean(Constants.Security.Authorization.ENABLED);
  // Same 'false' flag used by other InstantiatorFactory call sites in this codebase.
  this.instantiatorFactory = new InstantiatorFactory(false);
}
@Override public Handler get() { // we don't instantiate the handler class via injection, to avoid giving it access to objects bound in guice, // such as SConfiguration return new InstantiatorFactory(false).get(TypeToken.of(handlerClass)).create(); } }
/**
 * Creates a reflective reader that decodes values of the given schema into
 * objects of the given type.
 *
 * @param schema schema of the data being read
 * @param type target Java type to decode into
 */
protected ReflectionReader(Schema schema, TypeToken<TO> type) {
  this.schema = schema;
  this.type = type;
  // 'true' differs from the 'false' used by other call sites — presumably permits
  // instantiation of classes without a usable constructor; confirm against InstantiatorFactory.
  this.creatorFactory = new InstantiatorFactory(true);
  // Identity map: creators are cached per exact Class instance.
  this.creators = Maps.newIdentityHashMap();
  this.fieldAccessorFactory = new ReflectionFieldAccessorFactory();
}
// Sets up a reflective reader that decodes values of {@code schema} into objects of {@code type}.
protected ReflectionReader(Schema schema, TypeToken<TO> type) {
  // 'true' here differs from the 'false' used elsewhere in this codebase — presumably
  // allows instantiating classes lacking a usable constructor; confirm in InstantiatorFactory.
  this.creatorFactory = new InstantiatorFactory(true);
  // Identity-keyed map: creator lookup is by the exact Class instance.
  this.creators = Maps.newIdentityHashMap();
  this.fieldAccessorFactory = new ReflectionFieldAccessorFactory();
  this.schema = schema;
  this.type = type;
}
/**
 * Creates a plugin instantiator rooted at the given plugin directory.
 *
 * @param cConf CDAP configuration, used to locate the local temp area
 * @param parentClassLoader parent classloader for plugin classloaders
 * @param pluginDir directory containing plugin artifacts
 * @param filterClassloader if {@code true}, wrap the parent so plugins see a filtered view
 */
public PluginInstantiator(CConfiguration cConf, ClassLoader parentClassLoader, File pluginDir,
                          boolean filterClassloader) {
  this.instantiatorFactory = new InstantiatorFactory(false);
  this.pluginDir = pluginDir;

  // Fresh scratch directory under the configured local data area.
  File localTmpBase = new File(cConf.get(Constants.CFG_LOCAL_DATA_DIR),
                               cConf.get(Constants.AppFabric.TEMP_DIR));
  this.tmpDir = DirUtils.createTempDir(localTmpBase.getAbsoluteFile());

  // One classloader per plugin, cached; removal listener fires on eviction.
  this.classLoaders = CacheBuilder.newBuilder()
    .removalListener(new ClassLoaderRemovalListener())
    .build(new ClassLoaderCacheLoader());

  // Records whether we created (and therefore own) the wrapped parent classloader.
  this.ownedParentClassLoader = filterClassloader;
  this.parentClassLoader = filterClassloader
    ? PluginClassLoader.createParent(parentClassLoader)
    : parentClassLoader;
}
// Creates a plugin instantiator that loads plugin artifacts from pluginDir, caching one
// classloader per plugin.
public PluginInstantiator(CConfiguration cConf, ClassLoader parentClassLoader, File pluginDir,
                          boolean filterClassloader) {
  this.instantiatorFactory = new InstantiatorFactory(false);
  // Scratch space under the configured local data area; a fresh temp dir per instance.
  File tmpDir = new File(cConf.get(Constants.CFG_LOCAL_DATA_DIR),
                         cConf.get(Constants.AppFabric.TEMP_DIR)).getAbsoluteFile();
  this.pluginDir = pluginDir;
  this.tmpDir = DirUtils.createTempDir(tmpDir);
  // Cache of per-plugin classloaders; removal listener presumably releases
  // classloader resources on eviction — confirm in ClassLoaderRemovalListener.
  this.classLoaders = CacheBuilder.newBuilder()
    .removalListener(new ClassLoaderRemovalListener())
    .build(new ClassLoaderCacheLoader());
  // When filtering, wrap the parent so plugins see a restricted view of its classes.
  this.parentClassLoader = filterClassloader ? PluginClassLoader.createParent(parentClassLoader)
                                             : parentClassLoader;
  // Records whether this instance created the parent wrapper itself.
  this.ownedParentClassLoader = filterClassloader;
}
/**
 * Builds one delegator context for each Spark HTTP service handler declared in
 * the specification.
 *
 * @return the list of delegator contexts, in specification order
 * @throws Exception if a handler class cannot be loaded
 */
@Override
protected List<SparkHandlerDelegatorContext> createDelegatorContexts() throws Exception {
  InstantiatorFactory instantiator = new InstantiatorFactory(false);
  List<SparkHandlerDelegatorContext> contexts = new ArrayList<>();
  for (SparkHttpServiceHandlerSpecification spec : context.getSpecification().getHandlers()) {
    // Handler classes are user code: load them through the program's classloader.
    Class<?> rawClass = getProgram().getClassLoader().loadClass(spec.getClassName());
    @SuppressWarnings("unchecked")
    TypeToken<SparkHttpServiceHandler> handlerType =
      TypeToken.of((Class<SparkHttpServiceHandler>) rawClass);
    // Child metrics context tagged with the handler's simple class name.
    MetricsContext handlerMetrics = runtimeContext.getProgramMetrics()
      .childContext(Constants.Metrics.Tag.HANDLER, rawClass.getSimpleName());
    contexts.add(new SparkHandlerDelegatorContext(handlerType, instantiator, spec,
                                                  runtimeContext.getProgramMetrics(), handlerMetrics));
  }
  return contexts;
}
@Override protected List<HandlerDelegatorContext> createDelegatorContexts() throws Exception { // Constructs all handler delegator. It is for bridging ServiceHttpHandler and HttpHandler (in netty-http). List<HandlerDelegatorContext> delegatorContexts = new ArrayList<>(); InstantiatorFactory instantiatorFactory = new InstantiatorFactory(false); for (HttpServiceHandlerSpecification handlerSpec : serviceSpecification.getHandlers().values()) { Class<?> handlerClass = getProgram().getClassLoader().loadClass(handlerSpec.getClassName()); @SuppressWarnings("unchecked") TypeToken<HttpServiceHandler> type = TypeToken.of((Class<HttpServiceHandler>) handlerClass); MetricsContext metrics = context.getProgramMetrics().childContext( BasicHttpServiceContext.createMetricsTags(handlerSpec, getInstanceId())); delegatorContexts.add(new HandlerDelegatorContext(type, instantiatorFactory, handlerSpec, contextFactory, metrics)); } return delegatorContexts; }
// Builds one delegator context per Spark HTTP service handler declared in the spec.
@Override
protected List<SparkHandlerDelegatorContext> createDelegatorContexts() throws Exception {
  List<SparkHandlerDelegatorContext> contexts = new ArrayList<>();
  InstantiatorFactory instantiatorFactory = new InstantiatorFactory(false);
  for (SparkHttpServiceHandlerSpecification spec : context.getSpecification().getHandlers()) {
    // Handler classes are user code: load them via the program classloader.
    Class<?> handlerClass = getProgram().getClassLoader().loadClass(spec.getClassName());
    @SuppressWarnings("unchecked")
    TypeToken<SparkHttpServiceHandler> type = TypeToken.of((Class<SparkHttpServiceHandler>) handlerClass);
    // Child metrics context tagged with the handler's simple class name.
    MetricsContext handlerMetricsContext = runtimeContext.getProgramMetrics().childContext(
      Constants.Metrics.Tag.HANDLER, handlerClass.getSimpleName());
    contexts.add(new SparkHandlerDelegatorContext(type, instantiatorFactory, spec,
                                                  runtimeContext.getProgramMetrics(), handlerMetricsContext));
  }
  return contexts;
}
// Builds one delegator context per HTTP service handler in the service specification.
@Override
protected List<HandlerDelegatorContext> createDelegatorContexts() throws Exception {
  // Constructs all handler delegator. It is for bridging ServiceHttpHandler and HttpHandler (in netty-http).
  List<HandlerDelegatorContext> delegatorContexts = new ArrayList<>();
  InstantiatorFactory instantiatorFactory = new InstantiatorFactory(false);
  for (HttpServiceHandlerSpecification handlerSpec : serviceSpecification.getHandlers().values()) {
    // User handler classes come from the program classloader.
    Class<?> handlerClass = getProgram().getClassLoader().loadClass(handlerSpec.getClassName());
    @SuppressWarnings("unchecked")
    TypeToken<HttpServiceHandler> type = TypeToken.of((Class<HttpServiceHandler>) handlerClass);
    // Per-handler metrics context, tagged via the shared helper so tags match the HTTP service runtime.
    MetricsContext metrics = context.getProgramMetrics().childContext(
      BasicHttpServiceContext.createMetricsTags(handlerSpec, getInstanceId()));
    delegatorContexts.add(new HandlerDelegatorContext(type, instantiatorFactory, handlerSpec,
                                                      contextFactory, metrics));
  }
  return delegatorContexts;
}
/**
 * Creates the delegator contexts for all Spark HTTP service handlers.
 *
 * @return one context per handler specification
 * @throws Exception if loading a handler class fails
 */
@Override
protected List<SparkHandlerDelegatorContext> createDelegatorContexts() throws Exception {
  List<SparkHandlerDelegatorContext> delegators = new ArrayList<>();
  // A single factory instance is shared across all handler contexts.
  InstantiatorFactory factory = new InstantiatorFactory(false);
  for (SparkHttpServiceHandlerSpecification handlerSpec : context.getSpecification().getHandlers()) {
    // Resolve the user's handler class from the program classloader.
    Class<?> loaded = getProgram().getClassLoader().loadClass(handlerSpec.getClassName());
    @SuppressWarnings("unchecked")
    TypeToken<SparkHttpServiceHandler> token = TypeToken.of((Class<SparkHttpServiceHandler>) loaded);
    MetricsContext perHandlerMetrics = runtimeContext.getProgramMetrics()
      .childContext(Constants.Metrics.Tag.HANDLER, loaded.getSimpleName());
    delegators.add(new SparkHandlerDelegatorContext(token, factory, handlerSpec,
                                                    runtimeContext.getProgramMetrics(),
                                                    perHandlerMetrics));
  }
  return delegators;
}
/**
 * Executes the workflow: transitions to RUNNING, walks every node of the
 * specification, and records COMPLETED if the run finished naturally.
 *
 * @throws Exception if any workflow node fails
 */
@Override
protected void run() throws Exception {
  String workflowName = workflowSpec.getName();
  String runId = workflowRunId.getRun();
  LOG.info("Starting workflow execution for '{}' with Run id '{}'", workflowName, runId);
  LOG.trace("Workflow specification is {}", workflowSpec);

  workflowContext.setState(new ProgramState(ProgramStatus.RUNNING, null));
  executeAll(workflowSpec.getNodes().iterator(), program.getApplicationSpecification(),
             new InstantiatorFactory(false), program.getClassLoader(), basicWorkflowToken);

  // NOTE(review): presumably runningThread is cleared when the run is stopped,
  // so COMPLETED is only recorded for a natural finish — confirm.
  if (runningThread != null) {
    workflowContext.setState(new ProgramState(ProgramStatus.COMPLETED, null));
  }
  LOG.info("Workflow '{}' with run id '{}' completed", workflowName, runId);
}
DynamicPartitionerWriterWrapper(TaskAttemptContext job) { this.job = job; Configuration configuration = job.getConfiguration(); Class<? extends DynamicPartitioner> partitionerClass = configuration .getClass(PartitionedFileSetArguments.DYNAMIC_PARTITIONER_CLASS_NAME, null, DynamicPartitioner.class); this.dynamicPartitioner = new InstantiatorFactory(false).get(TypeToken.of(partitionerClass)).create(); this.partitionWriteOption = DynamicPartitioner.PartitionWriteOption.valueOf( configuration.get(PartitionedFileSetArguments.DYNAMIC_PARTITIONER_WRITE_OPTION)); MapReduceClassLoader classLoader = MapReduceClassLoader.getFromConfiguration(configuration); this.taskContext = classLoader.getTaskContextProvider().get(job); // name the output file 'part-<RunId>-m-00000' instead of 'part-m-00000' String outputName = DynamicPartitioningOutputFormat.getOutputName(job); if (partitionWriteOption == DynamicPartitioner.PartitionWriteOption.CREATE_OR_APPEND) { outputName = outputName + "-" + taskContext.getProgramRunId().getRun(); } this.outputName = outputName; String outputDatasetName = configuration.get(Constants.Dataset.Partitioned.HCONF_ATTR_OUTPUT_DATASET); this.outputDataset = taskContext.getDataset(outputDatasetName); this.partitioning = outputDataset.getPartitioning(); this.dynamicPartitioner.initialize(taskContext); this.fileOutputFormatName = job.getConfiguration() .getClass(Constants.Dataset.Partitioned.HCONF_ATTR_OUTPUT_FORMAT_CLASS_NAME, null, FileOutputFormat.class) .getName(); }
// Wraps a DynamicPartitioner-driven writer for one map task attempt: reads the
// partitioner class, write option, output dataset and output format from the task
// configuration, then instantiates and initializes the partitioner.
DynamicPartitionerWriterWrapper(TaskAttemptContext job) {
  this.job = job;
  Configuration configuration = job.getConfiguration();
  Class<? extends DynamicPartitioner> partitionerClass = configuration
    .getClass(PartitionedFileSetArguments.DYNAMIC_PARTITIONER_CLASS_NAME, null, DynamicPartitioner.class);
  this.dynamicPartitioner = new InstantiatorFactory(false).get(TypeToken.of(partitionerClass)).create();
  // NOTE(review): valueOf throws NPE if the write-option property is missing — confirm
  // that callers always set it.
  this.partitionWriteOption = DynamicPartitioner.PartitionWriteOption.valueOf(
    configuration.get(PartitionedFileSetArguments.DYNAMIC_PARTITIONER_WRITE_OPTION));
  MapReduceClassLoader classLoader = MapReduceClassLoader.getFromConfiguration(configuration);
  this.taskContext = classLoader.getTaskContextProvider().get(job);
  // name the output file 'part-<RunId>-m-00000' instead of 'part-m-00000'
  String outputName = DynamicPartitioningOutputFormat.getOutputName(job);
  if (partitionWriteOption == DynamicPartitioner.PartitionWriteOption.CREATE_OR_APPEND) {
    // Suffix with the run id so appended runs don't collide with earlier runs' files.
    outputName = outputName + "-" + taskContext.getProgramRunId().getRun();
  }
  this.outputName = outputName;
  String outputDatasetName = configuration.get(Constants.Dataset.Partitioned.HCONF_ATTR_OUTPUT_DATASET);
  this.outputDataset = taskContext.getDataset(outputDatasetName);
  this.partitioning = outputDataset.getPartitioning();
  this.dynamicPartitioner.initialize(taskContext);
  this.fileOutputFormatName = job.getConfiguration()
    .getClass(Constants.Dataset.Partitioned.HCONF_ATTR_OUTPUT_FORMAT_CLASS_NAME, null, FileOutputFormat.class)
    .getName();
}
// Drives the workflow: marks it RUNNING, executes all nodes, then marks it COMPLETED
// unless the run was interrupted.
@Override
protected void run() throws Exception {
  LOG.info("Starting workflow execution for '{}' with Run id '{}'",
           workflowSpec.getName(), workflowRunId.getRun());
  LOG.trace("Workflow specification is {}", workflowSpec);
  workflowContext.setState(new ProgramState(ProgramStatus.RUNNING, null));
  executeAll(workflowSpec.getNodes().iterator(), program.getApplicationSpecification(),
             new InstantiatorFactory(false), program.getClassLoader(), basicWorkflowToken);
  // NOTE(review): presumably runningThread is cleared when the run is stopped, so
  // COMPLETED is only recorded for a natural finish — confirm.
  if (runningThread != null) {
    workflowContext.setState(new ProgramState(ProgramStatus.COMPLETED, null));
  }
  LOG.info("Workflow '{}' with run id '{}' completed", workflowSpec.getName(), workflowRunId.getRun());
}
@SuppressWarnings("unchecked") private Workflow initializeWorkflow() throws Exception { Class<?> clz = Class.forName(workflowSpec.getClassName(), true, program.getClassLoader()); if (!Workflow.class.isAssignableFrom(clz)) { throw new IllegalStateException(String.format("%s is not Workflow.", clz)); } Class<? extends Workflow> workflowClass = (Class<? extends Workflow>) clz; final Workflow workflow = new InstantiatorFactory(false).get(TypeToken.of(workflowClass)).create(); // set metrics Reflections.visit(workflow, workflow.getClass(), new MetricsFieldSetter(workflowContext.getMetrics())); if (!(workflow instanceof ProgramLifecycle)) { return workflow; } final TransactionControl txControl = Transactions.getTransactionControl(workflowContext.getDefaultTxControl(), Workflow.class, workflow, "initialize", WorkflowContext.class); basicWorkflowToken.setCurrentNode(workflowSpec.getName()); workflowContext.setState(new ProgramState(ProgramStatus.INITIALIZING, null)); workflowContext.initializeProgram((ProgramLifecycle) workflow, txControl, false); workflowStateWriter.setWorkflowToken(workflowRunId, basicWorkflowToken); return workflow; }
// Loads and instantiates the user's Workflow class, injects metric fields, and — if the
// workflow implements ProgramLifecycle — runs its initialize lifecycle and persists the token.
@SuppressWarnings("unchecked")
private Workflow initializeWorkflow() throws Exception {
  Class<?> clz = Class.forName(workflowSpec.getClassName(), true, program.getClassLoader());
  if (!Workflow.class.isAssignableFrom(clz)) {
    throw new IllegalStateException(String.format("%s is not Workflow.", clz));
  }
  Class<? extends Workflow> workflowClass = (Class<? extends Workflow>) clz;
  final Workflow workflow = new InstantiatorFactory(false).get(TypeToken.of(workflowClass)).create();
  // set metrics
  Reflections.visit(workflow, workflow.getClass(), new MetricsFieldSetter(workflowContext.getMetrics()));
  // Workflows without a lifecycle need no initialize call.
  if (!(workflow instanceof ProgramLifecycle)) {
    return workflow;
  }
  final TransactionControl txControl =
    Transactions.getTransactionControl(workflowContext.getDefaultTxControl(), Workflow.class,
                                       workflow, "initialize", WorkflowContext.class);
  basicWorkflowToken.setCurrentNode(workflowSpec.getName());
  workflowContext.setState(new ProgramState(ProgramStatus.INITIALIZING, null));
  workflowContext.initializeProgram((ProgramLifecycle) workflow, txControl, false);
  // Persist the token produced during initialize before any node runs.
  workflowStateWriter.setWorkflowToken(workflowRunId, basicWorkflowToken);
  return workflow;
}
/**
 * Verifies that an instance produced by InstantiatorFactory(false) has every
 * non-static field set to its Java default value.
 */
@Test
public void testUnsafe() {
  Record record = new InstantiatorFactory(false).get(TypeToken.of(Record.class)).create();
  FieldVisitor checkDefaults = new FieldVisitor() {
    @Override
    public void visit(Object instance, Type inspectType, Type declareType, Field field) throws Exception {
      // Static fields are initialized by the class, not the instantiator — skip them.
      if (Modifier.isStatic(field.getModifiers())) {
        return;
      }
      Assert.assertEquals(Defaults.defaultValue(field.getType()), field.get(instance));
    }
  };
  Reflections.visit(record, Record.class, checkDefaults);
}
@Override protected void startUp() throws Exception { LoggingContextAccessor.setLoggingContext(context.getLoggingContext()); // Instantiate worker instance Class<?> workerClass = program.getClassLoader().loadClass(spec.getClassName()); @SuppressWarnings("unchecked") TypeToken<Worker> workerType = (TypeToken<Worker>) TypeToken.of(workerClass); worker = new InstantiatorFactory(false).get(workerType).create(); // Fields injection Reflections.visit(worker, workerType.getType(), new MetricsFieldSetter(context.getMetrics()), new PropertyFieldSetter(spec.getProperties())); LOG.debug("Starting Worker Program {}", program.getId()); // Initialize worker // Worker is always using Explicit transaction TransactionControl txControl = Transactions.getTransactionControl(TransactionControl.EXPLICIT, Worker.class, worker, "initialize", WorkerContext.class); context.initializeProgram(worker, txControl, false); }
// Loads, instantiates, field-injects, and initializes the user's Worker.
@Override
protected void startUp() throws Exception {
  LoggingContextAccessor.setLoggingContext(context.getLoggingContext());
  // Instantiate worker instance
  Class<?> workerClass = program.getClassLoader().loadClass(spec.getClassName());
  @SuppressWarnings("unchecked")
  TypeToken<Worker> workerType = (TypeToken<Worker>) TypeToken.of(workerClass);
  worker = new InstantiatorFactory(false).get(workerType).create();
  // Fields injection
  Reflections.visit(worker, workerType.getType(),
                    new MetricsFieldSetter(context.getMetrics()),
                    new PropertyFieldSetter(spec.getProperties()));
  LOG.debug("Starting Worker Program {}", program.getId());
  // Initialize worker
  // Worker is always using Explicit transaction
  TransactionControl txControl = Transactions.getTransactionControl(TransactionControl.EXPLICIT, Worker.class,
                                                                    worker, "initialize", WorkerContext.class);
  context.initializeProgram(worker, txControl, false);
}