// Pure delegation: plugin lookup is forwarded to the wrapped specification unchanged.
@Override public Map<String, Plugin> getPlugins() { return delegate.getPlugins(); }
// NOTE(review): this closing brace ends the enclosing class, whose declaration is outside this view.
}
// Pure delegation: returns the delegate's plugin map as-is (no copy is made here).
@Override public Map<String, Plugin> getPlugins() { return delegate.getPlugins(); }
/**
 * Returns details for every plugin instance declared by the given application.
 *
 * @param appId the id of the application
 * @return list of plugins in the application
 * @throws ApplicationNotFoundException if the specified application does not exist
 */
public List<PluginInstanceDetail> getPlugins(ApplicationId appId) throws ApplicationNotFoundException {
  ApplicationSpecification spec = store.getApplication(appId);
  if (spec == null) {
    throw new ApplicationNotFoundException(appId);
  }
  // One detail per plugin instance; the map key is the plugin instance name.
  List<PluginInstanceDetail> details = new ArrayList<>(spec.getPlugins().size());
  spec.getPlugins().forEach((name, plugin) -> details.add(new PluginInstanceDetail(name, plugin)));
  return details;
}
/**
 * Looks up the application and returns one {@link PluginInstanceDetail} per plugin
 * instance it declares.
 *
 * @param appId the id of the application
 * @return list of plugins in the application
 * @throws ApplicationNotFoundException if the specified application does not exist
 */
public List<PluginInstanceDetail> getPlugins(ApplicationId appId) throws ApplicationNotFoundException {
  ApplicationSpecification spec = store.getApplication(appId);
  if (spec == null) {
    throw new ApplicationNotFoundException(appId);
  }
  List<PluginInstanceDetail> result = new ArrayList<>(spec.getPlugins().size());
  // Map key is the plugin instance name, value carries the plugin metadata.
  spec.getPlugins().forEach((instanceName, plugin) -> result.add(new PluginInstanceDetail(instanceName, plugin)));
  return result;
}
/** * Creates a new instance of {@link ClassLoader} that will be used for program method invocation. * By default it is a {@link CombineClassLoader} with program classloader, * plugins export-package classloader and system classloader in that loading order. */ protected ClassLoader createProgramInvocationClassLoader() { // A classloader that can load all export-package classes from all plugins ClassLoader pluginsClassLoader = PluginClassLoaders.createFilteredPluginsClassLoader( program.getApplicationSpecification().getPlugins(), pluginInstantiator); return new CombineClassLoader(null, program.getClassLoader(), pluginsClassLoader, getClass().getClassLoader()); }
/** * Creates a new instance of {@link ClassLoader} that will be used for program method invocation. * By default it is a {@link CombineClassLoader} with program classloader, * plugins export-package classloader and system classloader in that loading order. */ protected ClassLoader createProgramInvocationClassLoader() { // A classloader that can load all export-package classes from all plugins ClassLoader pluginsClassLoader = PluginClassLoaders.createFilteredPluginsClassLoader( program.getApplicationSpecification().getPlugins(), pluginInstantiator); return new CombineClassLoader(null, program.getClassLoader(), pluginsClassLoader, getClass().getClassLoader()); }
@Override public Map<String, String> getSystemPropertiesToAdd() { ImmutableMap.Builder<String, String> properties = ImmutableMap.builder(); properties.put(ENTITY_NAME_KEY, appSpec.getName()); properties.put(VERSION_KEY, appId.getVersion()); String description = appSpec.getDescription(); if (!Strings.isNullOrEmpty(description)) { properties.put(DESCRIPTION_KEY, description); } if (!existing) { properties.put(CREATION_TIME_KEY, String.valueOf(System.currentTimeMillis())); } addPrograms(properties); addSchedules(properties); // appSpec.getPlugins() returns all instances of all plugins, so there may be duplicates. // we only store unique plugins right now Set<PluginClass> existingPluginClasses = new HashSet<>(); for (Plugin plugin : appSpec.getPlugins().values()) { if (!existingPluginClasses.contains(plugin.getPluginClass())) { addPlugin(plugin.getPluginClass(), null, properties); existingPluginClasses.add(plugin.getPluginClass()); } } return properties.build(); }
/**
 * Builds the ordered list of ClassLoaders this classloader delegates to:
 * program classloader, plugins' export-package classloader, then the Spark classloader's
 * own loader. Used by constructor only.
 */
private static List<ClassLoader> createDelegateClassLoaders(SparkRuntimeContext context) {
  ClassLoader programClassLoader = context.getProgram().getClassLoader();
  // Restricted to export-package classes of the application's plugins
  ClassLoader pluginsClassLoader = PluginClassLoaders.createFilteredPluginsClassLoader(
    context.getApplicationSpecification().getPlugins(), context.getPluginInstantiator());
  ClassLoader sparkClassLoader = SparkClassLoader.class.getClassLoader();
  return Arrays.asList(programClassLoader, pluginsClassLoader, sparkClassLoader);
}
}
/**
 * Returns the delegate ClassLoaders, in lookup order: the program classloader, a
 * classloader over the application plugins' export-package classes, and the loader of
 * {@link SparkClassLoader} itself. Used by constructor only.
 */
private static List<ClassLoader> createDelegateClassLoaders(SparkRuntimeContext context) {
  ClassLoader pluginsClassLoader = PluginClassLoaders.createFilteredPluginsClassLoader(
    context.getApplicationSpecification().getPlugins(), context.getPluginInstantiator());
  return Arrays.asList(context.getProgram().getClassLoader(),
                       pluginsClassLoader,
                       SparkClassLoader.class.getClassLoader());
}
}
/**
 * Assembles the list of ClassLoaders consulted by this delegating classloader.
 * Ordering matters: program classes first, then plugin export-package classes, then the
 * system/Spark loader. Used by constructor only.
 */
private static List<ClassLoader> createDelegateClassLoaders(SparkRuntimeContext context) {
  ClassLoader programLoader = context.getProgram().getClassLoader();
  ClassLoader pluginsLoader = PluginClassLoaders.createFilteredPluginsClassLoader(
    context.getApplicationSpecification().getPlugins(), context.getPluginInstantiator());
  ClassLoader systemLoader = SparkClassLoader.class.getClassLoader();
  return Arrays.asList(programLoader, pluginsLoader, systemLoader);
}
}
@Override public Map<String, String> getSystemPropertiesToAdd() { ImmutableMap.Builder<String, String> properties = ImmutableMap.builder(); properties.put(ENTITY_NAME_KEY, appSpec.getName()); properties.put(VERSION_KEY, appId.getVersion()); String description = appSpec.getDescription(); if (!Strings.isNullOrEmpty(description)) { properties.put(DESCRIPTION_KEY, description); } properties.put(CREATION_TIME_KEY, creationTime); addPrograms(properties); addSchedules(properties); // appSpec.getPlugins() returns all instances of all plugins, so there may be duplicates. // we only store unique plugins right now Set<PluginClass> existingPluginClasses = new HashSet<>(); for (Plugin plugin : appSpec.getPlugins().values()) { if (!existingPluginClasses.contains(plugin.getPluginClass())) { SystemMetadataProvider.addPlugin(plugin.getPluginClass(), null, properties); existingPluginClasses.add(plugin.getPluginClass()); } } return properties.build(); }
@Nullable ApplicationSpecification appSpec) throws Exception { if (appSpec == null || appSpec.getPlugins().isEmpty()) { return options; ImmutableMap.Builder<String, String> builder = ImmutableMap.builder(); builder.putAll(options.getArguments().asMap()); for (Map.Entry<String, Plugin> pluginEntry : appSpec.getPlugins().entrySet()) { Plugin plugin = pluginEntry.getValue(); File destFile = new File(tempDir, Artifacts.getFileName(plugin.getArtifactId()));
@Nullable ApplicationSpecification appSpec) throws Exception { if (appSpec == null || appSpec.getPlugins().isEmpty()) { return options; ImmutableMap.Builder<String, String> builder = ImmutableMap.builder(); builder.putAll(options.getArguments().asMap()); for (Map.Entry<String, Plugin> pluginEntry : appSpec.getPlugins().entrySet()) { Plugin plugin = pluginEntry.getValue(); File destFile = new File(tempDir, Artifacts.getFileName(plugin.getArtifactId()));
/**
 * Serializes an {@link ApplicationSpecification} to JSON. Fields are added in a fixed
 * order; "configuration" is emitted only when present. JsonPrimitive construction (not
 * addProperty) is kept so a null field still fails fast rather than emitting JsonNull.
 */
@Override
public JsonElement serialize(ApplicationSpecification src, Type typeOfSrc, JsonSerializationContext context) {
  JsonObject json = new JsonObject();
  json.add("name", new JsonPrimitive(src.getName()));
  json.add("appVersion", new JsonPrimitive(src.getAppVersion()));
  // Optional field: omitted entirely when the app has no configuration
  if (src.getConfiguration() != null) {
    json.add("configuration", new JsonPrimitive(src.getConfiguration()));
  }
  json.add("artifactId", context.serialize(src.getArtifactId()));
  json.add("description", new JsonPrimitive(src.getDescription()));
  json.add("datasetModules", serializeMap(src.getDatasetModules(), context, String.class));
  json.add("datasetInstances", serializeMap(src.getDatasets(), context, DatasetCreationSpec.class));
  json.add("mapReduces", serializeMap(src.getMapReduce(), context, MapReduceSpecification.class));
  json.add("sparks", serializeMap(src.getSpark(), context, SparkSpecification.class));
  json.add("workflows", serializeMap(src.getWorkflows(), context, WorkflowSpecification.class));
  json.add("services", serializeMap(src.getServices(), context, ServiceSpecification.class));
  json.add("programSchedules", serializeMap(src.getProgramSchedules(), context, ScheduleCreationSpec.class));
  json.add("workers", serializeMap(src.getWorkers(), context, WorkerSpecification.class));
  json.add("plugins", serializeMap(src.getPlugins(), context, Plugin.class));
  return json;
}
/**
 * Serializes an {@link ApplicationSpecification} (including streams and flows) to JSON.
 * Fields are emitted in a fixed order; "configuration" appears only when non-null.
 * JsonPrimitive construction is kept so a null field still fails fast rather than
 * silently emitting JsonNull.
 */
@Override
public JsonElement serialize(ApplicationSpecification src, Type typeOfSrc, JsonSerializationContext context) {
  JsonObject json = new JsonObject();
  json.add("name", new JsonPrimitive(src.getName()));
  json.add("appVersion", new JsonPrimitive(src.getAppVersion()));
  // Optional: skipped entirely when no configuration was provided
  if (src.getConfiguration() != null) {
    json.add("configuration", new JsonPrimitive(src.getConfiguration()));
  }
  json.add("artifactId", context.serialize(src.getArtifactId()));
  json.add("description", new JsonPrimitive(src.getDescription()));
  json.add("streams", serializeMap(src.getStreams(), context, StreamSpecification.class));
  json.add("datasetModules", serializeMap(src.getDatasetModules(), context, String.class));
  json.add("datasetInstances", serializeMap(src.getDatasets(), context, DatasetCreationSpec.class));
  json.add("flows", serializeMap(src.getFlows(), context, FlowSpecification.class));
  json.add("mapReduces", serializeMap(src.getMapReduce(), context, MapReduceSpecification.class));
  json.add("sparks", serializeMap(src.getSpark(), context, SparkSpecification.class));
  json.add("workflows", serializeMap(src.getWorkflows(), context, WorkflowSpecification.class));
  json.add("services", serializeMap(src.getServices(), context, ServiceSpecification.class));
  json.add("programSchedules", serializeMap(src.getProgramSchedules(), context, ScheduleCreationSpec.class));
  json.add("workers", serializeMap(src.getWorkers(), context, WorkerSpecification.class));
  json.add("plugins", serializeMap(src.getPlugins(), context, Plugin.class));
  return json;
}
for (Map.Entry<String, Plugin> pluginEnty : spec.getPlugins().entrySet()) { plugins.add(new PluginDetail(pluginEnty.getKey(), pluginEnty.getValue().getPluginClass().getName(),
for (Map.Entry<String, Plugin> pluginEnty : spec.getPlugins().entrySet()) { plugins.add(new PluginDetail(pluginEnty.getKey(), pluginEnty.getValue().getPluginClass().getName(),
program.getApplicationSpecification().getPlugins());
/**
 * Updates the {@link Configuration} of this class with the given parameters.
 *
 * @param context the context for the MapReduce program
 * @param conf the CDAP configuration
 * @param programJarURI The URI of the program JAR
 * @param localizedUserResources the localized resources for the MapReduce program
 */
public void set(BasicMapReduceContext context, CConfiguration conf, URI programJarURI,
                Map<String, String> localizedUserResources) {
  // Everything derived from the runtime context first...
  setProgramOptions(context.getProgramOptions());
  setProgramId(context.getProgram().getId());
  setApplicationSpecification(context.getApplicationSpecification());
  setWorkflowProgramInfo(context.getWorkflowInfo());
  setPlugins(context.getApplicationSpecification().getPlugins());
  setOutputs(context.getOutputs());
  // ...then the externally supplied pieces.
  setProgramJarURI(programJarURI);
  setConf(conf);
  setLocalizedResources(localizedUserResources);
}
/**
 * Updates the {@link Configuration} of this class with the given parameters.
 *
 * @param context the context for the MapReduce program
 * @param conf the CDAP configuration
 * @param programJarURI The URI of the program JAR
 * @param localizedUserResources the localized resources for the MapReduce program
 */
public void set(BasicMapReduceContext context, CConfiguration conf, URI programJarURI,
                Map<String, String> localizedUserResources) {
  ApplicationSpecification appSpec = context.getApplicationSpecification();
  setProgramOptions(context.getProgramOptions());
  setProgramId(context.getProgram().getId());
  setApplicationSpecification(appSpec);
  setWorkflowProgramInfo(context.getWorkflowInfo());
  setPlugins(appSpec.getPlugins());
  setProgramJarURI(programJarURI);
  setConf(conf);
  setLocalizedResources(localizedUserResources);
  setOutputs(context.getOutputs());
}