/**
 * Converts the given {@link java.nio.file.Path} into a local {@link Location}.
 *
 * @param path the local path to convert
 * @return a {@link Location} representing the same local file
 */
public static Location toLocation(java.nio.file.Path path) {
  java.io.File localFile = path.toFile();
  return toLocation(localFile);
}
/**
 * Converts the given {@link java.nio.file.Path} into a local {@link Location}.
 *
 * @param path the local path to convert
 * @return a {@link Location} representing the same local file
 */
public static Location toLocation(java.nio.file.Path path) {
  java.io.File localFile = path.toFile();
  return toLocation(localFile);
}
private void createResourcesJar(ApplicationBundler bundler, Map<String, LocalFile> localFiles, Path stagingDir) throws IOException { // If there is no resources, no need to create the jar file. if (resources.isEmpty()) { return; } LOG.debug("Create and copy {}", Constants.Files.RESOURCES_JAR); Location location = Locations.toLocation(Files.createTempFile(stagingDir, Constants.Files.RESOURCES_JAR, null)); bundler.createBundle(location, Collections.emptyList(), resources); LOG.debug("Done {}", Constants.Files.RESOURCES_JAR); localFiles.put(Constants.Files.RESOURCES_JAR, createLocalFile(Constants.Files.RESOURCES_JAR, location, true)); }
private void createResourcesJar(ApplicationBundler bundler, Map<String, LocalFile> localFiles, Path stagingDir) throws IOException { // If there is no resources, no need to create the jar file. if (resources.isEmpty()) { return; } LOG.debug("Create and copy {}", Constants.Files.RESOURCES_JAR); Location location = Locations.toLocation(Files.createTempFile(stagingDir, Constants.Files.RESOURCES_JAR, null)); bundler.createBundle(location, Collections.emptyList(), resources); LOG.debug("Done {}", Constants.Files.RESOURCES_JAR); localFiles.put(Constants.Files.RESOURCES_JAR, createLocalFile(Constants.Files.RESOURCES_JAR, location, true)); }
/**
 * Same as {@link #createLogbackJar(Location)} except this method uses local {@link File} instead.
 *
 * @param targetFile the local file to write the logback jar to
 * @return the created jar as a local {@link File}, or {@code null} if no jar was created
 * @throws IOException if the jar could not be created
 */
@Nullable
public static File createLogbackJar(File targetFile) throws IOException {
  Location jar = createLogbackJar(Locations.toLocation(targetFile));
  if (jar == null) {
    return null;
  }
  return new File(jar.toURI());
}
/**
 * Same as {@link #createLogbackJar(Location)} except this method uses local {@link File} instead.
 *
 * @param targetFile the local file to write the logback jar to
 * @return the created jar as a local {@link File}, or {@code null} if no jar was created
 * @throws IOException if the jar could not be created
 */
@Nullable
public static File createLogbackJar(File targetFile) throws IOException {
  Location jar = createLogbackJar(Locations.toLocation(targetFile));
  if (jar == null) {
    return null;
  }
  return new File(jar.toURI());
}
/**
 * Unpacks the authorization extension jar into {@code tmpDir} and builds a class loader over it.
 *
 * @param authorizerExtensionJar the extension jar to unpack
 * @param authorizerExtraClasspath optional extra classpath entries for the extension, may be null
 * @return a class loader for the authorization extension
 * @throws IOException if unpacking fails for reasons other than a malformed jar
 * @throws InvalidAuthorizerException if the configured file is not a valid jar
 */
private AuthorizerClassLoader createAuthorizerClassLoader(File authorizerExtensionJar,
                                                          @Nullable String authorizerExtraClasspath)
  throws IOException, InvalidAuthorizerException {
  LOG.info("Creating authorization extension using jar {}.", authorizerExtensionJar);
  try {
    BundleJarUtil.unJar(Locations.toLocation(authorizerExtensionJar), tmpDir);
    return new AuthorizerClassLoader(tmpDir, authorizerExtraClasspath);
  } catch (ZipException e) {
    // A ZipException means the configured path does not point at a real jar file.
    String message = String.format("Authorization extension jar %s specified as %s must be a jar file.",
                                   authorizerExtensionJar,
                                   Constants.Security.Authorization.EXTENSION_JAR_PATH);
    throw new InvalidAuthorizerException(message, e);
  }
}
@Override
public void start() {
  try {
    // Use local directory for caching generated jar files
    Path tempDir = Files.createDirectories(Paths.get(cConf.get(Constants.CFG_LOCAL_DATA_DIR),
                                                     cConf.get(Constants.AppFabric.TEMP_DIR)).toAbsolutePath());
    cachePath = Files.createTempDirectory(tempDir, "runner.cache");
    locationCache = new BasicLocationCache(Locations.toLocation(cachePath));
  } catch (IOException e) {
    // Cache setup is mandatory for this service; fail startup if it can't be created.
    throw new RuntimeException(e);
  }

  // Start the SOCKS proxy before scheduling any monitors that may route through it.
  monitorSocksProxy.startAndWait();
  monitorScheduler = Executors.newScheduledThreadPool(cConf.getInt(Constants.RuntimeMonitor.THREADS),
                                                      Threads.createDaemonThreadFactory("runtime-monitor-%d"));

  // Initialize existing runtime monitors asynchronously on a daemon thread so that
  // start() returns promptly; startMillis captures the service start time for the initializer.
  long startMillis = System.currentTimeMillis();
  Thread t = new Thread(() -> initializeRuntimeMonitors(startMillis), "runtime-monitor-initializer");
  t.setDaemon(true);
  t.start();
}
@Override
protected void response(HttpServiceResponder responder) throws IOException {
  // Streams targetFile back to the caller with a 200 status as plain text.
  responder.send(200, Locations.toLocation(targetFile), "text/plain");
}
};
/**
 * Builds a {@link Program} from the localized program jar and its expanded directory.
 *
 * @param cConf the CDAP configuration
 * @param contextConfig carries the program id and application specification
 * @return the program backed by the localized jar
 * @throws IOException if the program could not be created
 */
private static Program createProgram(CConfiguration cConf,
                                     SparkRuntimeContextConfig contextConfig) throws IOException {
  File programJar = new File(PROGRAM_JAR_NAME);
  File programDir = new File(PROGRAM_JAR_EXPANDED_NAME);

  // Filter the provider's class loader before exposing it to program classes.
  ClassLoader parent = new FilterClassLoader(SparkRuntimeContextProvider.class.getClassLoader(),
                                             SparkResourceFilters.SPARK_PROGRAM_CLASS_LOADER_FILTER);
  ClassLoader programClassLoader = new ProgramClassLoader(cConf, programDir, parent);

  ProgramDescriptor descriptor = new ProgramDescriptor(contextConfig.getProgramId(),
                                                       contextConfig.getApplicationSpecification());
  return new DefaultProgram(descriptor, Locations.toLocation(programJar), programClassLoader);
}
/**
 * Builds a {@link Program} from the localized program jar and its expanded directory.
 *
 * @param cConf the CDAP configuration
 * @param contextConfig carries the program id and application specification
 * @return the program backed by the localized jar
 * @throws IOException if the program could not be created
 */
private static Program createProgram(CConfiguration cConf,
                                     SparkRuntimeContextConfig contextConfig) throws IOException {
  File programJar = new File(PROGRAM_JAR_NAME);
  File programDir = new File(PROGRAM_JAR_EXPANDED_NAME);

  // Filter the provider's class loader before exposing it to program classes.
  ClassLoader parent = new FilterClassLoader(SparkRuntimeContextProvider.class.getClassLoader(),
                                             SparkResourceFilters.SPARK_PROGRAM_CLASS_LOADER_FILTER);
  ClassLoader programClassLoader = new ProgramClassLoader(cConf, programDir, parent);

  ProgramDescriptor descriptor = new ProgramDescriptor(contextConfig.getProgramId(),
                                                       contextConfig.getApplicationSpecification());
  return new DefaultProgram(descriptor, Locations.toLocation(programJar), programClassLoader);
}
/**
 * Builds a {@link Program} from the localized program jar and its expanded directory.
 *
 * @param cConf the CDAP configuration
 * @param contextConfig carries the program id and application specification
 * @return the program backed by the localized jar
 * @throws IOException if the program could not be created
 */
private static Program createProgram(CConfiguration cConf,
                                     SparkRuntimeContextConfig contextConfig) throws IOException {
  File programJar = new File(PROGRAM_JAR_NAME);
  File programDir = new File(PROGRAM_JAR_EXPANDED_NAME);

  // Filter the provider's class loader before exposing it to program classes.
  ClassLoader parent = new FilterClassLoader(SparkRuntimeContextProvider.class.getClassLoader(),
                                             SparkResourceFilters.SPARK_PROGRAM_CLASS_LOADER_FILTER);
  ClassLoader programClassLoader = new ProgramClassLoader(cConf, programDir, parent);

  ProgramDescriptor descriptor = new ProgramDescriptor(contextConfig.getProgramId(),
                                                       contextConfig.getApplicationSpecification());
  return new DefaultProgram(descriptor, Locations.toLocation(programJar), programClassLoader);
}
/** * Creates a program {@link ClassLoader} based on the MR job config. */ private static ClassLoader createProgramClassLoader(MapReduceContextConfig contextConfig) { // In distributed mode, the program is created by expanding the program jar. // The program jar is localized to container with the program jar name. // It's ok to expand to a temp dir in local directory, as the YARN container will be gone. Location programLocation = Locations.toLocation(new File(contextConfig.getProgramJarName())); try { File unpackDir = DirUtils.createTempDir(new File(System.getProperty("user.dir"))); LOG.info("Create ProgramClassLoader from {}, expand to {}", programLocation, unpackDir); BundleJarUtil.unJar(programLocation, unpackDir); return new ProgramClassLoader(contextConfig.getCConf(), unpackDir, FilterClassLoader.create(contextConfig.getHConf().getClassLoader())); } catch (IOException e) { LOG.error("Failed to create ProgramClassLoader", e); throw Throwables.propagate(e); } }
@Path("/download/{file}")
@GET
@TransactionPolicy(TransactionControl.EXPLICIT)
public void download(HttpServiceRequest request, HttpServiceResponder responder,
                     @PathParam("file") String file) {
  // With EXPLICIT transaction control, no implicit transaction may be active here.
  Assert.assertNull(System.getProperty(IN_TX));

  Location location = Locations.toLocation(new File(outputDir, file));
  try {
    // Stream the requested file back as plain text.
    responder.send(200, location, "text/plain");
  } catch (IOException e) {
    responder.sendStatus(500);
  }
}
/** * Improper Manifest file should throw an exception. */ @Test(expected = ExecutionException.class) public void testImproperOrNoManifestFile() throws Exception { // Create an JAR without the MainClass set. File deployFile = TMP_FOLDER.newFile(); try (JarOutputStream output = new JarOutputStream(new FileOutputStream(deployFile), new Manifest())) { output.putNextEntry(new JarEntry("dummy")); } Location jarLoc = Locations.toLocation(deployFile); ArtifactId artifactId = new ArtifactId("dummy", new ArtifactVersion("1.0.0-SNAPSHOT"), ArtifactScope.USER); ArtifactDescriptor artifactDescriptor = new ArtifactDescriptor(artifactId, jarLoc); AppDeploymentInfo info = new AppDeploymentInfo(artifactDescriptor, NamespaceId.DEFAULT, "some.class.name", null, null, null); AppFabricTestHelper.getLocalManager().deploy(info).get(); }
@Test public void testExtraClassPath() throws IOException, ClassNotFoundException { File tmpDir = TMP_FOLDER.newFolder(); // Create two jars, one with guava, one with gson ApplicationBundler bundler = new ApplicationBundler(new ClassAcceptor()); Location guavaJar = Locations.toLocation(new File(tmpDir, "guava.jar")); bundler.createBundle(guavaJar, ImmutableList.class); Location gsonJar = Locations.toLocation(new File(tmpDir, "gson.jar")); bundler.createBundle(gsonJar, Gson.class); // Unpack them File guavaDir = BundleJarUtil.unJar(guavaJar, TMP_FOLDER.newFolder()); File gsonDir = BundleJarUtil.unJar(gsonJar, TMP_FOLDER.newFolder()); // Create a DirectoryClassLoader using guava dir as the main directory, with the gson dir in the extra classpath String extraClassPath = gsonDir.getAbsolutePath() + File.pathSeparatorChar + gsonDir.getAbsolutePath() + "/lib/*"; ClassLoader cl = new DirectoryClassLoader(guavaDir, extraClassPath, null, Arrays.asList("lib")); // Should be able to load both guava and gson class from the class loader cl.loadClass(ImmutableList.class.getName()); cl.loadClass(Gson.class.getName()); }
@Test(expected = InvalidArtifactException.class)
public void testInvalidConfigApp() throws Exception {
  // Package the app class with an empty manifest.
  Manifest manifest = new Manifest();
  File appFile = createJar(InvalidConfigApp.class,
                           new File(TMP_FOLDER.newFolder(), "InvalidConfigApp-1.0.0.jar"), manifest);

  Id.Artifact artifactId = Id.Artifact.from(Id.Namespace.DEFAULT, "InvalidConfigApp", "1.0.0");
  Location artifactLocation = Locations.toLocation(appFile);
  EntityImpersonator impersonator = new EntityImpersonator(
    artifactId.toEntityId(), new DefaultImpersonator(CConfiguration.create(), null));

  try (CloseableClassLoader artifactClassLoader = classLoaderFactory.createClassLoader(
    ImmutableList.of(artifactLocation).iterator(), impersonator)) {
    // Inspecting an app with an invalid config type must throw InvalidArtifactException.
    artifactInspector.inspectArtifact(artifactId, appFile, artifactClassLoader);
  }
}
@Test
public void testScanForTasks() throws Exception {
  // write state for a provision operation that is polling for the cluster to be created
  TaskFields taskFields = createTaskInfo(new MockProvisioner.PropertyBuilder().build());
  ProvisioningOp op = new ProvisioningOp(ProvisioningOp.Type.PROVISION,
                                         ProvisioningOp.Status.POLLING_CREATE);
  Cluster cluster = new Cluster("name", ClusterStatus.CREATING,
                                Collections.emptyList(), Collections.emptyMap());
  ProvisioningTaskInfo taskInfo = new ProvisioningTaskInfo(taskFields.programRunId,
                                                           taskFields.programDescriptor,
                                                           taskFields.programOptions,
                                                           Collections.emptyMap(),
                                                           MockProvisioner.NAME, "Bob", op,
                                                           Locations.toLocation(TEMP_FOLDER.newFolder()).toURI(),
                                                           cluster);

  // Persist the in-flight task state transactionally so resumeTasks() can find it.
  transactional.execute(dsContext -> {
    ProvisionerDataset provisionerDataset = ProvisionerDataset.get(dsContext, datasetFramework);
    provisionerDataset.putTaskInfo(taskInfo);
  });

  // Resuming should pick up the persisted POLLING_CREATE task and drive it forward.
  provisioningService.resumeTasks(t -> { });

  // Eventually the provision operation should reach the CREATED state.
  ProvisioningTaskKey taskKey = new ProvisioningTaskKey(taskFields.programRunId,
                                                        ProvisioningOp.Type.PROVISION);
  waitForExpectedProvisioningState(taskKey, ProvisioningOp.Status.CREATED);
}
@Override
protected ArtifactDetail getArtifactDetail(ArtifactId artifactId)
  throws IOException, ArtifactNotFoundException {
  // Test stub: ignores the requested artifactId and always returns a fixed
  // "dummy" 1.0 USER-scope artifact backed by a fresh temp file, with an
  // empty set of artifact classes.
  co.cask.cdap.api.artifact.ArtifactId id = new co.cask.cdap.api.artifact.ArtifactId(
    "dummy", new ArtifactVersion("1.0"), ArtifactScope.USER);
  return new ArtifactDetail(new ArtifactDescriptor(id, Locations.toLocation(TEMP_FOLDER.newFile())),
                            new ArtifactMeta(ArtifactClasses.builder().build()));
}
};
@Override
protected ArtifactDetail getArtifactDetail(ArtifactId artifactId)
  throws IOException, ArtifactNotFoundException {
  // Test stub: ignores the requested artifactId and always returns a fixed
  // "dummy" 1.0 USER-scope artifact backed by a fresh temp file, with an
  // empty set of artifact classes.
  co.cask.cdap.api.artifact.ArtifactId id = new co.cask.cdap.api.artifact.ArtifactId(
    "dummy", new ArtifactVersion("1.0"), ArtifactScope.USER);
  return new ArtifactDetail(new ArtifactDescriptor(id, Locations.toLocation(TEMP_FOLDER.newFile())),
                            new ArtifactMeta(ArtifactClasses.builder().build()));
}
};