/**
 * Returns the string representation of the {@link JobID} that identifies
 * this environment.
 *
 * @return the JobID rendered as a string
 * @see #getId()
 */
@PublicEvolving
public String getIdString() {
    return jobID.toString();
}
/**
 * Creates a program target description from deployment classes.
 *
 * @param clusterId cluster id
 * @param jobId job id
 * @param webInterfaceUrl URL of the cluster's web interface
 * @param <C> cluster id type
 * @return program target descriptor
 */
public static <C> ProgramTargetDescriptor of(C clusterId, JobID jobId, String webInterfaceUrl) {
    String clusterIdString;
    try {
        // check if cluster id has a toString method
        // NOTE(review): getDeclaredMethod only finds a toString() declared directly on the
        // runtime class, not one inherited from a superclass — presumably intentional so
        // that ids without their own override fall back to a readable class name; confirm.
        clusterId.getClass().getDeclaredMethod("toString");
        clusterIdString = clusterId.toString();
    } catch (NoSuchMethodException e) {
        // no custom toString(): use the simple class name as the printable identifier
        clusterIdString = clusterId.getClass().getSimpleName();
    }
    return new ProgramTargetDescriptor(clusterIdString, jobId.toString(), webInterfaceUrl);
}
}
/**
 * Looks up an externalized checkpoint directory for the given job.
 *
 * <p>Scans {@code <checkpointDir>/<jobId>} for a {@code chk-*} subdirectory that
 * contains a metadata file (a child whose name contains {@code "meta"}).
 *
 * @param checkpointDir base checkpoint directory
 * @param jobId the job whose checkpoints should be searched
 * @return any matching checkpoint path, or empty if none is found
 * @throws IOException if the job's checkpoint directory cannot be listed
 */
private static Optional<Path> findExternalizedCheckpoint(File checkpointDir, JobID jobId) throws IOException {
    final Path jobCheckpointDir = checkpointDir.toPath().resolve(jobId.toString());
    try (Stream<Path> candidates = Files.list(jobCheckpointDir)) {
        return candidates
            .filter(dir -> {
                final String name = dir.getFileName().toString();
                return name.startsWith("chk-");
            })
            .filter(dir -> {
                try (Stream<Path> contents = Files.list(dir)) {
                    return contents.anyMatch(
                        child -> child.getFileName().toString().contains("meta"));
                } catch (IOException ignored) {
                    // an unreadable chk- directory is treated as containing no checkpoint
                    return false;
                }
            })
            .findAny();
    }
}
/** Verifies that a well-formed modify call returns the job id and parallelism it was given. */
@Test
public void testModifyJob() throws Exception {
    final JobID expectedJobId = new JobID();
    final int expectedParallelism = 42;

    final String[] args = new String[] {
        expectedJobId.toString(), "-p", Integer.toString(expectedParallelism)};
    final Tuple2<JobID, Integer> result = callModify(args);

    assertThat(result.f0, Matchers.is(expectedJobId));
    assertThat(result.f1, Matchers.is(expectedParallelism));
}
@Test public void testStop() throws Exception { // test stop properly JobID jid = new JobID(); String jidString = jid.toString(); String[] parameters = { jidString }; final ClusterClient<String> clusterClient = createClusterClient(null); MockedCliFrontend testFrontend = new MockedCliFrontend(clusterClient); testFrontend.stop(parameters); Mockito.verify(clusterClient, times(1)).stop(any(JobID.class)); }
@Test public void testCancel() throws Exception { // test cancel properly JobID jid = new JobID(); String[] parameters = { jid.toString() }; final ClusterClient<String> clusterClient = createClusterClient(); MockedCliFrontend testFrontend = new MockedCliFrontend(clusterClient); testFrontend.cancel(parameters); Mockito.verify(clusterClient, times(1)).cancel(any(JobID.class)); }
@Test public void testMissingParallelism() throws Exception { final JobID jobId = new JobID(); final String[] args = {jobId.toString()}; try { callModify(args); fail("Expected CliArgsException"); } catch (CliArgsException expected) { // expected } }
@Test public void testUnparsableParalllelism() throws Exception { final JobID jobId = new JobID(); final String[] args = {jobId.toString(), "-p", "foobar"}; try { callModify(args); fail("Expected CliArgsException"); } catch (CliArgsException expected) { // expected } }
json.writeEndObject(); json.writeStringField("job_id", result.getJobID().toString()); json.writeNumberField("runtime_ms", result.getNetRuntime());
/**
 * Tests that a CLI call with a custom savepoint directory target is
 * forwarded correctly to the cluster client.
 */
@Test
public void testTriggerSavepointCustomTarget() throws Exception {
    replaceStdOutAndStdErr();

    final JobID jobId = new JobID();
    final String customTarget = "customTargetDirectory";

    final ClusterClient<String> clusterClient = createClusterClient(customTarget);
    try {
        final MockedCliFrontend frontend = new MockedCliFrontend(clusterClient);

        frontend.savepoint(new String[] {jobId.toString(), customTarget});

        // exactly one trigger with the requested directory, and the directory echoed to stdout
        verify(clusterClient, times(1)).triggerSavepoint(eq(jobId), eq(customTarget));
        assertTrue(buffer.toString().contains(customTarget));
    } finally {
        clusterClient.shutdown();
        restoreStdOutAndStdErr();
    }
}
/** * Tests cancelling with the savepoint option. */ @Test public void testCancelWithSavepoint() throws Exception { { // Cancel with savepoint (no target directory) JobID jid = new JobID(); String[] parameters = { "-s", jid.toString() }; final ClusterClient<String> clusterClient = createClusterClient(); MockedCliFrontend testFrontend = new MockedCliFrontend(clusterClient); testFrontend.cancel(parameters); Mockito.verify(clusterClient, times(1)) .cancelWithSavepoint(any(JobID.class), isNull(String.class)); } { // Cancel with savepoint (with target directory) JobID jid = new JobID(); String[] parameters = { "-s", "targetDirectory", jid.toString() }; final ClusterClient<String> clusterClient = createClusterClient(); MockedCliFrontend testFrontend = new MockedCliFrontend(clusterClient); testFrontend.cancel(parameters); Mockito.verify(clusterClient, times(1)) .cancelWithSavepoint(any(JobID.class), notNull(String.class)); } }
/**
 * Tests that triggering a savepoint without a target directory forwards a null
 * directory and prints the resulting savepoint path.
 */
@Test
public void testTriggerSavepointSuccess() throws Exception {
    replaceStdOutAndStdErr();

    final JobID jobId = new JobID();
    final String expectedPath = "expectedSavepointPath";

    final ClusterClient<String> clusterClient = createClusterClient(expectedPath);
    try {
        final MockedCliFrontend frontend = new MockedCliFrontend(clusterClient);

        frontend.savepoint(new String[] {jobId.toString()});

        verify(clusterClient, times(1)).triggerSavepoint(eq(jobId), isNull(String.class));
        assertTrue(buffer.toString().contains(expectedPath));
    } finally {
        clusterClient.shutdown();
        restoreStdOutAndStdErr();
    }
}
@Test public void testUnknownJobId() throws Exception { // test unknown job Id JobID jid = new JobID(); String[] parameters = { jid.toString() }; String expectedMessage = "Test exception"; FlinkException testException = new FlinkException(expectedMessage); final ClusterClient<String> clusterClient = createClusterClient(testException); MockedCliFrontend testFrontend = new MockedCliFrontend(clusterClient); try { testFrontend.stop(parameters); fail("Should have failed."); } catch (FlinkException e) { assertTrue(ExceptionUtils.findThrowableWithMessage(e, expectedMessage).isPresent()); } }
/** A failing savepoint trigger must be reported as a FlinkException carrying the cause's message. */
@Test
public void testTriggerSavepointFailure() throws Exception {
    replaceStdOutAndStdErr();

    final JobID jobId = new JobID();
    final String expectedTestException = "expectedTestException";
    final Exception testException = new Exception(expectedTestException);

    final ClusterClient<String> clusterClient = createFailingClusterClient(testException);
    try {
        final MockedCliFrontend frontend = new MockedCliFrontend(clusterClient);

        try {
            frontend.savepoint(new String[] {jobId.toString()});
            fail("Savepoint should have failed.");
        } catch (FlinkException e) {
            assertTrue(
                ExceptionUtils.findThrowableWithMessage(e, expectedTestException).isPresent());
        }
    } finally {
        clusterClient.shutdown();
        restoreStdOutAndStdErr();
    }
}
@Test public void testTriggerSavepointForNonExistingJob() throws Exception { // Config final int numTaskManagers = 1; final int numSlotsPerTaskManager = 1; final Configuration config = new Configuration(); config.setString(CheckpointingOptions.SAVEPOINT_DIRECTORY, savepointDir.toURI().toString()); final MiniClusterWithClientResource cluster = new MiniClusterWithClientResource( new MiniClusterResourceConfiguration.Builder() .setConfiguration(config) .setNumberTaskManagers(numTaskManagers) .setNumberSlotsPerTaskManager(numSlotsPerTaskManager) .build()); cluster.before(); final ClusterClient<?> client = cluster.getClusterClient(); final JobID jobID = new JobID(); try { client.triggerSavepoint(jobID, null).get(); fail(); } catch (ExecutionException e) { assertTrue(ExceptionUtils.findThrowable(e, FlinkJobNotFoundException.class).isPresent()); assertTrue(ExceptionUtils.findThrowableWithMessage(e, jobID.toString()).isPresent()); } finally { cluster.after(); } }
try (JsonArray finished = new JsonArray(gen, "finished")) { try (JsonObject job = new JsonObject(gen)) { gen.writeStringField("jid", jobID.toString()); gen.writeStringField("name", "testjob"); gen.writeStringField("state", JobStatus.FINISHED.name());
} catch (ExecutionException e) { assertTrue(ExceptionUtils.findThrowable(e, IllegalStateException.class).isPresent()); assertTrue(ExceptionUtils.findThrowableWithMessage(e, graph.getJobID().toString()).isPresent()); assertTrue(ExceptionUtils.findThrowableWithMessage(e, "is not a streaming job").isPresent()); } finally {
final String resultId = jobGraph.getJobID().toString(); resultStore.storeResult(resultId, result);
/**
 * Returns the string representation of the {@link JobID} that identifies
 * this environment.
 *
 * @return the JobID rendered as a string
 * @see #getId()
 */
@PublicEvolving
public String getIdString() {
    return jobID.toString();
}
/**
 * Archives this handler's JSON response for the given execution graph, keyed by
 * the REST endpoint URL with the job-id path parameter substituted in.
 *
 * @param graph the execution graph to archive accumulators for
 * @return a single archived (path, json) entry
 * @throws IOException if creating the accumulator info fails
 */
@Override
public Collection<ArchivedJson> archiveJsonWithPath(AccessExecutionGraph graph) throws IOException {
    final ResponseBody json = createJobAccumulatorsInfo(graph, true);
    final String urlTemplate = getMessageHeaders().getTargetRestEndpointURL();
    // substitute the ":jobid"-style path parameter with the concrete job id
    final String path = urlTemplate.replace(':' + JobIDPathParameter.KEY, graph.getJobID().toString());
    return Collections.singleton(new ArchivedJson(path, json));
}