/**
 * Convert a string representation of a job id into an object while validating that the string is correctly
 * formatted. Throws an IllegalArgumentException if the string is invalid.
 *
 * @param jobIdStr String representation of a job id, in the form {@code <clusterId>-<jobNum>}.
 * @return converted JobId based on the input string.
 * @throws IllegalArgumentException if the string does not contain exactly one '-' separator with a
 *   non-empty cluster id before it and a numeric job number after it.
 */
public static JobId fromString(String jobIdStr) {
  int index = jobIdStr.indexOf("-");
  // Lazy %s template: the message string is only built when the check actually fails.
  Preconditions.checkArgument(index > 0, "invalid job id string %s", jobIdStr);
  String clusterId = jobIdStr.substring(0, index);
  // Reject ids with more than one separator, e.g. "1-2-3".
  Preconditions.checkArgument(jobIdStr.indexOf("-", index + 1) < 0, "invalid job id string %s", jobIdStr);
  // parseLong avoids the Long boxing of Long.valueOf. A non-numeric job number throws
  // NumberFormatException, which is an IllegalArgumentException, so the documented contract holds.
  long jobNum = Long.parseLong(jobIdStr.substring(index + 1));
  return new JobId(clusterId, jobNum);
}
// Parse the plan id (same "<clusterId>-<jobNum>" format as a job id) and fetch the corresponding job.
// NOTE(review): fromString throws IllegalArgumentException on a malformed planId — presumably the
// enclosing method handles or propagates that; confirm against the caller.
JobId jobId = JobId.fromString(planId); ClusterJob clusterJob = clusterStore.getClusterJob(jobId);
/**
 * Create a task id for the given job. The string form appends the task number, zero padded to at
 * least three digits, to the job id (e.g. "123-001-007").
 *
 * @param jobId Id of the job the task belongs to.
 * @param taskNum Number of the task within the job.
 */
public TaskId(JobId jobId, long taskNum) {
  this.taskNum = taskNum;
  this.jobId = jobId;
  this.id = String.format("%s-%03d", jobId.getId(), taskNum);
}
/**
 * Create a cluster job with the given job id that represent the given action to perform on a cluster, for the given
 * services on the given nodes. Null values for services or nodes indicates that the job covers all cluster services
 * or all cluster nodes.
 *
 * @param jobId Id of the job.
 * @param clusterAction Action the job is carrying out.
 * @param plannedServices Services affected by the job, with null indicating all services.
 * @param plannedNodes Nodes affected by the job, with null indicating all nodes.
 */
public ClusterJob(JobId jobId, ClusterAction clusterAction, Set<String> plannedServices, Set<String> plannedNodes) {
  // The id and cluster id are stored in string form, as taken from the structured JobId.
  this.jobId = jobId.getId();
  this.clusterId = jobId.getClusterId();
  this.clusterAction = clusterAction;
  this.plannedServices = plannedServices;
  this.plannedNodes = plannedNodes;
  // A freshly created job starts out unsubmitted, at stage 0, with no staged tasks and
  // no per-task status recorded yet.
  this.jobStatus = Status.NOT_SUBMITTED;
  this.currentStageNumber = 0;
  this.stagedTasks = Lists.newArrayList();
  this.taskStatus = Maps.newHashMap();
}
/**
 * Delete the job row keyed by (job_num, cluster_id). Deleting a job that does not exist is a no-op.
 *
 * @param jobId Id of the job to delete.
 * @throws IOException if there was a problem talking to the database.
 */
@Override
public void deleteClusterJob(JobId jobId) throws IOException {
  // try-with-resources closes the statement and connection in the right order even if
  // executeUpdate or a close() itself throws, replacing the manual nested finally blocks.
  try (Connection conn = dbConnectionPool.getConnection();
       PreparedStatement statement =
           conn.prepareStatement("DELETE FROM jobs WHERE job_num=? AND cluster_id=?")) {
    statement.setLong(1, jobId.getJobNum());
    // Cluster ids are stored as numeric columns even though JobId keeps them as strings.
    statement.setLong(2, Long.parseLong(jobId.getClusterId()));
    statement.executeUpdate();
  } catch (SQLException e) {
    throw new IOException(e);
  }
}
/**
 * Serialize and persist the given cluster job, inserting or updating as appropriate via the
 * ClusterJobDBPut helper.
 *
 * @param clusterJob Job to write.
 * @throws IOException if there was a problem talking to the database.
 */
@Override
public void writeClusterJob(ClusterJob clusterJob) throws IOException {
  // The job's own id string carries both the cluster id and the job number.
  JobId jobId = JobId.fromString(clusterJob.getJobId());
  long clusterId = Long.parseLong(jobId.getClusterId());
  // try-with-resources guarantees the connection is returned even if serialization or the
  // put fails, replacing the manual finally block.
  try (Connection conn = dbConnectionPool.getConnection()) {
    byte[] jobBytes = dbQueryExecutor.toBytes(clusterJob, ClusterJob.class);
    DBPut jobPut = new ClusterJobDBPut(clusterJob, jobBytes, jobId, clusterId);
    jobPut.executePut(conn);
  } catch (SQLException e) {
    throw new IOException(e);
  }
}
// Verifies that the system view can get, store, and delete a cluster owned by a regular tenant user.
@Test
public void testGetStoreDeleteClusterAsSystem() throws Exception {
  // Minimal cluster fixture owned by tenant1_user1 with two nodes and two services.
  Cluster cluster = Cluster.builder()
    .setID("104")
    .setAccount(tenant1_user1)
    .setName("example-hdfs-delete")
    .setProvider(Entities.ProviderExample.RACKSPACE)
    .setClusterTemplate(Entities.ClusterTemplateExample.HDFS)
    .setNodes(ImmutableSet.of("node1", "node2"))
    .setServices(ImmutableSet.of("s1", "s2"))
    .build();
  // Latest job id must be set since the store persists latest_job_num alongside the cluster.
  cluster.setLatestJobId(new JobId(cluster.getId(), 1).getId());
  assertGetStoreDeleteCluster(systemView, cluster);
}
@Override protected PreparedStatement getSetClusterStatement( Connection conn, long id, Cluster cluster, byte[] clusterBytes) throws SQLException { PreparedStatement statement = conn.prepareStatement( "UPDATE clusters SET cluster=?, owner_id=?, tenant_id=?, status=?, expire_time=?, latest_job_num=? WHERE id=?"); statement.setBytes(1, clusterBytes); statement.setString(2, cluster.getAccount().getUserId()); statement.setString(3, cluster.getAccount().getTenantId()); statement.setString(4, cluster.getStatus().name()); statement.setTimestamp(5, DBHelper.getTimestamp(cluster.getExpireTime())); statement.setLong(6, JobId.fromString(cluster.getLatestJobId()).getJobNum()); // where clause statement.setLong(7, id); return statement; }
// Verifies job listing: ownership filtering, completeness for owner and system views, and the limit.
@Test
public void testGetClusterJobs() throws Exception {
  Cluster cluster = createClusterObj("1");
  ClusterStoreView user1view = clusterStoreService.getView(cluster.getAccount());
  user1view.writeCluster(cluster);
  // Write 10 jobs belonging to this cluster.
  Set<ClusterJob> jobs = Sets.newHashSet();
  for (int i = 0; i < 10; i++) {
    JobId jobId = new JobId(cluster.getId(), i);
    ClusterJob job = new ClusterJob(jobId, ClusterAction.RESTART_SERVICES);
    systemView.writeClusterJob(job);
    jobs.add(job);
  }
  // this job shouldn't get fetched — it belongs to a different cluster ("2123").
  ClusterJob randomJob = new ClusterJob(JobId.fromString("2123-0214"), ClusterAction.CLUSTER_CONFIGURE);
  systemView.writeClusterJob(randomJob);
  // shouldn't be able to get since the cluster isn't owned by this user
  Assert.assertTrue(clusterStoreService.getView(tenant1_user2).getClusterJobs(cluster.getId(), -1).isEmpty());
  // check we can get all the jobs (limit of -1 means no limit)
  Assert.assertEquals(Sets.newHashSet(user1view.getClusterJobs(cluster.getId(), -1)), jobs);
  Assert.assertEquals(Sets.newHashSet(systemView.getClusterJobs(cluster.getId(), -1)), jobs);
  // check the limit — any 5 of the 10 written jobs may come back.
  List<ClusterJob> fetchedJobs = user1view.getClusterJobs(cluster.getId(), 5);
  Assert.assertEquals(5, fetchedJobs.size());
  Assert.assertTrue(jobs.containsAll(fetchedJobs));
  fetchedJobs = systemView.getClusterJobs(cluster.getId(), 5);
  Assert.assertEquals(5, fetchedJobs.size());
  Assert.assertTrue(jobs.containsAll(fetchedJobs));
}
// Delegates to the structured job id; returns the numeric job number within the cluster.
public long getJobNum() { return jobId.getJobNum(); }
// Delegates to the structured job id; returns the id of the cluster this job belongs to.
public String getClusterId() { return jobId.getClusterId(); }
// NOTE(review): `jobId` and `job` are declared twice below — these appear to be two separate
// scopes (e.g. two test blocks) collapsed together by extraction; confirm against the full file.
// First scope: complete a job for the given action.
JobId jobId = new JobId(clusterId, jobNum);
ClusterJob job = new ClusterJob(jobId, action);
cluster.setLatestJobId(jobId.getId());
taskService.completeJob(job, cluster);
// Second scope: complete a delete job with the next job number, after which the cluster's
// credentials are expected to have been cleaned out of the credential store.
jobNum++;
JobId jobId = new JobId(clusterId, jobNum);
ClusterJob job = new ClusterJob(jobId, ClusterAction.CLUSTER_DELETE);
cluster.setLatestJobId(jobId.getId());
taskService.completeJob(job, cluster);
Assert.assertTrue(credentialStore.get(account.getTenantId(), clusterId).isEmpty());
/**
 * Build the insert statement for a new cluster row. A cluster that has never run a job
 * (null latest job id) is stored with latest_job_num = 0.
 *
 * @param conn Connection to prepare the statement on.
 * @param id Numeric id for the new cluster row.
 * @param cluster Cluster whose fields populate the row.
 * @param clusterBytes Serialized form of the cluster.
 * @return prepared insert statement; caller is responsible for executing and closing it.
 * @throws SQLException if the statement could not be prepared.
 */
private PreparedStatement getInsertClusterStatement(
    Connection conn, long id, Cluster cluster, byte[] clusterBytes) throws SQLException {
  PreparedStatement statement = conn.prepareStatement(
    "INSERT INTO clusters (cluster, owner_id, tenant_id, status, expire_time,"
      + " create_time, name, id, latest_job_num) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)");
  statement.setBytes(1, clusterBytes);
  statement.setString(2, cluster.getAccount().getUserId());
  statement.setString(3, cluster.getAccount().getTenantId());
  statement.setString(4, cluster.getStatus().name());
  statement.setTimestamp(5, DBHelper.getTimestamp(cluster.getExpireTime()));
  statement.setTimestamp(6, DBHelper.getTimestamp(cluster.getCreateTime()));
  statement.setString(7, cluster.getName());
  statement.setLong(8, id);
  // Extract the numeric job number from the latest job id, defaulting to 0 when unset.
  String latestJobId = cluster.getLatestJobId();
  long latestJobNum = 0;
  if (latestJobId != null) {
    latestJobNum = JobId.fromString(latestJobId).getJobNum();
  }
  statement.setLong(9, latestJobNum);
  return statement;
}
// Serialize workers operating on the same cluster's jobs: the lock is keyed by queue name
// and cluster id, so jobs for different clusters proceed concurrently.
// NOTE(review): the matching unlock/finally is outside this view — confirm lock.unlock()
// is in the try's finally block.
JobId jobId = JobId.fromString(jobIdStr);
Lock lock = lockService.getJobLock(queueName, jobId.getClusterId());
lock.lock();
try {
/**
 * Fetch the job row keyed by (job_num, cluster_id) and deserialize it.
 *
 * @param jobId Id of the job to fetch.
 * @return the cluster job, or whatever the query executor returns when no row matches.
 * @throws IOException if there was a problem talking to the database.
 */
@Override
public ClusterJob getClusterJob(JobId jobId) throws IOException {
  // try-with-resources closes the statement and connection in order even if the query or a
  // close() throws, replacing the manual nested finally blocks.
  try (Connection conn = dbConnectionPool.getConnection();
       PreparedStatement statement =
           conn.prepareStatement("SELECT job FROM jobs WHERE job_num=? AND cluster_id=?")) {
    statement.setLong(1, jobId.getJobNum());
    // Cluster ids are stored as numeric columns even though JobId keeps them as strings.
    statement.setLong(2, Long.parseLong(jobId.getClusterId()));
    return dbQueryExecutor.getQueryItem(statement, ClusterJob.class);
  } catch (SQLException e) {
    LOG.error("Exception getting cluster job {}", jobId, e);
    throw new IOException("Exception getting cluster job " + jobId, e);
  }
}
/**
 * Build the update statement for an existing job row, keyed by (job_num, cluster_id).
 * Uses the serialized job bytes, job id, and cluster id captured by the enclosing put object.
 * Caller is responsible for executing and closing the returned statement.
 */
@Override
public PreparedStatement createUpdateStatement(Connection conn) throws SQLException {
  PreparedStatement updateStatement =
    conn.prepareStatement("UPDATE jobs SET job=?, status=? WHERE job_num=? AND cluster_id=?");
  updateStatement.setBytes(1, jobBytes);
  // Status is also stored as its own column (beyond the serialized blob) so it can be queried.
  updateStatement.setString(2, clusterJob.getJobStatus().name());
  updateStatement.setLong(3, jobId.getJobNum());
  updateStatement.setLong(4, clusterId);
  return updateStatement;
}
// Re-create tasks for every action from the retry point up to (but not including) the current
// action, cloning the failed task's node/service/action context under fresh task ids.
// NOTE(review): the loop's closing brace is outside this view.
for (int i = retryActionIndex; i < currentActionIndex; ++i) {
  ProvisionerAction action = taskOrder.get(i);
  // New task id is issued within the same job as the failed task.
  TaskId retryTaskId = idService.getNewTaskId(JobId.fromString(task.getJobId()));
  ClusterTask retry = new ClusterTask(action, retryTaskId, task.getNodeId(), task.getService(),
    task.getClusterAction(), task.getClusterTemplateName(), task.getAccount());
// Fixture: two clusters with distinct ids and latest job ids, both in tenant1.
// NOTE(review): the two builder chains below appear merged by extraction — the assignment of
// cluster1 (with .build()) and the start of the cluster2 builder seem to have been collapsed;
// confirm against the full file.
String cluster1ID = "123";
String cluster2ID = "321";
JobId jobid1 = new JobId(cluster1ID, 1);
JobId jobid2 = new JobId(cluster2ID, 2);
Cluster cluster1 = Cluster.builder()
  .setName("cluster1")
  .setID(cluster1ID)
  .setLatestJobID(jobid1.getId())
  .setClusterTemplate(Entities.ClusterTemplateExample.HDFS)
  .setAccount(tenant1_admin)
  .setName("cluster2")
  .setID(cluster2ID)
  .setLatestJobID(jobid2.getId())
  .setClusterTemplate(Entities.ClusterTemplateExample.HDFS)
  .setAccount(tenant1_admin)
/**
 * Get a unique job id that can be used for new {@link co.cask.coopr.scheduler.task.ClusterJob}s.
 *
 * @param clusterId Id of the cluster the job is for.
 * @return Unique job id.
 */
public JobId getNewJobId(String clusterId) {
  // Job numbers come from the shared id generator, so uniqueness is global across clusters,
  // not per-cluster — presumably intentional; verify against generateId's contract.
  return new JobId(clusterId, generateId(Type.JOB));
}
/**
 * Create a cluster task for performing the given provisioner action on a node.
 *
 * @param taskName Provisioner action the task performs.
 * @param taskId Structured id of the task; its job and cluster ids are flattened into this task.
 * @param nodeId Id of the node the task runs on.
 * @param service Service the task applies to, possibly null for node-level actions.
 * @param clusterAction Cluster-level action this task is part of.
 * @param clusterTemplateName Name of the template of the task's cluster.
 * @param account Account that owns the cluster.
 */
public ClusterTask(ProvisionerAction taskName, TaskId taskId, String nodeId, String service,
                   ClusterAction clusterAction, String clusterTemplateName, Account account) {
  this.taskId = taskId.getId();
  // getId() already returns a String, so the previous String.valueOf(...) wrapper was redundant.
  this.jobId = taskId.getJobId().getId();
  this.clusterId = taskId.getClusterId();
  this.taskName = taskName;
  this.clusterAction = clusterAction;
  this.nodeId = nodeId;
  this.service = service;
  this.attempts = Lists.newArrayList();
  //TODO: populate clusterTemplateName and account field for existing tasks: https://issues.cask.co/browse/COOPR-593
  this.clusterTemplateName = clusterTemplateName;
  this.account = account;
  // Every task starts with one in-progress attempt.
  addAttempt();
}