/**
 * Batch merge of {@link ZNRecordDelta}
 * @see #merge(ZNRecordDelta)
 * @param deltaList
 */
void merge(List<ZNRecordDelta> deltaList) {
  // Apply each delta in list order via the single-delta merge policy.
  deltaList.forEach(this::merge);
}
@Override
public ZNRecord update(ZNRecord current) {
  // Nothing stored yet: write our record as-is.
  if (current == null) {
    return _record;
  }
  // Otherwise fold our record into the existing one and keep it.
  current.merge(_record);
  return current;
}
}
@Override
public ZNRecord update(ZNRecord currentData) {
  // Replace outright when there is no existing data or merging is disabled.
  if (currentData == null || !mergeOnUpdate) {
    return record;
  }
  currentData.merge(record);
  return currentData;
}
};
@Override
public ZNRecord update(ZNRecord current) {
  ZNRecord result = current;
  if (result != null) {
    // Existing data: merge our record into it.
    result.merge(_record);
  } else {
    // No existing data: our record becomes the new value.
    result = _record;
  }
  return result;
}
}
@Override
public ZNRecord update(ZNRecord currentData) {
  // Merge only when existing data is present and merging is enabled.
  boolean shouldMerge = (currentData != null) && mergeOnUpdate;
  if (shouldMerge) {
    currentData.merge(record);
    return currentData;
  }
  return record;
}
};
/**
 * Batch merge of {@link ZNRecordDelta}
 * @see #merge(ZNRecordDelta)
 * @param deltaList
 */
void merge(List<ZNRecordDelta> deltaList) {
  // Deltas are applied in list order, one at a time.
  for (int i = 0; i < deltaList.size(); i++) {
    merge(deltaList.get(i));
  }
}
public void merge(CurrentState curState) {
  // Fold the other current state's record into our delta's record.
  ZNRecord incoming = curState.getRecord();
  _curStateDelta.getRecord().merge(incoming);
}
}
public void merge(CurrentState curState) {
  // Merge the given state's record into the delta record held here.
  ZNRecord target = _curStateDelta.getRecord();
  target.merge(curState.getRecord());
}
}
public void merge(CurrentState anotherDelta) {
  // Absorb the other delta's record into ours.
  ZNRecord other = anotherDelta.getRecord();
  _delta.getRecord().merge(other);
}
public void merge(CurrentState anotherDelta) {
  // Merge the incoming delta record into the one we hold.
  ZNRecord mine = _delta.getRecord();
  mine.merge(anotherDelta.getRecord());
}
/**
 * Merge a list of ZNRecords into a single ZNRecord.
 * <p>
 * Null entries in the list are skipped; the id of the assembled record is taken
 * from the first non-null entry.
 * @param records records to merge; may be null or empty
 * @return the merged {@link ZNRecord}, or null if there was nothing to merge
 */
public ZNRecord assemble(List<ZNRecord> records) {
  // Guard clause + isEmpty() instead of nested null/size() > 0 check.
  if (records == null || records.isEmpty()) {
    return null;
  }
  ZNRecord assembledRecord = null;
  for (ZNRecord record : records) {
    if (record == null) {
      continue;
    }
    if (assembledRecord == null) {
      // Seed the result with the first non-null record's id.
      assembledRecord = new ZNRecord(record.getId());
    }
    assembledRecord.merge(record);
  }
  // Still null if every entry was null — same contract as before.
  return assembledRecord;
}
/**
 * Merge a list of ZNRecords into a single ZNRecord
 * @param records
 * @return {@link ZNRecord}
 */
public ZNRecord assemble(List<ZNRecord> records) {
  ZNRecord merged = null;
  if (records != null && !records.isEmpty()) {
    for (ZNRecord entry : records) {
      // Skip null entries entirely.
      if (entry != null) {
        if (merged == null) {
          // First non-null entry supplies the id of the result.
          merged = new ZNRecord(entry.getId());
        }
        merged.merge(entry);
      }
    }
  }
  // Null when the input was null, empty, or all-null.
  return merged;
}
// A non-empty delta list means this record carries pending deltas; materialize
// them into a fresh ZNRecord before creating the node.
// NOTE(review): presumably ZNRecord.merge(record) applies the delta list of the
// source record to the empty target — confirm against ZNRecord.merge semantics.
if (record.getDeltaList().size() > 0) {
  ZNRecord value = new ZNRecord(record.getId());
  value.merge(record);
  client.create(path, value, mode);
} else {
@Override
public ZNRecord update(ZNRecord currentData) {
  ZNRecord newRec = new ZNRecord(message.getResourceName());
  if (currentData != null) {
    // BUG FIX: the generation counter must be read from the EXISTING record.
    // The original read getSimpleField("currentGen") from the freshly created
    // (empty) newRec, so currentGen was always 0 and the generation never
    // advanced past 1. The three other reads were unused and are removed.
    int currentGen = convertToInt(currentData.getSimpleField("currentGen"), 0);
    newRec.setSimpleField("currentGen", Integer.toString(currentGen + 1));
    newRec.setSimpleField("currentGenStartSeq", Integer.toString(1));
    if (currentGen > 0) {
      // Record the generation we are superseding and where it ended.
      newRec.setSimpleField("prevGen", Integer.toString(currentGen));
      int localEndSeq = 1;
      if (lastRecordProcessed != null) {
        localEndSeq = (int) lastRecordProcessed.txid;
      }
      newRec.setSimpleField("prevGenEndSeq", "" + localEndSeq);
    }
    // NOTE(review): merge semantics decide whether currentData's old simple
    // fields can clobber the values set above — verify ZNRecord.merge behavior.
    newRec.merge(currentData);
  } else {
    // First write: start at generation 1, sequence 1.
    newRec.setSimpleField("currentGen", Integer.toString(1));
    newRec.setSimpleField("currentGenStartSeq", Integer.toString(1));
  }
  return newRec;
}
/**
 * Merge in a {@link ZNRecordDelta} corresponding to its merge policy
 * @param delta
 */
void merge(ZNRecordDelta delta) {
  // Dispatch on the delta's merge operation.
  switch (delta.getMergeOperation()) {
  case ADD:
    merge(delta.getRecord());
    break;
  case SUBTRACT:
    subtract(delta.getRecord());
    break;
  case UPDATE:
    update(delta.getRecord());
    break;
  default:
    // Unknown operation: no-op, matching the original if/else chain.
    break;
  }
}
/**
 * Merge in a {@link ZNRecordDelta} corresponding to its merge policy
 * @param delta
 */
void merge(ZNRecordDelta delta) {
  // Read the operation once instead of comparing repeatedly.
  MergeOperation op = delta.getMergeOperation();
  ZNRecord payload = delta.getRecord();
  if (op == MergeOperation.ADD) {
    merge(payload);
  } else if (op == MergeOperation.SUBTRACT) {
    subtract(payload);
  } else if (op == MergeOperation.UPDATE) {
    update(payload);
  }
  // Any other operation is silently ignored, as before.
}
StringRepresentation getHostedEntitiesRepresentation(String clusterName, String jobQueueName) throws Exception { ZkClient zkClient = ResourceUtil.getAttributeFromCtx(getContext(), ResourceUtil.ContextKey.ZKCLIENT); HelixDataAccessor accessor = ClusterRepresentationUtil.getClusterDataAccessor(zkClient, clusterName); PropertyKey.Builder keyBuilder = accessor.keyBuilder(); TaskDriver taskDriver = new TaskDriver(zkClient, clusterName); // Get job queue config // TODO: fix this to use workflowConfig. ResourceConfig jobQueueConfig = accessor.getProperty(keyBuilder.resourceConfig(jobQueueName)); // Get job queue context WorkflowContext ctx = taskDriver.getWorkflowContext(jobQueueName); // Create the result ZNRecord hostedEntitiesRecord = new ZNRecord(jobQueueName); if (jobQueueConfig != null) { hostedEntitiesRecord.merge(jobQueueConfig.getRecord()); } if (ctx != null) { hostedEntitiesRecord.merge(ctx.getRecord()); } StringRepresentation representation = new StringRepresentation(ClusterRepresentationUtil.ZNRecordToJson(hostedEntitiesRecord), MediaType.APPLICATION_JSON); return representation; }
@Test
public void testJobStateOnCreation() {
  // Build a one-job workflow.
  String jobName = "job";
  JobConfig.Builder jobConfigBuilder = new JobConfig.Builder()
      .setCommand(MockTask.TASK_COMMAND)
      .setTargetResource(WORKFLOW_NAME)
      .setTargetPartitionStates(Sets.newHashSet("SLAVE", "MASTER"))
      .setJobCommandConfigMap(WorkflowGenerator.DEFAULT_COMMAND_CONFIG);
  Workflow workflow =
      new Workflow.Builder(WORKFLOW_NAME).addJob(jobName, jobConfigBuilder).build();

  // Register the workflow and job configs with the cluster cache.
  WorkflowConfig workflowConfig = workflow.getWorkflowConfig();
  JobConfig jobConfig = jobConfigBuilder.build();
  workflowConfig.getRecord().merge(jobConfig.getRecord());
  _cache.getJobConfigMap().put(WORKFLOW_NAME + "_" + jobName, jobConfig);
  _cache.getWorkflowConfigMap().put(WORKFLOW_NAME, workflowConfig);

  // Run the rebalancer once; the assignment itself is not under test.
  WorkflowRebalancer workflowRebalancer = new WorkflowRebalancer();
  workflowRebalancer.init(_manager);
  workflowRebalancer
      .computeBestPossiblePartitionState(_cache, _idealState, _resource, _currStateOutput);

  // Every job in the freshly created workflow must start in NOT_STARTED.
  WorkflowContext workflowContext = _cache.getWorkflowContext(WORKFLOW_NAME);
  for (Map.Entry<String, TaskState> entry : workflowContext.getJobStates().entrySet()) {
    Assert.assertEquals(entry.getValue(), TaskState.NOT_STARTED);
  }
}
}