// NOTE(review): fragment — this anonymous visitor's enclosing PlanUtils.visit(...) call starts
// outside this span; the trailing '});' closes that call.
// Registers every connector of the job spec with the activity-graph builder.
@Override public void visit(IConnectorDescriptor conn) throws HyracksException { builder.addConnector(conn); } });
/**
 * Starts a job from a static {@link JobSpecification} by wrapping it in an
 * activity-cluster-graph-generator factory and delegating to the factory-based overload.
 */
@Override
public JobId startJob(JobSpecification jobSpec, EnumSet<JobFlag> jobFlags) throws Exception {
    return startJob(new JobSpecificationActivityClusterGraphGeneratorFactory(jobSpec), jobFlags);
}
/**
 * Stores {@code value} at position {@code index} of the list bound to {@code key},
 * creating the list on first use.
 *
 * @param map   map from key to an index-addressable list of values
 * @param key   key whose list receives the value
 * @param index position within the list to overwrite
 * @param value value to store
 */
private <K, V> void insertIntoIndexedMap(Map<K, List<V>> map, K key, int index, V value) {
    // computeIfAbsent replaces the manual get / null-check / put sequence.
    List<V> vList = map.computeIfAbsent(key, k -> new ArrayList<>());
    // extend presumably pads vList so that set(index, ...) is in bounds — TODO confirm helper semantics.
    extend(vList, index);
    vList.set(index, value);
}
// NOTE(review): this span appears to be a corrupted extraction — visitor registrations are
// duplicated and the final anonymous classes are never closed. Recover the original method
// from version control before editing; comments below annotate only what is visible.
// Builds the JobActivityGraph from the spec, then infers activity clusters from it.
@Override public IActivityClusterGraphGenerator createActivityClusterGraphGenerator(final ICCServiceContext ccServiceCtx, Set<JobFlag> jobFlags) throws HyracksException { final JobActivityGraphBuilder builder = new JobActivityGraphBuilder(spec, jobFlags); PlanUtils.visit(spec, new IConnectorDescriptorVisitor() { @Override public void visit(IConnectorDescriptor conn) throws HyracksException { PlanUtils.visit(spec, new IOperatorDescriptorVisitor() { @Override public void visit(IOperatorDescriptor op) { builder.finish(); final JobActivityGraph jag = builder.getActivityGraph(); ActivityClusterGraphBuilder acgb = new ActivityClusterGraphBuilder(); final ActivityClusterGraph acg = acgb.inferActivityClusters(jag); acg.setFrameSize(spec.getFrameSize()); acg.setMaxReattempts(spec.getMaxReattempts()); PlanUtils.visit(spec, new IOperatorDescriptorVisitor() { @Override public void visit(IOperatorDescriptor op) { PlanUtils.visit(spec, new IConnectorDescriptorVisitor() { @Override public void visit(IConnectorDescriptor conn) {
/**
 * Creates an HDFS {@code Scheduler} pointed at the cluster controller's client network endpoint.
 *
 * @param serviceCtx CC service context used to look up the cluster controller info
 * @return a connected scheduler instance
 * @throws HyracksDataException if the scheduler cannot be constructed
 */
public static Scheduler initializeHDFSScheduler(ICCServiceContext serviceCtx) throws HyracksDataException {
    ICCContext ccContext = serviceCtx.getCCContext();
    try {
        return new Scheduler(ccContext.getClusterControllerInfo().getClientNetAddress(),
                ccContext.getClusterControllerInfo().getClientNetPort());
    } catch (HyracksException e) {
        // Preserve the original failure as the cause instead of dropping it.
        // NOTE(review): assumes RuntimeDataException has a (code, cause) constructor — confirm.
        throw new RuntimeDataException(ErrorCode.UTIL_HDFS_UTILS_CANNOT_OBTAIN_HDFS_SCHEDULER, e);
    }
}
// NOTE(review): fragment — loop body is cut off before its closing braces.
// Fixed-point iteration: keep merging stage pairs until no mergeable pair remains.
while (changed) { changed = false; Pair<ActivityId, ActivityId> pair = findMergePair(jag, stages); if (pair != null) { merge(stageMap, stages, pair.getLeft(), pair.getRight()); changed = true;
/** Returns the id of the cluster controller carried in this node's parameters. */
public CcId getCcId() {
    ClusterControllerInfo ccInfo = getNodeParameters().getClusterControllerInfo();
    return ccInfo.getCcId();
}
/**
 * Connects an operator input to an activity input: records the connector feeding
 * {@code task}'s input {@code taskInputIndex} and registers the activity as that
 * connector's consumer.
 *
 * @param operatorInputIndex input index on the owning operator
 * @param task               activity that consumes the input
 * @param taskInputIndex     input index on the activity
 */
@Override
public void addSourceEdge(int operatorInputIndex, IActivity task, int taskInputIndex) {
    IOperatorDescriptor op = activityOperatorMap.get(task.getActivityId());
    if (LOGGER.isTraceEnabled()) {
        // Fix: log the operator endpoint, not the activity id twice.
        LOGGER.trace("Adding source edge: " + op.getOperatorId() + ":" + operatorInputIndex + " -> "
                + task.getActivityId() + ":" + taskInputIndex);
    }
    IConnectorDescriptor conn = jobSpec.getInputConnectorDescriptor(op, operatorInputIndex);
    insertIntoIndexedMap(jag.getActivityInputMap(), task.getActivityId(), taskInputIndex, conn);
    connectorConsumerMap.put(conn.getConnectorId(), Pair.of(task, taskInputIndex));
}
/**
 * Applies {@code visitor} to every connector in the job specification,
 * in the iteration order of the connector map's value view.
 */
public static void visit(JobSpecification spec, IConnectorDescriptorVisitor visitor) throws HyracksException {
    for (IConnectorDescriptor connectorDescriptor : spec.getConnectorMap().values()) {
        visitor.visit(connectorDescriptor);
    }
}
}
/**
 * Applies {@code visitor} to every operator in the job specification,
 * visiting each operator id at most once.
 */
public static void visit(JobSpecification spec, IOperatorDescriptorVisitor visitor) throws HyracksException {
    // Tracks operator ids already handed to the visitor.
    final Set<OperatorDescriptorId> visited = new HashSet<>();
    for (IOperatorDescriptor operator : spec.getOperatorMap().values()) {
        visitOperator(visitor, visited, operator);
    }
}
/**
 * Visits {@code op} unless its operator id is already in {@code seen}, then marks it seen.
 *
 * @param visitor callback to apply to unseen operators
 * @param seen    ids already visited; updated in place
 * @param op      operator to (possibly) visit
 */
private static void visitOperator(IOperatorDescriptorVisitor visitor, Set<OperatorDescriptorId> seen, IOperatorDescriptor op) throws HyracksException {
    // Set.add returns false when the id was already present, collapsing the
    // contains-then-add sequence into a single membership test.
    if (seen.add(op.getOperatorId())) {
        visitor.visit(op);
    }
}
/** Records that {@code blocked} may not start until {@code blocker} has completed. */
@Override
public void addBlockingEdge(IActivity blocker, IActivity blocked) {
    ActivityId blockedId = blocked.getActivityId();
    ActivityId blockerId = blocker.getActivityId();
    addToValueSet(jag.getBlocked2BlockerMap(), blockedId, blockerId);
}
// Derive the bare file name from the jar path.
// NOTE(review): splits on '/' only — a Windows-style path would not split here; confirm
// callers normalize separators before reaching this point.
int slashIndex = jar.lastIndexOf('/');
String fileName = jar.substring(slashIndex + 1);
// Build the upload URL for the application binary on the CC web server.
// NOTE(review): '&' between the deployment id and file name looks unusual for a URL path
// segment — confirm the server-side route actually expects this delimiter.
String url = "http://" + ccHost + ":" + ccInfo.getWebPort() + "/applications/" + deploymentId.toString() + "&" + fileName;
HttpPut put = new HttpPut(url);
/**
 * Boots the cluster controller service. The statement order matters: IPC endpoints and the
 * web server are constructed and started before the application and work queue, and NC
 * connections are initiated last.
 */
@Override
public void start() throws Exception {
    LOGGER.log(Level.INFO, "Starting ClusterControllerService: " + this);
    serverCtx = new ServerContext(ServerContext.ServerType.CLUSTER_CONTROLLER, new File(ccConfig.getRootDir()));
    // Cluster-facing IPC endpoint (node controllers connect here).
    IIPCI ccIPCI = new ClusterControllerIPCI(this);
    clusterIPC = new IPCSystem(new InetSocketAddress(ccConfig.getClusterListenPort()),
            networkSecurityManager.getSocketChannelFactory(), ccIPCI, new CCNCFunctions.SerializerDeserializer());
    // Client-facing IPC endpoint (job submission clients connect here).
    IIPCI ciIPCI = new ClientInterfaceIPCI(this, jobIdFactory);
    clientIPC = new IPCSystem(new InetSocketAddress(ccConfig.getClientListenAddress(), ccConfig.getClientListenPort()),
            networkSecurityManager.getSocketChannelFactory(), ciIPCI, new JavaSerializationBasedPayloadSerializerDeserializer());
    webServer = new WebServer(this, ccConfig.getConsoleListenPort());
    clusterIPC.start();
    clientIPC.start();
    webServer.start();
    // Publish the public (not listen) addresses to clients.
    info = new ClusterControllerInfo(ccId, ccConfig.getClientPublicAddress(), ccConfig.getClientPublicPort(), ccConfig.getConsolePublicPort());
    // Periodic sweep for dead nodes, starting immediately.
    timer.schedule(sweeper, 0, ccConfig.getDeadNodeSweepThreshold());
    jobLog.open();
    startApplication();
    resultDirectoryService.init(executor);
    workQueue.start();
    // Initiate outbound connections to known node controllers after everything else is up.
    connectNCs();
    LOGGER.log(Level.INFO, "Started ClusterControllerService");
    notifyApplication();
}
/**
 * Lazily creates (or re-creates, if disconnected) the client connection to the primary CC,
 * using double-checked locking on {@code this}.
 * NOTE(review): the unsynchronized fast-path read requires the {@code hcc} field to be
 * {@code volatile} for safe publication — confirm the field declaration.
 */
@Override
public IHyracksClientConnection getHcc() throws HyracksDataException {
    if (hcc == null || !hcc.isConnected()) {
        synchronized (this) {
            // Re-check under the lock: another thread may have connected meanwhile.
            if (hcc == null || !hcc.isConnected()) {
                try {
                    NodeControllerService ncSrv = (NodeControllerService) ncServiceContext.getControllerService();
                    // TODO(mblow): multicc
                    CcId primaryCcId = ncSrv.getPrimaryCcId();
                    ClusterControllerInfo ccInfo = ncSrv.getNodeParameters(primaryCcId).getClusterControllerInfo();
                    // Connect to the primary CC's client network endpoint.
                    hcc = new HyracksConnection(ccInfo.getClientNetAddress(), ccInfo.getClientNetPort(),
                            ncSrv.getNetworkSecurityManager().getSocketChannelFactory());
                } catch (Exception e) {
                    throw HyracksDataException.create(e);
                }
            }
        }
    }
    return hcc;
}
/**
 * Deploys a job specification by wrapping it in an ACGG factory and delegating to
 * the factory-based overload.
 */
@Override
public DeployedJobSpecId deployJobSpec(JobSpecification jobSpec) throws Exception {
    return deployJobSpec(new JobSpecificationActivityClusterGraphGeneratorFactory(jobSpec));
}
/**
 * Removes and returns the pending CC connection matching the registration id in
 * {@code nodeParameters}.
 *
 * @throws IllegalStateException if no registration is pending under that id
 */
private CcConnection getPendingNodeRegistration(NodeParameters nodeParameters) {
    CcConnection pending = pendingRegistrations.remove(nodeParameters.getRegistrationId());
    if (pending != null) {
        return pending;
    }
    throw new IllegalStateException("Unknown pending node registration " + nodeParameters.getRegistrationId()
            + " for " + nodeParameters.getClusterControllerInfo().getCcId());
}
/**
 * Connects an operator output to an activity output: records the connector fed by
 * {@code task}'s output {@code taskOutputIndex} and registers the activity as that
 * connector's producer.
 *
 * @param operatorOutputIndex output index on the owning operator
 * @param task                activity that produces the output
 * @param taskOutputIndex     output index on the activity
 */
@Override
public void addTargetEdge(int operatorOutputIndex, IActivity task, int taskOutputIndex) {
    IOperatorDescriptor op = activityOperatorMap.get(task.getActivityId());
    if (LOGGER.isTraceEnabled()) {
        // Fix: log the operator endpoint, not the activity id twice.
        LOGGER.trace("Adding target edge: " + op.getOperatorId() + ":" + operatorOutputIndex + " -> "
                + task.getActivityId() + ":" + taskOutputIndex);
    }
    IConnectorDescriptor conn = jobSpec.getOutputConnectorDescriptor(op, operatorOutputIndex);
    insertIntoIndexedMap(jag.getActivityOutputMap(), task.getActivityId(), taskOutputIndex, conn);
    connectorProducerMap.put(conn.getConnectorId(), Pair.of(task, taskOutputIndex));
}
/**
 * Starts a job under a specific deployment by wrapping the spec in an ACGG factory
 * and delegating to the factory-based overload.
 */
@Override
public JobId startJob(DeploymentId deploymentId, JobSpecification jobSpec, EnumSet<JobFlag> jobFlags) throws Exception {
    return startJob(deploymentId, new JobSpecificationActivityClusterGraphGeneratorFactory(jobSpec), jobFlags);
}
/**
 * Replaces a previously deployed job spec: serializes a fresh ACGG factory for the new
 * spec and ships it to the cluster controller under the existing deployed-spec id.
 */
@Override
public void redeployJobSpec(DeployedJobSpecId deployedJobSpecId, JobSpecification jobSpec) throws Exception {
    JobSpecificationActivityClusterGraphGeneratorFactory factory =
            new JobSpecificationActivityClusterGraphGeneratorFactory(jobSpec);
    byte[] serializedFactory = JavaSerializationUtils.serialize(factory);
    hci.redeployJobSpec(deployedJobSpecId, serializedFactory);
}