// Factory methods for the common terminal states. FAILED and ERROR require
// the causing throwable and surface its localized message as the output.
public static ExecuteResult createSucceed() {
    return new ExecuteResult(State.SUCCEED, "succeed");
}

public static ExecuteResult createFailed(Throwable throwable) {
    Preconditions.checkArgument(throwable != null, "throwable cannot be null");
    return new ExecuteResult(State.FAILED, throwable.getLocalizedMessage(), throwable);
}

public static ExecuteResult createError(Throwable throwable) {
    Preconditions.checkArgument(throwable != null, "throwable cannot be null");
    return new ExecuteResult(State.ERROR, throwable.getLocalizedMessage(), throwable);
}
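A minimal sketch (not from the source) of how a concrete step typically consumes these factories: succeed on normal completion, wrap any unexpected exception via createError. runStep is a hypothetical helper used only for illustration.

@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    try {
        runStep(); // hypothetical unit of work
        return ExecuteResult.createSucceed();
    } catch (Exception e) {
        // unexpected exception becomes an ERROR result carrying the throwable
        return ExecuteResult.createError(e);
    }
}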
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    return new ExecuteResult(ExecuteResult.State.SUCCEED, "");
}
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    try {
        Thread.sleep(1000);
    } catch (InterruptedException e) {
        // restore the interrupt flag rather than silently swallowing the interruption
        Thread.currentThread().interrupt();
    }
    return new ExecuteResult();
}
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    try {
        Thread.sleep(1000);
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt(); // restore the interrupt flag
    }
    return new ExecuteResult(ExecuteResult.State.FAILED, "failed");
}
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    KylinConfig config = KylinConfig.getInstanceFromEnv();
    try {
        createFlatHiveTable(config);
        return new ExecuteResult(ExecuteResult.State.SUCCEED, stepLogger.getBufferedLog());
    } catch (Exception e) {
        logger.error("job:" + getId() + " execute finished with exception", e);
        return new ExecuteResult(ExecuteResult.State.ERROR, stepLogger.getBufferedLog(), e);
    }
}
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    KylinConfig config = KylinConfig.getInstanceFromEnv();
    try {
        sqoopFlatHiveTable(config);
        return new ExecuteResult(ExecuteResult.State.SUCCEED, stepLogger.getBufferedLog());
    } catch (Exception e) {
        logger.error("job:" + getId() + " execute finished with exception", e);
        return new ExecuteResult(ExecuteResult.State.ERROR, stepLogger.getBufferedLog(), e);
    }
}
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    KylinConfig config = getCubeSpecificConfig();
    try {
        createFlatHiveTable(config);
        return new ExecuteResult(ExecuteResult.State.SUCCEED, stepLogger.getBufferedLog());
    } catch (Exception e) {
        logger.error("job:" + getId() + " execute finished with exception", e);
        return new ExecuteResult(ExecuteResult.State.ERROR, stepLogger.getBufferedLog(), e);
    }
}
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    KylinConfig config = context.getConfig();
    StringBuffer output = new StringBuffer();
    try {
        output.append(cleanUpIntermediateFlatTable(config));
        // don't drop the view, to avoid concurrency issues
        //output.append(cleanUpHiveViewIntermediateTable(config));
    } catch (IOException e) {
        logger.error("job:" + getId() + " execute finished with exception", e);
        return ExecuteResult.createError(e);
    }
    return new ExecuteResult(ExecuteResult.State.SUCCEED, output.toString());
}
private ExecuteResult onResumed(String appId, ExecutableManager mgr) throws ExecuteException {
    Map<String, String> info = new HashMap<>();
    try {
        logger.info("spark_job_id:" + appId + " resumed");
        info.put(ExecutableConstants.SPARK_JOB_ID, appId);

        // poll the Spark application state until it reaches a terminal state,
        // or until this job is paused or discarded from outside
        while (!isPaused() && !isDiscarded()) {
            String status = getAppState(appId);
            if (status.equals("FAILED") || status.equals("KILLED")) {
                mgr.updateJobOutput(getId(), ExecutableState.ERROR, null, appId + " has failed");
                return new ExecuteResult(ExecuteResult.State.FAILED, appId + " has failed");
            }
            if (status.equals("SUCCEEDED")) {
                mgr.addJobInfo(getId(), info);
                return new ExecuteResult(ExecuteResult.State.SUCCEED, appId + " has finished");
            }
            Thread.sleep(5000);
        }

        // the loop was exited because the job was paused or discarded
        killAppRetry(appId);
        if (isDiscarded()) {
            return new ExecuteResult(ExecuteResult.State.DISCARDED, appId + " is discarded");
        } else {
            return new ExecuteResult(ExecuteResult.State.STOPPED, appId + " is stopped");
        }
    } catch (Exception e) {
        logger.error("error running spark job:", e);
        return new ExecuteResult(ExecuteResult.State.ERROR, e.getLocalizedMessage());
    }
}
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    try {
        config = new JobEngineConfig(context.getConfig());
        List<String> toDeletePaths = getDeletePaths();
        dropHdfsPathOnCluster(toDeletePaths, HadoopUtil.getWorkingFileSystem());

        // if HBase runs on a separate cluster, drop the paths there as well
        if (StringUtils.isNotEmpty(context.getConfig().getHBaseClusterFs())) {
            dropHdfsPathOnCluster(toDeletePaths, FileSystem.get(HBaseConnection.getCurrentHBaseConfiguration()));
        }
    } catch (IOException e) {
        logger.error("job:" + getId() + " execute finished with exception", e);
        output.append("\n").append(e.getLocalizedMessage());
        return new ExecuteResult(ExecuteResult.State.ERROR, output.toString(), e);
    }
    return new ExecuteResult(ExecuteResult.State.SUCCEED, output.toString());
}
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    doingWork = true;
    try {
        for (int i = 0; i < 60; i++) {
            sleepOneSecond();
            if (isDiscarded())
                return new ExecuteResult(ExecuteResult.State.STOPPED, "stopped");
        }
        return new ExecuteResult();
    } finally {
        doingWork = false;
    }
}
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    List<? extends Executable> executables = getTasks();
    final int size = executables.size();
    for (int i = 0; i < size; ++i) {
        Executable subTask = executables.get(i);
        ExecutableState state = subTask.getStatus();
        if (state == ExecutableState.RUNNING) {
            // a subtask is already running; no need to start a new one
            break;
        } else if (state == ExecutableState.STOPPED) {
            // the job is paused
            break;
        } else if (state == ExecutableState.ERROR) {
            throw new IllegalStateException(
                    "invalid subtask state, subtask:" + subTask.getName() + ", state:" + subTask.getStatus());
        }
        if (subTask.isRunnable()) {
            return subTask.execute(context);
        }
    }
    return new ExecuteResult(ExecuteResult.State.SUCCEED);
}
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    final CubeManager cubeManager = CubeManager.getInstance(context.getConfig());
    final CubeInstance cube = cubeManager.getCube(CubingExecutableUtil.getCubeName(this.getParams()));
    Set<Long> recommendCuboids = cube.getCuboidsRecommend();
    try {
        List<CubeSegment> newSegments = cube.getSegments(SegmentStatusEnum.READY_PENDING);
        Map<Long, Long> recommendCuboidsWithStats = CuboidStatsReaderUtil
                .readCuboidStatsFromSegments(recommendCuboids, newSegments);
        if (recommendCuboidsWithStats == null) {
            throw new RuntimeException("Failed to get statistics for recommended cuboids after optimization");
        }
        cubeManager.promoteCheckpointOptimizeSegments(cube, recommendCuboidsWithStats,
                newSegments.toArray(new CubeSegment[newSegments.size()]));
        return new ExecuteResult();
    } catch (Exception e) {
        logger.error("failed to update cube after build", e);
        return ExecuteResult.createError(e);
    }
}
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    final CubeManager mgr = CubeManager.getInstance(context.getConfig());
    final CubeInstance cube = mgr.getCube(CubingExecutableUtil.getCubeName(this.getParams())).latestCopyForWrite();
    final CubeSegment optimizeSegment = cube.getSegmentById(CubingExecutableUtil.getSegmentId(this.getParams()));
    CubeSegment oldSegment = optimizeSegment.getCubeInstance().getOriginalSegmentToOptimize(optimizeSegment);
    Preconditions.checkNotNull(oldSegment, "cannot find the original segment to be optimized by " + optimizeSegment);

    // copy dictionaries, snapshots and rowkey stats from the original segment
    optimizeSegment.getDictionaries().putAll(oldSegment.getDictionaries());
    optimizeSegment.getSnapshots().putAll(oldSegment.getSnapshots());
    optimizeSegment.getRowkeyStats().addAll(oldSegment.getRowkeyStats());

    try {
        CubeUpdate cubeBuilder = new CubeUpdate(cube);
        cubeBuilder.setToUpdateSegs(optimizeSegment);
        mgr.updateCube(cubeBuilder);
    } catch (IOException e) {
        logger.error("failed to merge dictionaries or lookup snapshots", e);
        return ExecuteResult.createError(e);
    }
    return new ExecuteResult();
}
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    try {
        logger.info("executing:" + getCmd());
        final PatternedLogger patternedLogger = new PatternedLogger(logger);
        final Pair<Integer, String> result = context.getConfig().getCliCommandExecutor().execute(getCmd(), patternedLogger);
        getManager().addJobInfo(getId(), patternedLogger.getInfo());
        // exit code 0 means success; any other exit code becomes a FAILED result
        return result.getFirst() == 0
                ? new ExecuteResult(ExecuteResult.State.SUCCEED, result.getSecond())
                : ExecuteResult.createFailed(new ShellException(result.getSecond()));
    } catch (IOException e) {
        logger.error("job:" + getId() + " execute finished with exception", e);
        return ExecuteResult.createError(e);
    }
}
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    final CubeManager cubeManager = CubeManager.getInstance(context.getConfig());
    final CubeInstance cube = cubeManager.getCube(CubingExecutableUtil.getCubeName(this.getParams()));
    final CubeSegment segment = cube.getSegmentById(CubingExecutableUtil.getSegmentId(this.getParams()));
    CubeSegment originalSegment = cube.getOriginalSegmentToOptimize(segment);
    long sourceCount = originalSegment.getInputRecords();
    long sourceSizeBytes = originalSegment.getInputRecordsSize();

    CubingJob cubingJob = (CubingJob) getManager().getJob(CubingExecutableUtil.getCubingJobId(this.getParams()));
    long cubeSizeBytes = cubingJob.findCubeSizeBytes();

    segment.setLastBuildJobID(CubingExecutableUtil.getCubingJobId(this.getParams()));
    segment.setLastBuildTime(System.currentTimeMillis());
    segment.setSizeKB(cubeSizeBytes / 1024);
    segment.setInputRecords(sourceCount);
    segment.setInputRecordsSize(sourceSizeBytes);
    segment.setDimensionRangeInfoMap(originalSegment.getDimensionRangeInfoMap());

    try {
        cubeManager.promoteNewlyOptimizeSegments(cube, segment);
        return new ExecuteResult();
    } catch (IOException e) {
        logger.error("failed to update cube after build", e);
        return ExecuteResult.createError(e);
    }
}
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    final CubeManager cubeManager = CubeManager.getInstance(context.getConfig());
    final CubeInstance cube = cubeManager.getCube(CubingExecutableUtil.getCubeName(this.getParams()))
            .latestCopyForWrite();
    final CubeSegment segment = cube.getSegmentById(CubingExecutableUtil.getSegmentId(this.getParams()));

    CubingJob cubingJob = (CubingJob) getManager().getJob(CubingExecutableUtil.getCubingJobId(this.getParams()));
    long sourceCount = cubingJob.findSourceRecordCount();
    long sourceSizeBytes = cubingJob.findSourceSizeBytes();
    long cubeSizeBytes = cubingJob.findCubeSizeBytes();

    segment.setLastBuildJobID(CubingExecutableUtil.getCubingJobId(this.getParams()));
    segment.setLastBuildTime(System.currentTimeMillis());
    segment.setSizeKB(cubeSizeBytes / 1024);
    segment.setInputRecords(sourceCount);
    segment.setInputRecordsSize(sourceSizeBytes);

    try {
        saveExtSnapshotIfNeeded(cubeManager, cube, segment);
        updateSegment(segment);
        cubeManager.promoteNewlyBuiltSegments(cube, segment);
        return new ExecuteResult();
    } catch (IOException e) {
        logger.error("failed to update cube after build", e);
        return ExecuteResult.createError(e);
    }
}