Tabnine Logo
ExecutableContext.getConfig
Code Index — Add Tabnine to your IDE (free)

How to use
getConfig
method
in
org.apache.kylin.job.execution.ExecutableContext

Best Java code snippets using org.apache.kylin.job.execution.ExecutableContext.getConfig (Showing top 20 results out of 315)

origin: apache/kylin

/**
 * Cleans up the intermediate Hive flat table created for this job.
 *
 * @param context supplies the KylinConfig for this execution
 * @return SUCCEED with the cleanup log, or an error result when cleanup failed
 * @throws ExecuteException declared by the doWork contract; not thrown directly here
 */
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
  KylinConfig config = context.getConfig();
  // StringBuilder suffices: the buffer is method-local, so the synchronized
  // StringBuffer only adds overhead without any thread-safety benefit.
  StringBuilder output = new StringBuilder();
  try {
    output.append(cleanUpIntermediateFlatTable(config));
    // don't drop view to avoid concurrent issue
    //output.append(cleanUpHiveViewIntermediateTable(config));
  } catch (IOException e) {
    logger.error("job:" + getId() + " execute finished with exception", e);
    return ExecuteResult.createError(e);
  }
  return new ExecuteResult(ExecuteResult.State.SUCCEED, output.toString());
}
origin: apache/kylin

/**
 * Notifies the configured admin mailing list that persisting job metadata failed.
 * Logs a warning and returns without sending anything when no admin addresses
 * are configured.
 */
protected void handleMetadataPersistException(ExecutableContext context, Throwable exception) {
  final String[] adminDls = context.getConfig().getAdminDls();
  if (adminDls == null || adminDls.length < 1) {
    logger.warn(NO_NEED_TO_SEND_EMAIL_USER_LIST_IS_EMPTY);
    return;
  }
  List<String> recipients = Lists.newArrayList(adminDls);
  // Template variables consumed by the metadata-persist-failure mail template.
  Map<String, Object> mailData = Maps.newHashMap();
  mailData.put("job_name", getName());
  mailData.put("env_name", context.getConfig().getDeployEnv());
  mailData.put(SUBMITTER, StringUtil.noBlank(getSubmitter(), "missing submitter"));
  mailData.put("job_engine", MailNotificationUtil.getLocalHostName());
  mailData.put("error_log",
      Matcher.quoteReplacement(StringUtil.noBlank(exception.getMessage(), "no error message")));
  String body = MailNotificationUtil.getMailContent(MailNotificationUtil.METADATA_PERSIST_FAIL, mailData);
  String subject = MailNotificationUtil.getMailTitle("METADATA PERSIST", "FAIL",
      context.getConfig().getDeployEnv());
  new MailService(context.getConfig()).sendMail(recipients, subject, body);
}
origin: apache/kylin

@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
  // Garbage-collects HDFS paths left behind by this job: deletes them on the
  // default working file system and, when a separate HBase cluster FS is
  // configured, on that cluster's file system as well.
  try {
    config = new JobEngineConfig(context.getConfig());
    List<String> toDeletePaths = getDeletePaths();
    dropHdfsPathOnCluster(toDeletePaths, HadoopUtil.getWorkingFileSystem());
    // A non-empty HBase cluster FS means HBase storage lives on a different
    // HDFS; clean the same paths there too.
    if (StringUtils.isNotEmpty(context.getConfig().getHBaseClusterFs())) {
      dropHdfsPathOnCluster(toDeletePaths, FileSystem.get(HBaseConnection.getCurrentHBaseConfiguration()));
    }
  } catch (IOException e) {
    logger.error("job:" + getId() + " execute finished with exception", e);
    // NOTE(review): `output` is a field declared outside this view; the error
    // detail is appended to whatever it already contains.
    output.append("\n").append(e.getLocalizedMessage());
    return new ExecuteResult(ExecuteResult.State.ERROR, output.toString(), e);
  }
  return new ExecuteResult(ExecuteResult.State.SUCCEED, output.toString());
}
origin: apache/kylin

/**
 * Resolves a cube segment by cube name and segment id.
 * When either lookup fails, throws IllegalStateException whose message lists
 * the known cubes (or the cube's segment uuids) to aid debugging.
 */
public static CubeSegment findSegment(ExecutableContext context, String cubeName, String segmentId) {
  final CubeManager mgr = CubeManager.getInstance(context.getConfig());
  final CubeInstance cube = mgr.getCube(cubeName);
  if (cube == null) {
    // Comma-separated list of every known cube name for the error message.
    StringBuilder cubeNames = new StringBuilder();
    for (CubeInstance candidate : mgr.listAllCubes()) {
      if (cubeNames.length() > 0) {
        cubeNames.append(",");
      }
      cubeNames.append(candidate.getName());
    }
    throw new IllegalStateException("target cube name: " + cubeName + " cube list: " + cubeNames);
  }
  final CubeSegment newSegment = cube.getSegmentById(segmentId);
  if (newSegment == null) {
    // Comma-separated list of the cube's segment uuids.
    StringBuilder segmentIds = new StringBuilder();
    for (CubeSegment candidate : cube.getSegments()) {
      if (segmentIds.length() > 0) {
        segmentIds.append(",");
      }
      segmentIds.append(candidate.getUuid());
    }
    throw new IllegalStateException("target segment id: " + segmentId + " segment list: " + segmentIds);
  }
  return newSegment;
}
origin: apache/kylin

/**
 * Runs onExecuteFinished, retrying with exponential backoff when the failure
 * looks like a metadata-persist problem. Retries are capped by
 * kylin's job-metadata-persist-retry config; if the error persists past the
 * cap, the admin list is notified and an ExecuteException is thrown.
 *
 * @throws ExecuteException when retries are exhausted without success
 */
private void onExecuteFinishedWithRetry(ExecuteResult result, ExecutableContext executableContext)
    throws ExecuteException {
  Throwable exception;
  int nRetry = 0;
  do {
    nRetry++;
    exception = null;
    try {
      onExecuteFinished(result, executableContext);
    } catch (Exception e) {
      // Pass the throwable as the final argument so SLF4J logs the stack trace;
      // the original "due to {}" placeholder was never substituted because a
      // trailing Throwable is consumed as the exception, not as a parameter.
      logger.error(nRetry + "th retries for onExecuteFinished fails", e);
      if (isMetaDataPersistException(e, 5)) {
        exception = e;
        try {
          // Exponential backoff: 4s, 16s, 64s, ...
          Thread.sleep(1000L * (long) Math.pow(4, nRetry));
        } catch (InterruptedException e1) {
          // Restore the interrupt flag so callers further up can observe it.
          Thread.currentThread().interrupt();
          throw new IllegalStateException(e1);
        }
      } else {
        // Not a persist problem: fail fast rather than retry.
        throw e;
      }
    }
  } while (exception != null && nRetry <= executableContext.getConfig().getJobMetadataPersistRetry());
  if (exception != null) {
    handleMetadataPersistException(executableContext, exception);
    throw new ExecuteException(exception);
  }
}
origin: apache/kylin

@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
  // After an optimization build: read statistics for the recommended cuboids
  // from the READY_PENDING segments and promote those segments through the
  // checkpoint-optimize path.
  final CubeManager cubeManager = CubeManager.getInstance(context.getConfig());
  final CubeInstance cube = cubeManager.getCube(CubingExecutableUtil.getCubeName(this.getParams()));
  Set<Long> recommendCuboids = cube.getCuboidsRecommend();
  try {
    List<CubeSegment> newSegments = cube.getSegments(SegmentStatusEnum.READY_PENDING);
    Map<Long, Long> recommendCuboidsWithStats = CuboidStatsReaderUtil
        .readCuboidStatsFromSegments(recommendCuboids, newSegments);
    if (recommendCuboidsWithStats == null) {
      // Missing statistics make promotion unsafe; converted to an error
      // result by the catch block below.
      throw new RuntimeException("Fail to get statistics info for recommended cuboids after optimization!!!");
    }
    cubeManager.promoteCheckpointOptimizeSegments(cube, recommendCuboidsWithStats,
        newSegments.toArray(new CubeSegment[newSegments.size()]));
    return new ExecuteResult();
  } catch (Exception e) {
    // Deliberately broad: also catches the RuntimeException thrown above.
    logger.error("fail to update cube after build", e);
    return ExecuteResult.createError(e);
  }
}
origin: apache/kylin

overwriteJobConf(conf, executableContext.getConfig(), getMapReduceParams().trim().split("\\s+"));
Job job = new Cluster(conf).getJob(JobID.forName(mrJobId));
if (job == null || job.getJobState() == JobStatus.State.FAILED) {
origin: apache/kylin

@Override
protected Pair<String, String> formatNotifications(ExecutableContext context, ExecutableState state) {
  CubeInstance cubeInstance = CubeManager.getInstance(context.getConfig())
      .getCube(CubingExecutableUtil.getCubeName(this.getParams()));
  final Output output = getManager().getOutput(getId());
origin: apache/kylin

@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
  final CubeManager mgr = CubeManager.getInstance(context.getConfig());
  final CubeInstance cube = mgr.getCube(CubingExecutableUtil.getCubeName(this.getParams()));
  final CubeSegment optimizeSegment = cube.getSegmentById(CubingExecutableUtil.getSegmentId(this.getParams()));
origin: apache/kylin

  @Override
  protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    // Seeds a to-be-optimized segment with the dictionaries, lookup snapshots
    // and rowkey stats of the original segment it replaces, so the optimize
    // build can reuse them instead of rebuilding.
    final CubeManager mgr = CubeManager.getInstance(context.getConfig());
    final CubeInstance cube = mgr.getCube(CubingExecutableUtil.getCubeName(this.getParams())).latestCopyForWrite();
    final CubeSegment optimizeSegment = cube.getSegmentById(CubingExecutableUtil.getSegmentId(this.getParams()));

    CubeSegment oldSegment = optimizeSegment.getCubeInstance().getOriginalSegmentToOptimize(optimizeSegment);
    Preconditions.checkNotNull(oldSegment,
        "cannot find the original segment to be optimized by " + optimizeSegment);

    // --- Copy dictionary
    optimizeSegment.getDictionaries().putAll(oldSegment.getDictionaries());
    optimizeSegment.getSnapshots().putAll(oldSegment.getSnapshots());
    optimizeSegment.getRowkeyStats().addAll(oldSegment.getRowkeyStats());

    try {
      // Persist the updated segment through the cube manager.
      CubeUpdate cubeBuilder = new CubeUpdate(cube);
      cubeBuilder.setToUpdateSegs(optimizeSegment);
      mgr.updateCube(cubeBuilder);
    } catch (IOException e) {
      logger.error("fail to merge dictionary or lookup snapshots", e);
      return ExecuteResult.createError(e);
    }

    return new ExecuteResult();
  }
}
origin: apache/kylin

@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
  KylinConfig kylinConfig = context.getConfig();
  CubeManager cubeManager = CubeManager.getInstance(kylinConfig);
origin: apache/kylin

@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
  final CubeManager cubeMgr = CubeManager.getInstance(context.getConfig());
  final DictionaryManager dictMgrHdfs;
  final DictionaryManager dictMgrHbase;
origin: apache/kylin

/**
 * Executes this step's shell command, mirroring the command's output into the
 * job log. Exit code 0 yields SUCCEED with the captured output; any other exit
 * code yields a failed result carrying a ShellException.
 */
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
  try {
    logger.info("executing:" + getCmd());
    final PatternedLogger shellLogger = new PatternedLogger(logger);
    final Pair<Integer, String> cmdResult =
        context.getConfig().getCliCommandExecutor().execute(getCmd(), shellLogger);
    // Persist any job info the patterned logger extracted from the output.
    getManager().addJobInfo(getId(), shellLogger.getInfo());
    if (cmdResult.getFirst() == 0) {
      return new ExecuteResult(ExecuteResult.State.SUCCEED, cmdResult.getSecond());
    }
    return ExecuteResult.createFailed(new ShellException(cmdResult.getSecond()));
  } catch (IOException e) {
    logger.error("job:" + getId() + " execute finished with exception", e);
    return ExecuteResult.createError(e);
  }
}
origin: apache/kylin

@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
  // Merge step: build the merged dictionaries and lookup snapshots for the
  // new (merged) segment from the segments being merged, then persist the
  // updated segment.
  final CubeManager mgr = CubeManager.getInstance(context.getConfig());
  final CubeInstance cube = mgr.getCube(CubingExecutableUtil.getCubeName(this.getParams()));
  final CubeSegment newSegment = cube.getSegmentById(CubingExecutableUtil.getSegmentId(this.getParams()));
  final List<CubeSegment> mergingSegments = getMergingSegments(cube);
  KylinConfig conf = cube.getConfig();
  // Sort so the merge sees segments in their natural order.
  Collections.sort(mergingSegments);
  try {
    checkLookupSnapshotsMustIncremental(mergingSegments);
    // work on copy instead of cached objects
    CubeInstance cubeCopy = cube.latestCopyForWrite();
    CubeSegment newSegCopy = cubeCopy.getSegmentById(newSegment.getUuid());
    
    makeDictForNewSegment(conf, cubeCopy, newSegCopy, mergingSegments);
    makeSnapshotForNewSegment(cubeCopy, newSegCopy, mergingSegments);
    CubeUpdate update = new CubeUpdate(cubeCopy);
    update.setToUpdateSegs(newSegCopy);
    mgr.updateCube(update);
    return ExecuteResult.createSucceed();
  } catch (IOException e) {
    logger.error("fail to merge dictionary or lookup snapshots", e);
    return ExecuteResult.createError(e);
  }
}
origin: apache/kylin

/**
 * Post-execution hook: when the job was not discarded, succeeded, and every
 * sub-task reached SUCCEED, records the current end time as the cube's last
 * optimization timestamp.
 */
@Override
protected void onExecuteFinished(ExecuteResult result, ExecutableContext executableContext) {
  super.onExecuteFinished(result, executableContext);
  if (isDiscarded() || !result.succeed()) {
    return;
  }
  boolean everyTaskSucceeded = true;
  for (Executable task : getTasks()) {
    if (task.getStatus() != ExecutableState.SUCCEED) {
      everyTaskSucceeded = false;
    }
  }
  if (!everyTaskSucceeded) {
    return;
  }
  // Add last optimization time
  CubeManager cubeManager = CubeManager.getInstance(executableContext.getConfig());
  CubeInstance cube = cubeManager.getCube(getCubeName());
  CubeInstance copyForWrite = cube.latestCopyForWrite();
  try {
    copyForWrite.setCuboidLastOptimized(getEndTime());
    cubeManager.updateCube(new CubeUpdate(copyForWrite));
  } catch (IOException e) {
    logger.error("Failed to update last optimized for " + getCubeName(), e);
  }
}
origin: apache/kylin

@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
  // Rebuilds the snapshot of one lookup table and records the new snapshot's
  // resource path either on the cube (global snapshot table) or on the
  // specified segments.
  KylinConfig kylinConfig = context.getConfig();
  CubeManager cubeManager = CubeManager.getInstance(kylinConfig);
  TableMetadataManager metaMgr = TableMetadataManager.getInstance(kylinConfig);
  SnapshotManager snapshotMgr = SnapshotManager.getInstance(kylinConfig);
  CubeInstance cube = cubeManager.getCube(LookupExecutableUtil.getCubeName(this.getParams()));
  List<String> segmentIDs = LookupExecutableUtil.getSegments(this.getParams());
  String lookupTableName = LookupExecutableUtil.getLookupTableName(this.getParams());
  CubeDesc cubeDesc = cube.getDescriptor();
  try {
    TableDesc tableDesc = metaMgr.getTableDesc(lookupTableName, cube.getProject());
    IReadableTable hiveTable = SourceManager.createReadableTable(tableDesc, null);
    logger.info("take snapshot for table:" + lookupTableName);
    SnapshotTable snapshot = snapshotMgr.buildSnapshot(hiveTable, tableDesc, cube.getConfig());
    logger.info("update snapshot path to cube metadata");
    if (cubeDesc.isGlobalSnapshotTable(lookupTableName)) {
      // One snapshot shared by the whole cube.
      LookupExecutableUtil.updateSnapshotPathToCube(cubeManager, cube, lookupTableName,
          snapshot.getResourcePath());
    } else {
      // Per-segment snapshots: only the listed segments get the new path.
      LookupExecutableUtil.updateSnapshotPathToSegments(cubeManager, cube, segmentIDs, lookupTableName,
          snapshot.getResourcePath());
    }
    return new ExecuteResult();
  } catch (IOException e) {
    logger.error("fail to build snapshot for:" + lookupTableName, e);
    return ExecuteResult.createError(e);
  }
}
origin: apache/kylin

@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
  // Widens the merged segment so it spans every segment being merged: the
  // source range, partition offsets and time range are taken from the first
  // and last merging segments after sorting.
  final CubeManager cubeManager = CubeManager.getInstance(context.getConfig());
  final CubeInstance cubeCopy = cubeManager.getCube(CubingExecutableUtil.getCubeName(this.getParams())).latestCopyForWrite();
  final String segmentId = CubingExecutableUtil.getSegmentId(this.getParams());
  final CubeSegment segCopy = cubeCopy.getSegmentById(segmentId);
  Preconditions.checkNotNull(segCopy, "Cube segment '" + segmentId + "' not found.");
  Segments<CubeSegment> mergingSegs = cubeCopy.getMergingSegments(segCopy);
  Preconditions.checkArgument(mergingSegs.size() > 0, "Merging segment not exist.");
  // Sort so get(0)/get(size-1) are the chronologically first and last segments.
  Collections.sort(mergingSegs);
  final CubeSegment first = mergingSegs.get(0);
  final CubeSegment last = mergingSegs.get(mergingSegs.size() - 1);
  segCopy.setSegRange(new SegmentRange(first.getSegRange().start, last.getSegRange().end));
  // NOTE(review): offsets presumably track a streaming source's partition
  // positions (e.g. Kafka) — confirm against the segment model.
  segCopy.setSourcePartitionOffsetStart(first.getSourcePartitionOffsetStart());
  segCopy.setSourcePartitionOffsetEnd(last.getSourcePartitionOffsetEnd());
  segCopy.setTSRange(new TSRange(mergingSegs.getTSStart(), mergingSegs.getTSEnd()));
  CubeUpdate update = new CubeUpdate(cubeCopy);
  update.setToUpdateSegs(segCopy);
  try {
    cubeManager.updateCube(update);
    return ExecuteResult.createSucceed();
  } catch (IOException e) {
    logger.error("fail to update cube segment offset", e);
    return ExecuteResult.createError(e);
  }
}
origin: apache/kylin

@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
  // Finalizes an optimized segment: copies input-record statistics from the
  // original segment being optimized (presumably because optimization reuses
  // the original input — confirm), stamps build metadata and the new cube
  // size, then promotes the segment.
  final CubeManager cubeManager = CubeManager.getInstance(context.getConfig());
  final CubeInstance cube = cubeManager.getCube(CubingExecutableUtil.getCubeName(this.getParams()));
  final CubeSegment segment = cube.getSegmentById(CubingExecutableUtil.getSegmentId(this.getParams()));
  CubeSegment originalSegment = cube.getOriginalSegmentToOptimize(segment);
  long sourceCount = originalSegment.getInputRecords();
  long sourceSizeBytes = originalSegment.getInputRecordsSize();
  CubingJob cubingJob = (CubingJob) getManager().getJob(CubingExecutableUtil.getCubingJobId(this.getParams()));
  long cubeSizeBytes = cubingJob.findCubeSizeBytes();
  segment.setLastBuildJobID(CubingExecutableUtil.getCubingJobId(this.getParams()));
  segment.setLastBuildTime(System.currentTimeMillis());
  segment.setSizeKB(cubeSizeBytes / 1024);
  segment.setInputRecords(sourceCount);
  segment.setInputRecordsSize(sourceSizeBytes);
  segment.setDimensionRangeInfoMap(originalSegment.getDimensionRangeInfoMap());
  try {
    cubeManager.promoteNewlyOptimizeSegments(cube, segment);
    return new ExecuteResult();
  } catch (IOException e) {
    logger.error("fail to update cube after build", e);
    return ExecuteResult.createError(e);
  }
}
origin: apache/kylin

@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
  // Final step of a cubing job: stamp the freshly built segment with build
  // metadata and statistics gathered from the job, then promote it through
  // the cube manager.
  final CubeManager cubeManager = CubeManager.getInstance(context.getConfig());
  final CubeInstance cube = cubeManager.getCube(CubingExecutableUtil.getCubeName(this.getParams()))
      .latestCopyForWrite();
  final CubeSegment segment = cube.getSegmentById(CubingExecutableUtil.getSegmentId(this.getParams()));
  CubingJob cubingJob = (CubingJob) getManager().getJob(CubingExecutableUtil.getCubingJobId(this.getParams()));
  long sourceCount = cubingJob.findSourceRecordCount();
  long sourceSizeBytes = cubingJob.findSourceSizeBytes();
  long cubeSizeBytes = cubingJob.findCubeSizeBytes();
  segment.setLastBuildJobID(CubingExecutableUtil.getCubingJobId(this.getParams()));
  segment.setLastBuildTime(System.currentTimeMillis());
  segment.setSizeKB(cubeSizeBytes / 1024);
  segment.setInputRecords(sourceCount);
  segment.setInputRecordsSize(sourceSizeBytes);
  try {
    // Persist external snapshots first (if any), then the segment itself,
    // before promotion makes the segment visible.
    saveExtSnapshotIfNeeded(cubeManager, cube, segment);
    updateSegment(segment);
    cubeManager.promoteNewlyBuiltSegments(cube, segment);
    return new ExecuteResult();
  } catch (IOException e) {
    logger.error("fail to update cube after build", e);
    return ExecuteResult.createError(e);
  }
}
origin: apache/kylin

@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
  final CubeManager cubeManager = CubeManager.getInstance(context.getConfig());
  final CubeInstance cube = cubeManager.getCube(CubingExecutableUtil.getCubeName(this.getParams())).latestCopyForWrite();
org.apache.kylin.job.execution.ExecutableContext.getConfig

Popular methods of ExecutableContext

    Popular in Java

    • Creating JSON documents from java classes using gson
    • getSharedPreferences (Context)
    • getContentResolver (Context)
    • getExternalFilesDir (Context)
    • Kernel (java.awt.image)
    • FileWriter (java.io)
      A specialized Writer that writes to a file in the file system. All write requests made by calling me
    • Dictionary (java.util)
      Note: Do not use this class since it is obsolete. Please use the Map interface for new implementatio
    • Servlet (javax.servlet)
      Defines methods that all servlets must implement. A servlet is a small Java program that runs within
    • JFileChooser (javax.swing)
    • JPanel (javax.swing)
    • Top 12 Jupyter Notebook extensions
    Tabnine Logo
    • Products

      Search for Java codeSearch for JavaScript code
    • IDE Plugins

      IntelliJ IDEAWebStormVisual StudioAndroid StudioEclipseVisual Studio CodePyCharmSublime TextPhpStormVimGoLandRubyMineEmacsJupyter NotebookJupyter LabRiderDataGripAppCode
    • Company

      About UsContact UsCareers
    • Resources

      FAQBlogTabnine AcademyTerms of usePrivacy policyJava Code IndexJavascript Code Index
    Get Tabnine for your IDE now