private void deleteHiveTables(List<String> allHiveTablesNeedToBeDeleted, Map<String, String> segmentId2JobId)
        throws IOException {
    final JobEngineConfig engineConfig = new JobEngineConfig(config);
    final int uuidLength = 36;
    // ... loop over allHiveTablesNeedToBeDeleted elided in this fragment; it derives
    // segmentId (the trailing 36-character uuid of the table name) and tableToDelete ...
    String path = JobBuilderSupport.getJobWorkingDir(engineConfig.getHdfsWorkingDirectory(),
            segmentId2JobId.get(segmentId)) + "/" + tableToDelete;
    Path externalDataPath = new Path(path);
JobEngineConfig jobEngineConfig = new JobEngineConfig(kylinConfig);
// load the generic Hadoop job conf (no job-type suffix) into this job's Configuration
conf.addResource(new Path(jobEngineConfig.getHadoopJobConfFilePath(null)));
jobLock = lock;
String serverMode = jobEngineConfig.getConfig().getServerMode();
if (!("job".equals(serverMode.toLowerCase(Locale.ROOT)) || "all".equals(serverMode.toLowerCase(Locale.ROOT)))) {
    logger.info("server mode: " + serverMode + ", no need to run job scheduler");
    return; // restored: without this the scheduler would start despite the log line above
}
// core == max pool size plus a SynchronousQueue: at most corePoolSize jobs run
// concurrently, and further submissions are rejected rather than queued
int corePoolSize = jobEngineConfig.getMaxConcurrentJobLimit();
jobPool = new ThreadPoolExecutor(corePoolSize, corePoolSize, Long.MAX_VALUE, TimeUnit.DAYS,
        new SynchronousQueue<Runnable>());
context = new DefaultContext(Maps.<String, Executable> newConcurrentMap(), jobEngineConfig.getConfig());
logger.info("Finished resuming all running jobs.");
int pollSecond = jobEngineConfig.getPollIntervalSecond();
// honor job priority only when the config asks for it
fetcher = jobEngineConfig.getJobPriorityConsidered()
        ? new PriorityFetcherRunner(jobEngineConfig, context, jobExecutor)
        : new DefaultFetcherRunner(jobEngineConfig, context, jobExecutor);
public HBaseLookupMRSteps(CubeInstance cube) {
    this.cube = cube;
    this.config = new JobEngineConfig(cube.getConfig());
}
public void appendMapReduceParameters(StringBuilder buf, String jobType) {
    try {
        String jobConf = config.getHadoopJobConfFilePath(jobType);
        if (jobConf != null && jobConf.length() > 0) {
            buf.append(" -conf ").append(jobConf);
        }
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
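// Usage sketch (not from the original source): a step builder assembling a MapReduce
// command line would call appendMapReduceParameters first, so the per-job-type Hadoop
// conf rides along as " -conf <file>". The "inmem" job type and the -cubename argument
// are assumptions for illustration only.
StringBuilder cmd = new StringBuilder();
appendMapReduceParameters(cmd, "inmem"); // appends " -conf ..." when a conf file is configured
cmd.append(" -cubename ").append(cube.getName());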
ExecutableManager getExecutableManager() {
    return ExecutableManager.getInstance(jobEngineConfig.getConfig());
}
JobEngineConfig engineConfig = new JobEngineConfig(config);
IJoinedFlatTableDesc intermediateTableDesc = new IIJoinedFlatTableDesc(iidesc);
final String useDatabaseHql = "USE " + engineConfig.getConfig().getHiveDatabaseForIntermediateTable() + ";";
String dropTableHql = JoinedFlatTable.generateDropTableStatement(intermediateTableDesc, jobUUID);
// ... the statement that consumed the next two arguments is truncated in the fragment:
//         JobInstance.getJobWorkingDir(jobUUID, engineConfig.getHdfsWorkingDirectory()), jobUUID); ...
String insertDataHqls = JoinedFlatTable.generateInsertDataStatement(intermediateTableDesc, jobUUID, engineConfig);
@Test
public void testPropertiesHotLoad() throws IOException {
    KylinConfig baseConfig = KylinConfig.getInstanceFromEnv();
    JobEngineConfig jobEngineConfig = new JobEngineConfig(baseConfig);
    assertEquals(10, jobEngineConfig.getMaxConcurrentJobLimit());

    // overwrite the property and hot-reload the site config
    updateProperty("kylin.job.max-concurrent-jobs", "20");
    KylinConfig.getInstanceFromEnv().reloadFromSiteProperties();

    assertEquals(20, jobEngineConfig.getMaxConcurrentJobLimit());
}
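// The test above relies on an updateProperty(...) helper that the fragment does not show.
// A minimal sketch of such a helper, assuming it rewrites the kylin.properties file that
// reloadFromSiteProperties() reads back; getKylinPropertiesFile() is an assumed accessor.
private void updateProperty(String key, String value) throws IOException {
    File propFile = KylinConfig.getKylinPropertiesFile(); // assumption: exposes the site properties file
    Properties properties = new Properties();
    try (FileInputStream in = new FileInputStream(propFile)) {
        properties.load(in); // keep all existing entries
    }
    properties.setProperty(key, value); // override only the property under test
    try (FileOutputStream out = new FileOutputStream(propFile)) {
        properties.store(out, null);
    }
}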
public static String getJobWorkingDir(JobEngineConfig conf, String jobId) {
    return getJobWorkingDir(conf.getHdfsWorkingDirectory(), jobId);
}
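// Illustration (assumption: JOB_WORKING_DIR_PREFIX, shown in the overload later in this
// section, is "kylin-"): the per-job working dir resolves to <hdfs-working-dir>/kylin-<jobId>.
String workingDir = getJobWorkingDir(engineConfig, jobId); // e.g. /kylin/kylin_metadata/kylin-<jobId>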
    return;
// (fragment resumes after the guard that owned the return above)
executableManager = ExecutableManager.getInstance(jobEngineConfig.getConfig());
int corePoolSize = jobEngineConfig.getMaxConcurrentJobLimit();
jobPool = new ThreadPoolExecutor(corePoolSize, corePoolSize, Long.MAX_VALUE, TimeUnit.DAYS,
        new SynchronousQueue<Runnable>());
context = new DefaultContext(Maps.<String, Executable> newConcurrentMap(), jobEngineConfig.getConfig());
private static CubingJob initCubingJob(CubeSegment seg, String jobType, String submitter, JobEngineConfig config) {
    KylinConfig kylinConfig = config.getConfig();
    CubeInstance cube = seg.getCubeInstance();
    List<ProjectInstance> projList = ProjectManager.getInstance(kylinConfig).findProjects(cube.getType(),
            cube.getName());
    // ... checks on projList elided in this fragment; the multi-project branch follows ...
    String msg = "Found more than one project containing the cube " + cube.getName()
            + ". It doesn't meet the uniqueness requirement!";
    if (!config.getConfig().allowCubeAppearInMultipleProjects()) {
        throw new RuntimeException(msg);
    } else {
        // ... warning path elided ...
    }
    // ... job construction elided; the fragment resumes with the result being populated ...
    format.setTimeZone(TimeZone.getTimeZone(config.getTimeZone()));
    result.setDeployEnvName(kylinConfig.getDeployEnv());
    result.setProjectName(projList.get(0).getName());
protected boolean isJobPoolFull() {
    Map<String, Executable> runningJobs = context.getRunningJobs();
    if (runningJobs.size() >= jobEngineConfig.getMaxConcurrentJobLimit()) {
        logger.warn("Too many jobs are running; job fetch will wait until the next schedule time.");
        return true;
    }
    return false;
}
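// Sketch (not from the original): how a fetcher's poll cycle might consult isJobPoolFull()
// to back off instead of submitting more work; the executable lookup is elided and the
// overall loop shape is an assumption for illustration.
public void run() {
    if (isJobPoolFull()) {
        return; // pool saturated; the next scheduled fetch will retry
    }
    // ... fetch READY executables and submit them to jobPool ...
}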
private String getHadoopJobConfFilePath(String suffix, boolean appendSuffix) throws IOException {
    String hadoopJobConfFile;
    if (suffix != null && appendSuffix) {
        hadoopJobConfFile = HADOOP_JOB_CONF_FILENAME + "_" + suffix.toLowerCase(Locale.ROOT) + ".xml";
    } else {
        hadoopJobConfFile = HADOOP_JOB_CONF_FILENAME + ".xml";
    }

    File jobConfig = getJobConfig(hadoopJobConfFile);
    if (jobConfig == null || !jobConfig.exists()) {
        logger.warn("Failed to locate " + hadoopJobConfFile + ", trying to locate "
                + HADOOP_JOB_CONF_FILENAME + ".xml");
        jobConfig = getJobConfig(HADOOP_JOB_CONF_FILENAME + ".xml");
        if (jobConfig == null || !jobConfig.exists()) {
            logger.error("Failed to locate " + HADOOP_JOB_CONF_FILENAME + ".xml");
            throw new RuntimeException("Failed to locate " + HADOOP_JOB_CONF_FILENAME + ".xml");
        }
    }
    return OptionsHelper.convertToFileURL(jobConfig.getAbsolutePath());
}
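// Illustration of the fallback above, assuming HADOOP_JOB_CONF_FILENAME resolves to
// "kylin_job_conf": a per-type file like kylin_job_conf_inmem.xml wins when present,
// the generic kylin_job_conf.xml is the fallback, and a missing generic file throws.
String confPath = config.getHadoopJobConfFilePath("inmem"); // throws IOException if nothing is found
Configuration conf = new Configuration();
conf.addResource(new Path(confPath)); // mirrors the usage shown near the top of this section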
private IIJob initialJob(IISegment seg, String type) {
    IIJob result = new IIJob();
    SimpleDateFormat format = new SimpleDateFormat("z yyyy-MM-dd HH:mm:ss");
    format.setTimeZone(TimeZone.getTimeZone(engineConfig.getTimeZone()));
    result.setIIName(seg.getIIInstance().getName());
    result.setSegmentId(seg.getUuid());
    result.setName(seg.getIIInstance().getName() + " - " + seg.getName() + " - " + type + " - "
            + format.format(new Date(System.currentTimeMillis())));
    result.setSubmitter(this.submitter);
    return result;
}
public JobBuilderSupport(CubeSegment seg, String submitter) {
    Preconditions.checkNotNull(seg, "segment cannot be null");
    this.config = new JobEngineConfig(seg.getConfig());
    this.seg = seg;
    this.submitter = submitter;
}
public void appendMapReduceParameters(StringBuilder buf, String jobType) {
    try {
        String jobConf = config.getHadoopJobConfFilePath(jobType);
        if (jobConf != null && jobConf.length() > 0) {
            buf.append(" -conf ").append(jobConf);
        }
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
public ExecutableManager getExecutableManager() {
    return ExecutableManager.getInstance(jobEngineConfig.getConfig());
}
protected String getJobWorkingDir(String uuid) {
    return engineConfig.getHdfsWorkingDirectory() + "/" + JOB_WORKING_DIR_PREFIX + uuid;
}
jobLock.lock();
executableManager = ExecutableManager.getInstance(jobEngineConfig.getConfig());
int corePoolSize = jobEngineConfig.getMaxConcurrentJobLimit();
jobPool = new ThreadPoolExecutor(corePoolSize, corePoolSize, Long.MAX_VALUE, TimeUnit.DAYS,
        new SynchronousQueue<Runnable>());
context = new DefaultContext(Maps.<String, Executable> newConcurrentMap(), jobEngineConfig.getConfig());