public void parseUserInfo(String userInfo) { String[] info = StringUtil.splitByComma(userInfo); if (info.length > 0) //first element is username this.username = info[0]; for (int i = 1; i < info.length; i++) //the remains should be roles which starts from index 1 this.roles.add(info[i]); }
/**
 * Logs every HTable recorded in {@code issueExistHTables} as problematic:
 * either the table does not exist in HBase or its metadata could not be
 * read (see the IOException branch in check()).
 */
public void printIssueExistingHTables() {
    logger.info("------ HTables exist issues in hbase : not existing, metadata broken ------");
    for (String segFullName : issueExistHTables) {
        // segFullName format: "<htable name>,<cube name>"
        String[] sepNameList = StringUtil.splitByComma(segFullName);
        logger.error(sepNameList[0] + " belonging to cube " + sepNameList[1]
                + " has some issues and cannot be read successfully!!!");
    }
    logger.info("----------------------------------------------------");
}
}
/**
 * Resolves a comma separated list of canonical cube names into a set of
 * realization names.
 *
 * @param config  the Kylin configuration (unused here, kept for the caller contract)
 * @param cubes   comma separated canonical cube names; may be null or empty
 * @param project the project instance (unused here, kept for the caller contract)
 * @return a set of parsed names, or null when {@code cubes} is null/empty
 */
protected Set<String> getRealizations(KylinConfig config, String cubes, ProjectInstance project) {
    if (Strings.isNullOrEmpty(cubes)) {
        return null;
    }
    String[] canonicalNames = StringUtil.splitByComma(cubes);
    return Sets.newHashSet(parseNamesFromCanonicalNames(canonicalNames));
}
/**
 * Parses the serialized lookup snapshot string into a lookup-snapshot path map.
 * The expected input is comma separated "key=value" entries.
 *
 * @param snapshotsString comma separated "name=snapshotPath" entries
 * @return map from lookup name to its snapshot path
 * @throws IllegalArgumentException if an entry lacks a key or a value
 *         (previously this surfaced as an opaque ArrayIndexOutOfBoundsException)
 */
public static Map<String, String> parseLookupSnapshots(String snapshotsString) {
    Map<String, String> lookupSnapshotMap = Maps.newHashMap();
    String[] lookupSnapshotEntries = StringUtil.splitByComma(snapshotsString);
    for (String lookupSnapshotEntryStr : lookupSnapshotEntries) {
        String[] split = StringUtil.split(lookupSnapshotEntryStr, "=");
        // fail fast with a descriptive message on a malformed entry
        if (split.length < 2) {
            throw new IllegalArgumentException("Malformed lookup snapshot entry: " + lookupSnapshotEntryStr);
        }
        lookupSnapshotMap.put(split[0], split[1]);
    }
    return lookupSnapshotMap;
}
}
/**
 * Regenerate table cardinality for one or more tables of a project.
 * Table names are comma separated and upper-cased (Locale.ROOT) before the
 * cardinality calculation is submitted under the current authenticated user.
 *
 * @param tableNames comma separated table names from the path
 * @param request    cardinality request body, echoed back to the caller
 * @param project    target project from the path
 * @return the request object that was passed in
 * @throws Exception wrapped as InternalErrorException on IO failure
 */
@RequestMapping(value = "/{project}/{tableNames}/cardinality", method = { RequestMethod.PUT }, produces = {
        "application/json" })
@ResponseBody
public CardinalityRequest generateCardinality(@PathVariable String tableNames,
        @RequestBody CardinalityRequest request, @PathVariable String project) throws Exception {
    // submitter is the currently authenticated principal
    String submitter = SecurityContextHolder.getContext().getAuthentication().getName();
    String[] tables = StringUtil.splitByComma(tableNames);
    try {
        for (String table : tables) {
            // Locale.ROOT keeps the upper-casing locale-independent
            tableService.calculateCardinality(table.trim().toUpperCase(Locale.ROOT), submitter, project);
        }
    } catch (IOException e) {
        logger.error("Failed to calculate cardinality", e);
        throw new InternalErrorException(e.getLocalizedMessage());
    }
    return request;
}
/**
 * Unloads (removes) the given Hive tables from a project, cleaning up each
 * table's ACL entries first. Results are reported per table.
 *
 * @param tables  comma separated table names from the path
 * @param project target project from the path
 * @return map with keys "result.unload.success" and "result.unload.fail",
 *         each holding the affected table names
 */
@RequestMapping(value = "/{tables}/{project}", method = { RequestMethod.DELETE }, produces = { "application/json" })
@ResponseBody
public Map<String, String[]> unLoadHiveTables(@PathVariable String tables, @PathVariable String project) {
    Set<String> unLoadSuccess = Sets.newHashSet();
    Set<String> unLoadFail = Sets.newHashSet();
    Map<String, String[]> result = new HashMap<String, String[]>();
    try {
        for (String tableName : StringUtil.splitByComma(tables)) {
            // drop ACL entries before unloading so no stale ACL survives the table
            tableACLService.deleteFromTableACLByTbl(project, tableName);
            if (tableService.unloadHiveTable(tableName, project)) {
                unLoadSuccess.add(tableName);
            } else {
                unLoadFail.add(tableName);
            }
        }
    } catch (Throwable e) { // deliberately broad: any failure becomes an internal error response
        logger.error("Failed to unload Hive Table", e);
        throw new InternalErrorException(e.getLocalizedMessage());
    }
    // toArray(new String[0]) already yields String[]; the old explicit casts were redundant
    result.put("result.unload.success", unLoadSuccess.toArray(new String[0]));
    result.put("result.unload.fail", unLoadFail.toArray(new String[0]));
    return result;
}
/**
 * Exports the white-listed Kylin configuration properties as a string.
 * When no white list is configured, an empty key collection is passed through,
 * delegating the behavior to exportToString().
 *
 * @return the exported configuration text
 * @throws IOException if exporting the configuration fails
 */
public String getPublicConfig() throws IOException {
    String whiteList = KylinConfig.getInstanceFromEnv().getPropertiesWhiteList();
    Collection<String> keys = Lists.newArrayList();
    if (StringUtils.isNotEmpty(whiteList)) {
        // the white list is a comma separated list of property names
        keys.addAll(Arrays.asList(StringUtil.splitByComma(whiteList)));
    }
    return KylinConfig.getInstanceFromEnv().exportToString(keys);
}
}
/**
 * Classifies every "htable,cube" pair: entries whose table metadata cannot be
 * read go to {@code issueExistHTables}; entries whose host tag differs from
 * this cluster's metadata URL prefix go to {@code inconsistentHTables}.
 *
 * @param segFullNameList list of "htableName,cubeName" strings to examine
 */
public void check(List<String> segFullNameList) {
    issueExistHTables = Lists.newArrayList();
    inconsistentHTables = Lists.newArrayList();

    for (String segFullName : segFullNameList) {
        // segFullName format: "<htable name>,<cube name>"
        String[] sepNameList = StringUtil.splitByComma(segFullName);
        try {
            HTableDescriptor hTableDescriptor = hbaseAdmin.getTableDescriptor(TableName.valueOf(sepNameList[0]));
            String host = hTableDescriptor.getValue(IRealizationConstants.HTableTag);
            // the host tag must match this cluster's metadata prefix
            if (!dstCfg.getMetadataUrlPrefix().equalsIgnoreCase(host)) {
                inconsistentHTables.add(segFullName);
            }
        } catch (IOException e) {
            // table missing or metadata broken: record and move on
            // (removed a redundant 'continue' that was the loop's last statement)
            issueExistHTables.add(segFullName);
        }
    }
}
/**
 * Runs one-off startup work: initializes the query metrics systems, then
 * reflectively instantiates and executes each {@code InitialTask} class listed
 * (comma separated) in the Kylin config. A failing task is logged and skipped
 * so that one broken task never blocks the remaining tasks or startup.
 */
private void runInitialTasks() {
    // init metrics system for kylin
    QueryMetricsFacade.init();
    QueryMetrics2Facade.init();

    KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
    String initTasks = kylinConfig.getInitTasks();
    if (!StringUtils.isEmpty(initTasks)) {
        String[] taskClasses = StringUtil.splitByComma(initTasks);
        for (String taskClass : taskClasses) {
            try {
                // tasks are created reflectively; each needs a public no-arg constructor
                InitialTask task = (InitialTask) Class.forName(taskClass).newInstance();
                logger.info("Running initial task: " + taskClass);
                task.execute();
            } catch (Throwable e) {
                // deliberately swallow: startup must continue past a broken task
                logger.error("Initial task failed: " + taskClass, e);
            }
        }
        logger.info("All initial tasks finished.");
    }
}
}
/**
 * Reloads cube descriptors from the metadata store and applies
 * updateCubeDesc() to each, finishing with verify().
 * When {@code cubeNames} is empty every descriptor is processed; otherwise only
 * the comma separated names held in {@code cubeNames[0]}.
 */
public void update() {
    logger.info("Reloading Cube Metadata from store: "
            + store.getReadableResourcePath(ResourceStore.CUBE_DESC_RESOURCE_ROOT));
    CubeDescManager descManager = CubeDescManager.getInstance(config);

    List<CubeDesc> targets;
    if (ArrayUtils.isEmpty(cubeNames)) {
        // no explicit selection: process every cube descriptor
        targets = descManager.listAllDesc();
    } else {
        String[] selected = StringUtil.splitByComma(cubeNames[0]);
        if (ArrayUtils.isEmpty(selected)) {
            return; // nothing selected, nothing to do
        }
        targets = Lists.newArrayListWithCapacity(selected.length);
        for (String cubeName : selected) {
            targets.add(descManager.getCubeDesc(cubeName));
        }
    }

    for (CubeDesc desc : targets) {
        updateCubeDesc(desc);
    }
    verify();
}
/**
 * Task setup: loads the Kylin config from the HDFS metadata URL, resolves the
 * target cube, and caches the segments being merged plus the columns that need
 * dictionaries built.
 *
 * @param context Hadoop task context carrying the job configuration
 * @throws IOException          on metadata load failure
 * @throws InterruptedException propagated from the superclass setup
 */
@Override
protected void doSetup(Context context) throws IOException, InterruptedException {
    super.doSetup(context);

    final SerializableConfiguration sConf = new SerializableConfiguration(context.getConfiguration());
    final String metaUrl = context.getConfiguration().get(BatchConstants.ARG_META_URL);
    final String cubeName = context.getConfiguration().get(BatchConstants.ARG_CUBE_NAME);
    // comma separated ids of the segments participating in this merge
    final String segmentIds = context.getConfiguration().get(MergeDictionaryJob.OPTION_MERGE_SEGMENT_IDS.getOpt());

    final KylinConfig kylinConfig = AbstractHadoopJob.loadKylinConfigFromHdfs(sConf, metaUrl);
    final CubeInstance cubeInstance = CubeManager.getInstance(kylinConfig).getCube(cubeName);
    final CubeDesc cubeDesc = CubeDescManager.getInstance(kylinConfig).getCubeDesc(cubeInstance.getDescName());

    mergingSegments = getMergingSegments(cubeInstance, StringUtil.splitByComma(segmentIds));
    // only columns requiring a dictionary participate in the dictionary merge
    tblColRefs = cubeDesc.getAllColumnsNeedDictionaryBuilt().toArray(new TblColRef[0]);
    dictMgr = DictionaryManager.getInstance(kylinConfig);
}
/**
 * Fixes (or, when fixing is disabled, just reports) HTables whose host tag does
 * not match the target metadata URL prefix, as collected by check().
 * Fixing disables the table, rewrites the host tag, modifies, and re-enables it.
 *
 * @throws IOException if an HBase admin operation fails
 */
public void fixInconsistent() throws IOException {
    if (ifFix) { // was 'ifFix == true'; comparing a boolean against true is redundant
        for (String segFullName : inconsistentHTables) {
            // segFullName format: "<htable name>,<cube name>"
            String[] sepNameList = StringUtil.splitByComma(segFullName);
            HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(sepNameList[0]));
            // fixed log message: a missing space before "belonging" used to garble this line
            logger.info("Change the host of htable " + sepNameList[0] + " belonging to cube " + sepNameList[1]
                    + " from " + desc.getValue(IRealizationConstants.HTableTag) + " to "
                    + dstCfg.getMetadataUrlPrefix());
            hbaseAdmin.disableTable(sepNameList[0]);
            desc.setValue(IRealizationConstants.HTableTag, dstCfg.getMetadataUrlPrefix());
            hbaseAdmin.modifyTable(sepNameList[0], desc);
            hbaseAdmin.enableTable(sepNameList[0]);
        }
    } else {
        logger.info("------ Inconsistent HTables Needed To Be Fixed ------");
        for (String hTable : inconsistentHTables) {
            String[] sepNameList = StringUtil.splitByComma(hTable);
            logger.info(sepNameList[0] + " belonging to cube " + sepNameList[1]);
        }
        logger.info("----------------------------------------------------");
    }
}
realizationNames.addAll(Lists.newArrayList(StringUtil.splitByComma(response.getCube())));
/**
 * Verifies that splitByComma() splits a simple two-token string into
 * its comma separated parts.
 */
@Test
public void splitByCommaTest() {
    String[] actual = StringUtil.splitByComma("Hello,Kylin");
    Assert.assertArrayEquals(new String[] { "Hello", "Kylin" }, actual);
}
}
for (String projectName : StringUtil.splitByComma(projectNames)) { ProjectInstance projectInstance = projectManager.getProject(projectName); if (projectInstance == null) { for (String cubeName : StringUtil.splitByComma(cubeNames)) { IRealization realization = cubeManager.getRealization(cubeName); if (realization != null) {
for (String projectName : StringUtil.splitByComma(projectNames)) { ProjectInstance projectInstance = projectManager.getProject(projectName); Preconditions.checkNotNull(projectInstance, "Project " + projectName + " does not exist."); for (String cubeName : StringUtil.splitByComma(cubeNames)) { IRealization realization = cubeManager.getRealization(cubeName); if (realization == null) { for (String hybridName : StringUtil.splitByComma(hybridNames)) { IRealization realization = hybridManager.getRealization(hybridName);
metaUrl, segmentId, StringUtil.splitByComma(segmentIds), statOutputPath, tblColRefs, sConf));
/**
 * Parses a comma separated user-info string: the first token is the username,
 * all remaining tokens are role names appended to this user's roles.
 *
 * @param userInfo comma separated "username,role1,role2,..." string
 */
public void parseUserInfo(String userInfo) {
    String[] info = StringUtil.splitByComma(userInfo);
    if (info.length > 0) //first element is username
        this.username = info[0];
    for (int i = 1; i < info.length; i++) //the remains should be roles which starts from index 1
        this.roles.add(info[i]);
}
/**
 * Prints every HTable collected in {@code issueExistHTables}: tables that are
 * missing in HBase or whose metadata could not be read.
 */
public void printIssueExistingHTables() {
    logger.info("------ HTables exist issues in hbase : not existing, metadata broken ------");
    for (String segFullName : issueExistHTables) {
        // each entry is a comma separated "<htable name>,<cube name>" pair
        String[] sepNameList = StringUtil.splitByComma(segFullName);
        logger.error(sepNameList[0] + " belonging to cube " + sepNameList[1]
                + " has some issues and cannot be read successfully!!!");
    }
    logger.info("----------------------------------------------------");
}
}
/**
 * Task setup hook: resolves the Kylin config from the HDFS metadata URL and
 * caches the merging segments, the dictionary columns, and the dictionary
 * manager for use during processing.
 *
 * @param context Hadoop task context carrying the job configuration
 * @throws IOException          on metadata load failure
 * @throws InterruptedException propagated from the superclass setup
 */
@Override
protected void doSetup(Context context) throws IOException, InterruptedException {
    super.doSetup(context);

    final SerializableConfiguration sConf = new SerializableConfiguration(context.getConfiguration());
    final String metaUrl = context.getConfiguration().get(BatchConstants.ARG_META_URL);
    final String cubeName = context.getConfiguration().get(BatchConstants.ARG_CUBE_NAME);
    // segment ids arrive as one comma separated job-configuration value
    final String segmentIds = context.getConfiguration().get(MergeDictionaryJob.OPTION_MERGE_SEGMENT_IDS.getOpt());

    final KylinConfig kylinConfig = AbstractHadoopJob.loadKylinConfigFromHdfs(sConf, metaUrl);
    final CubeInstance cubeInstance = CubeManager.getInstance(kylinConfig).getCube(cubeName);
    final CubeDesc cubeDesc = CubeDescManager.getInstance(kylinConfig).getCubeDesc(cubeInstance.getDescName());

    mergingSegments = getMergingSegments(cubeInstance, StringUtil.splitByComma(segmentIds));
    // restrict to columns that actually need a dictionary built
    tblColRefs = cubeDesc.getAllColumnsNeedDictionaryBuilt().toArray(new TblColRef[0]);
    dictMgr = DictionaryManager.getInstance(kylinConfig);
}