@Override public ITupleIterator search(StorageContext context, SQLDigest sqlDigest) { String tableName = seg.getStorageLocationIdentifier(); //HConnection is cached, so need not be closed HConnection conn = HBaseConnection.get(context.getConnUrl()); try { return new EndpointTupleIterator(seg, sqlDigest.filter, sqlDigest.groupbyColumns, new ArrayList<>(sqlDigest.aggregations), context, conn); } catch (Throwable e) { e.printStackTrace(); throw new IllegalStateException("Error when connecting to II htable " + tableName, e); } } }
/**
 * Collects the HBase table names (storage location identifiers) of every READY
 * cube segment and every READY II segment under the given Kylin config.
 *
 * @param config Kylin configuration used to look up cube and II managers
 * @return list of non-blank HTable names; may contain duplicates if segments share tables
 */
private static List<String> getHTableNames(KylinConfig config) {
    CubeManager cubeMgr = CubeManager.getInstance(config);

    // Program to the List interface; ArrayList is an implementation detail.
    List<String> result = new ArrayList<String>();
    for (CubeInstance cube : cubeMgr.listAllCubes()) {
        for (CubeSegment seg : cube.getSegments(SegmentStatusEnum.READY)) {
            String tableName = seg.getStorageLocationIdentifier();
            // '!isBlank(...)' replaces the '== false' anti-idiom.
            if (!StringUtils.isBlank(tableName)) {
                result.add(tableName);
                System.out.println("added new table: " + tableName);
            }
        }
    }
    for (IIInstance ii : IIManager.getInstance(config).listAllIIs()) {
        for (IISegment seg : ii.getSegments(SegmentStatusEnum.READY)) {
            String tableName = seg.getStorageLocationIdentifier();
            if (!StringUtils.isBlank(tableName)) {
                result.add(tableName);
                System.out.println("added new table: " + tableName);
            }
        }
    }
    return result;
}
}
/**
 * Builds the shell step that creates the HBase table backing the given II segment.
 *
 * @param seg the II segment whose HTable is to be created
 * @return a configured {@link HadoopShellExecutable} running {@code IICreateHTableJob}
 */
private HadoopShellExecutable createCreateHTableStep(IISegment seg) {
    StringBuilder params = new StringBuilder();
    appendExecCmdParameters(params, "iiname", seg.getIIInstance().getName());
    appendExecCmdParameters(params, "htablename", seg.getStorageLocationIdentifier());

    HadoopShellExecutable step = new HadoopShellExecutable();
    step.setName(ExecutableConstants.STEP_NAME_CREATE_HBASE_TABLE);
    step.setJobParams(params.toString());
    step.setJobClass(IICreateHTableJob.class);
    return step;
}
/**
 * Builds the shell step that bulk-loads generated HFiles into the segment's HBase table.
 *
 * @param seg   the II segment receiving the HFiles
 * @param jobId job id used to locate the HFile output directory
 * @return a configured {@link HadoopShellExecutable} running {@code IIBulkLoadJob}
 */
private HadoopShellExecutable createBulkLoadStep(IISegment seg, String jobId) {
    StringBuilder args = new StringBuilder();
    appendExecCmdParameters(args, "input", getHFilePath(seg, jobId));
    appendExecCmdParameters(args, "htablename", seg.getStorageLocationIdentifier());
    appendExecCmdParameters(args, "iiname", seg.getIIInstance().getName());

    HadoopShellExecutable loadStep = new HadoopShellExecutable();
    loadStep.setName(ExecutableConstants.STEP_NAME_BULK_LOAD_HFILE);
    loadStep.setJobParams(args.toString());
    loadStep.setJobClass(IIBulkLoadJob.class);
    return loadStep;
}
// Fragment of an enclosing method (not visible here): this II segment's HTable is
// still referenced, so take it off the pending-drop list to avoid deleting live data.
// NOTE(review): assumes 'allTablesNeedToBeDropped', 'seg', 'ii' and 'log' are in the
// enclosing scope — confirm against the surrounding method.
String tablename = seg.getStorageLocationIdentifier(); allTablesNeedToBeDropped.remove(tablename); log.info("Remove table " + tablename + " from drop list, as the table belongs to ii " + ii.getName() + " with status " + ii.getStatus());
/**
 * Reads all key-values from the segment's HTable, decodes them into slices,
 * then iterates and dumps the resulting table records.
 */
@Test
public void testLoad() throws Exception {
    String htableName = seg.getStorageLocationIdentifier();
    IIKeyValueCodec codec = new IIKeyValueCodec(info.getDigest());
    List<Slice> decoded = Lists.newArrayList();
    HBaseClientKVIterator scanner = new HBaseClientKVIterator(hconn, htableName, IIDesc.HBASE_FAMILY_BYTES, IIDesc.HBASE_QUALIFIER_BYTES);
    try {
        for (Slice s : codec.decodeKeyValue(scanner)) {
            decoded.add(s);
        }
    } finally {
        // Always release the HBase scanner, even if decoding fails mid-way.
        scanner.close();
    }
    List<TableRecord> rows = iterateRecords(decoded);
    dump(rows);
    System.out.println(rows.size() + " records");
}
/**
 * Builds the MapReduce step that converts built II data into HFiles for bulk load.
 *
 * @param seg       the II segment being built
 * @param inputPath HDFS path of the II build output
 * @param jobId     job id used to derive the HFile output path
 * @return a configured {@link MapReduceExecutable} running {@code IICreateHFileJob}
 */
private MapReduceExecutable createConvertToHfileStep(IISegment seg, String inputPath, String jobId) {
    String iiName = seg.getIIInstance().getName();

    StringBuilder args = new StringBuilder();
    appendMapReduceParameters(args, engineConfig);
    appendExecCmdParameters(args, "iiname", iiName);
    appendExecCmdParameters(args, "input", inputPath);
    appendExecCmdParameters(args, "output", getHFilePath(seg, jobId));
    appendExecCmdParameters(args, "htablename", seg.getStorageLocationIdentifier());
    appendExecCmdParameters(args, "jobname", "Kylin_HFile_Generator_" + iiName + "_Step");

    MapReduceExecutable hfileStep = new MapReduceExecutable();
    hfileStep.setName(ExecutableConstants.STEP_NAME_CONVERT_II_TO_HFILE);
    hfileStep.setMapReduceParams(args.toString());
    hfileStep.setMapReduceJobClass(IICreateHFileJob.class);
    return hfileStep;
}
// Constructor fragment (closing brace lies outside this view): wires the iterator
// to the segment's HTable via the caller-supplied (cached) HConnection.
// NOTE(review): declared 'throws Throwable' is unusually broad — consider narrowing
// once the full body is in view.
public EndpointTupleIterator(IISegment segment, TupleFilter rootFilter, Collection<TblColRef> groupBy, List<FunctionDesc> measures, StorageContext context, HConnection conn) throws Throwable { String tableName = segment.getStorageLocationIdentifier(); table = conn.getTable(tableName); factTableName = segment.getIIDesc().getFactTableName();
/**
 * Collects the HBase table names (storage location identifiers) of every READY
 * cube segment and every READY II segment under the given Kylin config.
 *
 * @param config Kylin configuration used to look up cube and II managers
 * @return list of non-blank HTable names; may contain duplicates if segments share tables
 */
private static List<String> getHTableNames(KylinConfig config) {
    CubeManager cubeMgr = CubeManager.getInstance(config);

    // Program to the List interface; ArrayList is an implementation detail.
    List<String> result = new ArrayList<String>();
    for (CubeInstance cube : cubeMgr.listAllCubes()) {
        for (CubeSegment seg : cube.getSegments(SegmentStatusEnum.READY)) {
            String tableName = seg.getStorageLocationIdentifier();
            // '!isBlank(...)' replaces the '== false' anti-idiom.
            if (!StringUtils.isBlank(tableName)) {
                result.add(tableName);
                System.out.println("added new table: " + tableName);
            }
        }
    }
    for (IIInstance ii : IIManager.getInstance(config).listAllIIs()) {
        for (IISegment seg : ii.getSegments(SegmentStatusEnum.READY)) {
            String tableName = seg.getStorageLocationIdentifier();
            if (!StringUtils.isBlank(tableName)) {
                result.add(tableName);
                System.out.println("added new table: " + tableName);
            }
        }
    }
    return result;
}
}
/**
 * Reloads a single IIInstance from the resource store into the local cache.
 *
 * <p>Best-effort: any failure is logged and {@code null} is returned so a bad
 * resource does not abort a bulk reload.
 *
 * @param path resource-store path of the II instance
 * @return the loaded instance, or {@code null} if loading failed
 * @throws IOException declared for callers; load failures are caught internally
 */
private synchronized IIInstance reloadIILocalAt(String path) throws IOException {
    ResourceStore store = getStore();
    logger.debug("Loading IIInstance " + store.getReadableResourcePath(path));
    try {
        IIInstance instance = store.getResource(path, IIInstance.class, II_SERIALIZER);
        instance.setConfig(config);

        if (StringUtils.isBlank(instance.getName())) {
            throw new IllegalStateException("IIInstance name must not be blank");
        }
        iiMap.putLocal(instance.getName(), instance);

        // Remember every segment's HTable so it is treated as in-use storage.
        for (IISegment segment : instance.getSegments()) {
            usedStorageLocation.put(instance.getName().toUpperCase(), segment.getStorageLocationIdentifier());
        }
        return instance;
    } catch (Exception e) {
        logger.error("Error during load ii instance " + path, e);
        return null;
    }
}
private static List<String> getHTableNames(KylinConfig config) { CubeManager cubeMgr = CubeManager.getInstance(config); ArrayList<String> result = new ArrayList<String>(); for (CubeInstance cube : cubeMgr.listAllCubes()) { for (CubeSegment seg : cube.getSegments(SegmentStatusEnum.READY)) { String tableName = seg.getStorageLocationIdentifier(); if (!StringUtils.isBlank(tableName)) { result.add(tableName); System.out.println("added new table: " + tableName); } } } for (IIInstance ii : IIManager.getInstance(config).listAllIIs()) { if (ii.getStatus() == RealizationStatusEnum.READY) { for (IISegment seg : ii.getSegments()) {//streaming segment is never "READY" String tableName = seg.getStorageLocationIdentifier(); if (!StringUtils.isBlank(tableName)) { result.add(tableName); System.out.println("added new table: " + tableName); } } } } return result; }
/**
 * Builds the shell step that creates the HBase table backing the given II segment.
 *
 * @param seg the II segment whose HTable is to be created
 * @return a configured {@link HadoopShellExecutable} running {@code IICreateHTableJob}
 */
private HadoopShellExecutable createCreateHTableStep(IISegment seg) {
    HadoopShellExecutable htableStep = new HadoopShellExecutable();
    htableStep.setName(ExecutableConstants.STEP_NAME_CREATE_HBASE_TABLE);

    StringBuilder jobArgs = new StringBuilder();
    appendExecCmdParameters(jobArgs, "iiname", seg.getIIInstance().getName());
    appendExecCmdParameters(jobArgs, "htablename", seg.getStorageLocationIdentifier());

    htableStep.setJobParams(jobArgs.toString());
    htableStep.setJobClass(IICreateHTableJob.class);
    return htableStep;
}
/**
 * Builds the shell step that bulk-loads generated HFiles into the segment's HBase table.
 *
 * @param seg   the II segment receiving the HFiles
 * @param jobId job id used to locate the HFile output directory
 * @return a configured {@link HadoopShellExecutable} running {@code IIBulkLoadJob}
 */
private HadoopShellExecutable createBulkLoadStep(IISegment seg, String jobId) {
    HadoopShellExecutable step = new HadoopShellExecutable();
    step.setName(ExecutableConstants.STEP_NAME_BULK_LOAD_HFILE);

    StringBuilder jobArgs = new StringBuilder();
    appendExecCmdParameters(jobArgs, "input", getHFilePath(seg, jobId));
    appendExecCmdParameters(jobArgs, "htablename", seg.getStorageLocationIdentifier());
    appendExecCmdParameters(jobArgs, "iiname", seg.getIIInstance().getName());

    step.setJobParams(jobArgs.toString());
    step.setJobClass(IIBulkLoadJob.class);
    return step;
}
// Fragment of an enclosing method (not visible here): this II segment's HTable is
// still referenced, so take it off the pending-drop list to avoid deleting live data.
// NOTE(review): assumes 'allTablesNeedToBeDropped', 'seg', 'ii' and 'log' are in the
// enclosing scope — confirm against the surrounding method.
String tablename = seg.getStorageLocationIdentifier(); allTablesNeedToBeDropped.remove(tablename); log.info("Remove table " + tablename + " from drop list, as the table belongs to ii " + ii.getName() + " with status " + ii.getStatus());
/**
 * Builds the MapReduce step that converts built II data into HFiles for bulk load.
 *
 * @param seg       the II segment being built
 * @param inputPath HDFS path of the II build output
 * @param jobId     job id used to derive the HFile output path
 * @return a configured {@link MapReduceExecutable} running {@code IICreateHFileJob}
 */
private MapReduceExecutable createConvertToHfileStep(IISegment seg, String inputPath, String jobId) {
    MapReduceExecutable convertStep = new MapReduceExecutable();
    convertStep.setName(ExecutableConstants.STEP_NAME_CONVERT_II_TO_HFILE);

    StringBuilder mrArgs = new StringBuilder();
    appendMapReduceParameters(mrArgs, engineConfig);
    appendExecCmdParameters(mrArgs, "iiname", seg.getIIInstance().getName());
    appendExecCmdParameters(mrArgs, "input", inputPath);
    appendExecCmdParameters(mrArgs, "output", getHFilePath(seg, jobId));
    appendExecCmdParameters(mrArgs, "htablename", seg.getStorageLocationIdentifier());
    appendExecCmdParameters(mrArgs, "jobname", "Kylin_HFile_Generator_" + seg.getIIInstance().getName() + "_Step");

    convertStep.setMapReduceParams(mrArgs.toString());
    convertStep.setMapReduceJobClass(IICreateHFileJob.class);
    return convertStep;
}