/** Resolves the descriptor for the named table via the shared metadata manager. */
private TableDesc getTableDesc(String tableName) {
    MetadataManager metaMgr = MetadataManager.getInstance(this.config);
    return metaMgr.getTableDesc(tableName);
}
/**
 * Collects the descriptors of the fact table and of every dimension lookup table.
 * Table names are de-duplicated case-insensitively (upper-cased) before lookup.
 *
 * @return descriptors for all distinct tables referenced by this cube
 */
public List<TableDesc> listTables() {
    MetadataManager metaMgr = MetadataManager.getInstance(config);

    // gather distinct, upper-cased table names first
    Set<String> distinctNames = new HashSet<String>();
    distinctNames.add(factTable.toUpperCase());
    for (DimensionDesc dim : dimensions) {
        String dimTable = dim.getTable();
        if (dimTable != null) {
            distinctNames.add(dimTable.toUpperCase());
        }
    }

    // then resolve each name to its descriptor
    List<TableDesc> descs = new ArrayList<TableDesc>();
    for (String name : distinctNames) {
        descs.add(metaMgr.getTableDesc(name));
    }
    return descs;
}
/**
 * Collects the descriptors of the fact table and of every dimension lookup table.
 *
 * @deprecated use getModel().getAllTables() instead
 * @return descriptors for all distinct tables referenced by this cube
 */
@Deprecated // annotation added so the compiler flags callers, matching the javadoc tag
public List<TableDesc> listTables() {
    MetadataManager metaMgr = MetadataManager.getInstance(config);

    // gather distinct, upper-cased table names first
    Set<String> tableNames = new HashSet<String>();
    tableNames.add(this.getFactTable().toUpperCase());
    for (DimensionDesc dim : dimensions) {
        String table = dim.getTable();
        if (table != null) {
            tableNames.add(table.toUpperCase());
        }
    }

    // then resolve each name to its descriptor
    List<TableDesc> result = new ArrayList<TableDesc>();
    for (String tableName : tableNames) {
        result.add(metaMgr.getTableDesc(tableName));
    }
    return result;
}
/**
 * Creates a HiveTable view over the named table, pulling database, table name
 * and column count from the table's metadata descriptor.
 */
public HiveTable(MetadataManager metaMgr, String table) {
    TableDesc desc = metaMgr.getTableDesc(table);
    this.database = desc.getDatabase();
    this.hiveTable = desc.getName();
    this.nColumns = desc.getColumnCount();
}
/**
 * Adds the given tables to a project and persists the updated project instance.
 *
 * @param tableIdentities table identities (e.g. "DB.TABLE") to add to the project
 * @param projectName     name of the target project
 * @return the updated, persisted project instance
 * @throws IOException           if persisting the project fails
 * @throws IllegalStateException if any table identity is unknown to the metadata manager
 */
public ProjectInstance addTableDescToProject(String[] tableIdentities, String projectName) throws IOException {
    MetadataManager metaMgr = getMetadataManager();
    ProjectInstance projectInstance = getProject(projectName);
    for (String tableId : tableIdentities) {
        TableDesc table = metaMgr.getTableDesc(tableId);
        if (table == null) {
            // BUG FIX: the message previously interpolated the null 'table' reference,
            // always printing "Cannot find table 'null'"; report the requested id instead
            throw new IllegalStateException("Cannot find table '" + tableId + "' in metadata manager");
        }
        projectInstance.addTable(table.getIdentity());
    }
    saveResource(projectInstance);
    return projectInstance;
}
protected void attachKylinPropsAndMetadata(CubeInstance cube, Configuration conf) throws IOException { File tmp = File.createTempFile("kylin_job_meta", ""); tmp.delete(); // we need a directory, so delete the file first File metaDir = new File(tmp, "meta"); metaDir.mkdirs(); metaDir.getParentFile().deleteOnExit(); // write kylin.properties KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv(); File kylinPropsFile = new File(metaDir, "kylin.properties"); kylinConfig.writeProperties(kylinPropsFile); // write cube / model_desc / cube_desc / dict / table ArrayList<String> dumpList = new ArrayList<String>(); dumpList.add(cube.getResourcePath()); dumpList.add(cube.getDescriptor().getModel().getResourcePath()); dumpList.add(cube.getDescriptor().getResourcePath()); for (String tableName : cube.getDescriptor().getModel().getAllTables()) { TableDesc table = MetadataManager.getInstance(kylinConfig).getTableDesc(tableName); dumpList.add(table.getResourcePath()); } for (CubeSegment segment : cube.getSegments()) { dumpList.addAll(segment.getDictionaryPaths()); } dumpResources(kylinConfig, metaDir, dumpList); // hadoop distributed cache conf.set("tmpfiles", "file:///" + OptionsHelper.convertToFileURL(metaDir.getAbsolutePath())); }
return; TableDesc table = MetadataManager.getInstance(cube.getConfig()).getTableDesc(factTable); if (table == null) { context.addResult(ResultLevel.ERROR, "Fact table can not be found: " + cube);
protected void attachKylinPropsAndMetadata(IIInstance ii, Configuration conf) throws IOException { File tmp = File.createTempFile("kylin_job_meta", ""); tmp.delete(); // we need a directory, so delete the file first File metaDir = new File(tmp, "meta"); metaDir.mkdirs(); metaDir.getParentFile().deleteOnExit(); // write kylin.properties KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv(); File kylinPropsFile = new File(metaDir, "kylin.properties"); kylinConfig.writeProperties(kylinPropsFile); // write II / model_desc / II_desc / dict / table ArrayList<String> dumpList = new ArrayList<String>(); dumpList.add(ii.getResourcePath()); dumpList.add(ii.getDescriptor().getModel().getResourcePath()); dumpList.add(ii.getDescriptor().getResourcePath()); for (String tableName : ii.getDescriptor().getModel().getAllTables()) { TableDesc table = MetadataManager.getInstance(kylinConfig).getTableDesc(tableName); dumpList.add(table.getResourcePath()); } for (IISegment segment : ii.getSegments()) { dumpList.addAll(segment.getDictionaryPaths()); } dumpResources(kylinConfig, metaDir, dumpList); // hadoop distributed cache conf.set("tmpfiles", "file:///" + OptionsHelper.convertToFileURL(metaDir.getAbsolutePath())); }
private boolean matchAllCompositeKeys(TreeMap<String, String> lookupCol2FactTableCol, LinkedList<String> columnValues) { KylinConfig config = KylinConfig.getInstanceFromEnv(); for (String lookupTable : lookupTableKeys.keySet()) { if (lookupTableKeys.get(lookupTable).size() == 1) continue; String[] comboKey = new String[lookupTableKeys.get(lookupTable).size()]; int index = 0; for (String column : lookupTableKeys.get(lookupTable)) { String key = lookupTable + "/" + column; String factTableCol = lookupCol2FactTableCol.get(key); int cardinal = MetadataManager.getInstance(config).getTableDesc(factTableName).findColumnByName(factTableCol).getZeroBasedIndex(); comboKey[index] = columnValues.get(cardinal); index++; } Array<String> wrap = new Array<String>(comboKey); if (!lookupTableCompositeKeyValues.get(lookupTable).contains(wrap)) { // System.out.println("Try " + wrap + " Failed, continue..."); return false; } } return true; }
/**
 * Builds an in-memory lookup table for the given dimension, backed by the
 * snapshot that was captured for this cube segment.
 *
 * @throws IllegalStateException if the segment has no snapshot for the dimension's
 *                               table, or if loading the snapshot fails
 */
public LookupStringTable getLookupTable(CubeSegment cubeSegment, DimensionDesc dim) {
    String tableName = dim.getTable();
    String[] pkCols = dim.getJoin().getPrimaryKey();

    String snapshotResPath = cubeSegment.getSnapshotResPath(tableName);
    if (snapshotResPath == null) {
        throw new IllegalStateException("No snaphot for table '" + tableName + "' found on cube segment" + cubeSegment.getCubeInstance().getName() + "/" + cubeSegment);
    }

    try {
        TableDesc tableDesc = getMetadataManager().getTableDesc(tableName);
        SnapshotTable snapshot = getSnapshotManager().getSnapshotTable(snapshotResPath);
        return new LookupStringTable(tableDesc, pkCols, snapshot);
    } catch (IOException e) {
        throw new IllegalStateException("Failed to load lookup table " + tableName + " from snapshot " + snapshotResPath, e);
    }
}
private boolean sanityCheck(ProjectCache prjCache, IRealization realization) { MetadataManager metaMgr = mgr.getMetadataManager(); List<TblColRef> allColumns = realization.getAllColumns(); if (allColumns == null || allColumns.isEmpty()) { logger.error("Realization '" + realization.getCanonicalName() + "' does not report any columns"); return false; } for (TblColRef col : allColumns) { TableDesc table = metaMgr.getTableDesc(col.getTable()); if (table == null) { logger.error("Realization '" + realization.getCanonicalName() + "' reports column '" + col.getCanonicalName() + "', but its table is not found by MetadataManager"); return false; } ColumnDesc foundCol = table.findColumnByName(col.getName()); if (col.getColumn().equals(foundCol) == false) { logger.error("Realization '" + realization.getCanonicalName() + "' reports column '" + col.getCanonicalName() + "', but it is not equal to '" + foundCol + "' according to MetadataManager"); return false; } // auto-define table required by realization for some legacy test case if (prjCache.tables.get(table.getIdentity()) == null) { prjCache.tables.put(table.getIdentity(), new TableCache(table)); logger.warn("Realization '" + realization.getCanonicalName() + "' reports columcn '" + col.getCanonicalName() + "' whose table is not defined in project '" + prjCache.project + "'"); } } return true; }
private LinkedList<String> createRow(TreeMap<String, String> factTableCol2LookupCol, TreeSet<String> usedCols, TreeSet<String> defaultColumns) throws Exception { KylinConfig config = KylinConfig.getInstanceFromEnv(); LinkedList<String> columnValues = new LinkedList<String>(); for (ColumnDesc cDesc : MetadataManager.getInstance(config).getTableDesc(factTableName).getColumns()) { String colName = cDesc.getName(); if (factTableCol2LookupCol.containsKey(colName)) { // if the current column is a fk column in fact table ArrayList<String> candidates = this.feasibleValues.get(factTableCol2LookupCol.get(colName)); columnValues.add(candidates.get(r.nextInt(candidates.size()))); } else if (usedCols.contains(colName)) { // if the current column is a metric column in fact table columnValues.add(createCell(cDesc)); } else { // otherwise this column is not useful in OLAP columnValues.add(createDefaultsCell(cDesc.getTypeName())); defaultColumns.add(colName); } } return columnValues; }
/**
 * Builds a lookup table over the EDW.TEST_CAL_DT snapshot fixture, keyed by CAL_DT,
 * prints it, and returns it.
 */
public LookupTable initLookupTable() throws Exception {
    MetadataManager metaMgr = MetadataManager.getInstance(config);

    String tableName = "EDW.TEST_CAL_DT";
    String[] pkCols = new String[]{"CAL_DT"};
    String snapshotResPath = "/table_snapshot/TEST_CAL_DT.csv/4af48c94-86de-4e22-a4fd-c49b06cbaa4f.snapshot";

    TableDesc tableDesc = metaMgr.getTableDesc(tableName);
    SnapshotTable snapshot = getSnapshotManager().getSnapshotTable(snapshotResPath);

    LookupTable lookup = new LookupStringTable(tableDesc, pkCols, snapshot);
    System.out.println(lookup);
    return lookup;
}
/**
 * Initializes per-task state from the Hadoop configuration: loads Kylin config and
 * metadata, resolves the cube/segment/descriptors, and prepares the row-key encoder,
 * measure codec and working buffers used during map processing.
 *
 * @param context Hadoop task context carrying cube name and segment name
 * @throws IOException if Kylin metadata cannot be loaded from the configuration
 */
@Override
protected void setup(Context context) throws IOException {
    super.publishConfiguration(context.getConfiguration());

    cubeName = context.getConfiguration().get(BatchConstants.CFG_CUBE_NAME).toUpperCase();
    segmentName = context.getConfiguration().get(BatchConstants.CFG_CUBE_SEGMENT_NAME);

    // config/metadata must be loaded before any manager lookups below
    KylinConfig config = AbstractHadoopJob.loadKylinPropsAndMetadata(context.getConfiguration());

    metadataManager = MetadataManager.getInstance(config);
    cube = CubeManager.getInstance(config).getCube(cubeName);
    cubeSegment = cube.getSegment(segmentName, SegmentStatusEnum.NEW); // only NEW segments are built here
    cubeDesc = cube.getDescriptor();
    factTableDesc = metadataManager.getTableDesc(cubeDesc.getFactTable());

    long baseCuboidId = Cuboid.getBaseCuboidId(cubeDesc);
    baseCuboid = Cuboid.findById(cubeDesc, baseCuboidId);

    // intermediateTableDesc = new
    // JoinedFlatTableDesc(cube.getDescriptor());

    rowKeyEncoder = AbstractRowKeyEncoder.createInstance(cubeSegment, baseCuboid);

    measureCodec = new MeasureCodec(cubeDesc.getMeasures());
    measures = new Object[cubeDesc.getMeasures().size()];

    // one key-bytes slot per row-key column
    int colCount = cubeDesc.getRowkey().getRowKeyColumns().length;
    keyBytesBuf = new byte[colCount][];

    bytesSplitter = new BytesSplitter(factTableDesc.getColumns().length, 4096);

    nullValue = new byte[] { (byte) '\\', (byte) 'N' };// As in Hive, null
                                                       // value is
                                                       // represented by \N

    prepareJoins();
    prepareMetrics();
}
/**
 * Snapshots the given lookup table from Hive, records the snapshot's resource path
 * on the cube segment, persists the owning cube instance, and returns the snapshot.
 *
 * @throws IOException if snapshot building or persisting fails
 */
public SnapshotTable buildSnapshotTable(CubeSegment cubeSeg, String lookupTable) throws IOException {
    MetadataManager metaMgr = getMetadataManager();
    SnapshotManager snapshotMgr = getSnapshotManager();

    TableDesc tableDesc = metaMgr.getTableDesc(lookupTable);
    HiveTable hiveTable = new HiveTable(metaMgr, lookupTable);
    SnapshotTable snapshot = snapshotMgr.buildSnapshot(hiveTable, tableDesc);

    cubeSeg.putSnapshotResPath(lookupTable, snapshot.getResourcePath());
    saveResource(cubeSeg.getCubeInstance());

    return snapshot;
}
/**
 * Dumps the sites and category-groupings lookup tables built from CSV fixtures,
 * then exercises a composite-key row lookup on the category table.
 */
@Test
public void testBasic() throws Exception {
    MetadataManager metaMgr = MetadataManager.getInstance(getTestConfig());
    TableDesc siteTable = metaMgr.getTableDesc("EDW.TEST_SITES");
    TableDesc categoryTable = metaMgr.getTableDesc("DEFAULT.test_category_groupings");

    System.out.println("============================================================================");

    File siteCsv = new File(LOCALMETA_TEST_DATA + "/data/EDW.TEST_SITES.csv");
    LookupBytesTable lookup = new LookupBytesTable(siteTable, new String[] { "SITE_ID" }, new FileTable("file://" + siteCsv.getAbsolutePath(), 10));
    lookup.dump();

    System.out.println("============================================================================");

    File categoryCsv = new File(LOCALMETA_TEST_DATA + "/data/DEFAULT.TEST_CATEGORY_GROUPINGS.csv");
    lookup = new LookupBytesTable(categoryTable, new String[] { "leaf_categ_id", "site_id" }, new FileTable("file://" + categoryCsv.getAbsolutePath(), 36));
    lookup.dump();

    System.out.println("============================================================================");

    // look up one row by its two-part key (leaf_categ_id=533, site_id=0)
    ByteArray k1 = new ByteArray(Bytes.toBytes("533"));
    ByteArray k2 = new ByteArray(Bytes.toBytes("0"));
    Array<ByteArray> key = new Array<ByteArray>(new ByteArray[] { k1, k2 });
    System.out.println(lookup.getRow(key));
}
}
/** Verifies that a table can be found by its identity and round-trips that identity. */
@Test
public void testFindTableByName() throws Exception {
    MetadataManager metaMgr = MetadataManager.getInstance(getTestConfig());
    TableDesc table = metaMgr.getTableDesc("EDW.TEST_CAL_DT");
    Assert.assertNotNull(table);
    Assert.assertEquals("EDW.TEST_CAL_DT", table.getIdentity());
}
/**
 * Verifies the table-descriptor resource-path upgrade: the new (database-qualified)
 * resource locations must exist in the store while the old (unqualified) ones must not,
 * for both the descriptor itself and its extended-description resource.
 */
@Test
public void testTableDescUpgrade() throws Exception {
    MetadataManager metaMgr = MetadataManager.getInstance(KylinConfig.getInstanceFromEnv());
    TableDesc fact = metaMgr.getTableDesc("default.test_kylin_fact");

    @SuppressWarnings("deprecation")
    String oldResLocation = fact.getResourcePathV1();
    String newResLocation = fact.getResourcePath();

    ResourceStore store = ResourceStore.getStore(KylinConfig.getInstanceFromEnv());

    // descriptor resource: only the new location exists
    Assert.assertTrue(store.exists(newResLocation));
    Assert.assertTrue(!store.exists(oldResLocation));

    // extended-description resource: likewise only the new location exists
    String oldExdResLocation = TableDesc.concatExdResourcePath("test_kylin_fact".toUpperCase());
    String newExdResLocation = TableDesc.concatExdResourcePath("default.test_kylin_fact".toUpperCase());
    Assert.assertTrue(store.exists(newExdResLocation));
    Assert.assertTrue(!store.exists(oldExdResLocation));
}
/**
 * Prepares the endpoint-aggregation test fixture: loads the test II instance and its
 * fact table metadata, builds a projector over the LSTG_FORMAT_NAME dimension,
 * the endpoint aggregators, a filter on LSTG_SITE_ID = "0", an empty aggregation
 * cache, and a mocked data table.
 *
 * @throws IOException if the test metadata cannot be created/loaded
 */
@Before
public void setup() throws IOException {
    this.createTestMetadata();
    this.ii = IIManager.getInstance(getTestConfig()).getII("test_kylin_ii");
    this.tableRecordInfo = new TableRecordInfo(ii.getFirstSegment());
    factTableDesc = MetadataManager.getInstance(getTestConfig()).getTableDesc("DEFAULT.TEST_KYLIN_FACT");

    TblColRef formatName = this.ii.getDescriptor().findColumnRef("DEFAULT.TEST_KYLIN_FACT", "LSTG_FORMAT_NAME");
    TblColRef siteId = this.ii.getDescriptor().findColumnRef("DEFAULT.TEST_KYLIN_FACT", "LSTG_SITE_ID");

    // project only on the format-name dimension
    Collection<TblColRef> dims = new HashSet<>();
    dims.add(formatName);
    projector = CoprocessorProjector.makeForEndpoint(tableRecordInfo, dims);
    aggregators = EndpointAggregators.fromFunctions(tableRecordInfo, buildAggregations());

    // filter: LSTG_SITE_ID = "0"
    CompareTupleFilter rawFilter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.EQ);
    rawFilter.addChild(new ColumnTupleFilter(siteId));
    rawFilter.addChild(new ConstantTupleFilter("0"));
    filter = CoprocessorFilter.fromFilter(this.ii.getFirstSegment(), rawFilter);

    aggCache = new EndpointAggregationCache(aggregators);
    tableData = mockTable();
}
@Test public void basicTest() throws Exception { String tableName = "EDW.TEST_SITES"; HiveTable hiveTable = new HiveTable(MetadataManager.getInstance(getTestConfig()), tableName); TableDesc tableDesc = MetadataManager.getInstance(getTestConfig()).getTableDesc(tableName); String snapshotPath = snapshotMgr.buildSnapshot(hiveTable, tableDesc).getResourcePath(); snapshotMgr.wipeoutCache(); SnapshotTable snapshot = snapshotMgr.getSnapshotTable(snapshotPath); // compare hive & snapshot TableReader hiveReader = hiveTable.getReader(); TableReader snapshotReader = snapshot.getReader(); while (true) { boolean hiveNext = hiveReader.next(); boolean snapshotNext = snapshotReader.next(); assertEquals(hiveNext, snapshotNext); if (hiveNext == false) break; String[] hiveRow = hiveReader.getRow(); String[] snapshotRow = snapshotReader.getRow(); assertArrayEquals(hiveRow, snapshotRow); } } }