private TableDesc getTableDesc(String tableName) {
    return MetadataManager.getInstance(this.config).getTableDesc(tableName);
}
private ExecutableDao(KylinConfig config) {
    logger.info("Using metadata url: " + config);
    this.store = MetadataManager.getInstance(config).getStore();
}
/**
 * Tell MetadataManager that the metadata has changed: clear the local cache,
 * then reload the cube descs and source tables. A broadcast must be sent out
 * so that other instances refresh as well.
 */
public void reload() {
    clearCache();
    getInstance(config);
}
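// A hedged usage sketch, not from the source: how a caller might force a
// refresh after the metadata was edited elsewhere. Obtaining the config via
// getInstanceFromEnv() is an assumption about the surrounding code.
MetadataManager metaMgr = MetadataManager.getInstance(KylinConfig.getInstanceFromEnv());
metaMgr.reload(); // drops the cached descs and re-reads them from the store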
private void loadH2Table(String tableName, String joinType) throws SQLException {
    MetadataManager metaMgr = MetadataManager.getInstance(config);
    TableDesc tableDesc = metaMgr.getTableDesc(tableName.toUpperCase());
    File tempFile = null;

    InputStream csvStream = metaMgr.getStore().getResource(normalPath + fileNameSuffix);
    if (csvStream == null) {
        csvStream = metaMgr.getStore().getResource(normalPath);
    } else {
        logger.info("H2 decides to load " + (normalPath + fileNameSuffix) + " for table " + tableDesc.getIdentity());
public HiveTable(MetadataManager metaMgr, String table) {
    TableDesc tableDesc = metaMgr.getTableDesc(table);
    this.database = tableDesc.getDatabase();
    this.hiveTable = tableDesc.getName();
    this.nColumns = tableDesc.getColumnCount();
}
private static List<String> extractHiveTables(String database, Set<String> tables, KylinConfig config) throws IOException {
    MetadataManager metaMgr = MetadataManager.getInstance(KylinConfig.getInstanceFromEnv());
    for (String tableName : tables) {
        Table table = null;
        TableDesc tableDesc = metaMgr.getTableDesc(database + "." + tableName);
        if (tableDesc == null) {
            tableDesc = new TableDesc();
            Map<String, String> map = metaMgr.getTableDescExd(tableDesc.getIdentity());
            map.put(MetadataConstants.TABLE_EXD_PARTITIONED, Boolean.valueOf(partitionFields != null && partitionFields.size() > 0).toString());
            metaMgr.saveSourceTable(tableDesc);
            metaMgr.saveTableExd(tableDesc.getIdentity(), map);
            loadedTables.add(tableDesc.getIdentity());
private MetadataManager getMetadataManager() {
    return MetadataManager.getInstance(config);
}
public void init(MetadataManager metadataManager) {
    config = metadataManager.getConfig();
    this.model = MetadataManager.getInstance(config).getDataModelDesc(this.modelName);
}
public void init(KylinConfig config, Map<String, TableDesc> tables) {
    this.errors.clear();
    this.config = config;

    if (this.modelName == null || this.modelName.length() == 0) {
        this.addError("The cubeDesc '" + this.getName() + "' doesn't have a data model specified.");
    }

    this.model = MetadataManager.getInstance(config).getDataModelDesc(this.modelName);
    if (this.model == null) {
        this.addError("No data model found with name '" + modelName + "'.");
    }

    for (DimensionDesc dim : dimensions) {
        dim.init(this, tables);
    }

    sortDimAndMeasure();
    initDimensionColumns(tables);
    initMeasureColumns(tables);

    rowkey.init(this);
    if (hbaseMapping != null) {
        hbaseMapping.init(this);
    }
    initMeasureReferenceToColumnFamily();

    // check that all dimension columns are present on the rowkey
    List<TblColRef> dimCols = listDimensionColumnsExcludingDerived();
    if (rowkey.getRowKeyColumns().length != dimCols.size()) {
        addError("RowKey columns count (" + rowkey.getRowKeyColumns().length + ") does not match dimension columns count (" + dimCols.size() + ").");
    }
}
@Test
public void testGetInstance() throws Exception {
    MetadataManager mgr = MetadataManager.getInstance(getTestConfig());
    Assert.assertNotNull(mgr);
    Assert.assertNotNull(mgr.listAllTables());
    Assert.assertTrue(mgr.listAllTables().size() > 0);
}
private DataModelDesc reloadDataModelDescAt(String path) {
    ResourceStore store = getStore();
    try {
        DataModelDesc dataModelDesc = store.getResource(path, DataModelDesc.class, MODELDESC_SERIALIZER);
        dataModelDesc.init(this.getAllTablesMap());
        dataModelDescMap.putLocal(dataModelDesc.getName(), dataModelDesc);
        return dataModelDesc;
    } catch (IOException e) {
        throw new IllegalStateException("Failed to load DataModelDesc at " + path, e);
    }
}
@SuppressWarnings("unchecked") private Map<String, String> reloadSourceTableExdAt(String path) throws IOException { Map<String, String> attrs = Maps.newHashMap(); ResourceStore store = getStore(); InputStream is = store.getResource(path); if (is == null) { logger.warn("Failed to get table exd info from " + path); return null; } try { attrs.putAll(JsonUtil.readValue(is, HashMap.class)); } finally { if (is != null) is.close(); } // parse table identity from file name String file = path; if (file.indexOf("/") > -1) { file = file.substring(file.lastIndexOf("/") + 1); } String tableIdentity = file.substring(0, file.length() - MetadataConstants.FILE_SURFIX.length()).toUpperCase(); srcTableExdMap.putLocal(tableIdentity, attrs); return attrs; }
public String appendDBName(String table) {
    if (table.indexOf(".") > 0)
        return table;

    Map<String, TableDesc> map = getAllTablesMap();

    int count = 0;
    String result = null;
    for (TableDesc t : map.values()) {
        if (t.getName().equalsIgnoreCase(table)) {
            result = t.getIdentity();
            count++;
        }
    }

    if (count > 1) {
        logger.warn("More than one table is named '" + table + "' across databases; unable to tell which one is meant, arbitrarily picking '" + result + "'");
    }
    return result;
}
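// A hedged illustration of the resolution rules above; the table names and
// the DEFAULT database are made-up examples, not from the source.
MetadataManager mgr = MetadataManager.getInstance(KylinConfig.getInstanceFromEnv());
mgr.appendDBName("DEFAULT.KYLIN_FACT"); // already qualified: returned unchanged
mgr.appendDBName("KYLIN_FACT");         // unique match: resolved to "DEFAULT.KYLIN_FACT"
mgr.appendDBName("MISSING_TABLE");      // no match: returns null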
@Before
public void setUp() throws Exception {
    this.createTestMetadata();
    MetadataManager.clearCache();
}
private void reloadAllSourceTable() throws IOException {
    ResourceStore store = getStore();
    logger.debug("Reloading SourceTable from folder " + store.getReadableResourcePath(ResourceStore.TABLE_RESOURCE_ROOT));

    srcTableMap.clear();

    List<String> paths = store.collectResourceRecursively(ResourceStore.TABLE_RESOURCE_ROOT, MetadataConstants.FILE_SURFIX);
    for (String path : paths) {
        reloadSourceTableAt(path);
    }

    logger.debug("Loaded " + srcTableMap.size() + " SourceTable(s)");
}
private void reloadAllSourceTableExd() throws IOException {
    ResourceStore store = getStore();
    logger.debug("Reloading SourceTable exd info from folder " + store.getReadableResourcePath(ResourceStore.TABLE_EXD_RESOURCE_ROOT));

    srcTableExdMap.clear();

    List<String> paths = store.collectResourceRecursively(ResourceStore.TABLE_EXD_RESOURCE_ROOT, MetadataConstants.FILE_SURFIX);
    for (String path : paths) {
        reloadSourceTableExdAt(path);
    }

    logger.debug("Loaded " + srcTableExdMap.size() + " SourceTable EXD(s)");
}
private void reloadAllDataModel() throws IOException {
    ResourceStore store = getStore();
    logger.debug("Reloading DataModel from folder " + store.getReadableResourcePath(ResourceStore.DATA_MODEL_DESC_RESOURCE_ROOT));

    dataModelDescMap.clear();

    List<String> paths = store.collectResourceRecursively(ResourceStore.DATA_MODEL_DESC_RESOURCE_ROOT, MetadataConstants.FILE_SURFIX);
    for (String path : paths) {
        try {
            reloadDataModelDescAt(path);
        } catch (IllegalStateException e) {
            logger.error("Failed to load DataModel at " + path, e);
        }
    }

    logger.debug("Loaded " + dataModelDescMap.size() + " DataModel(s)");
}
public DataModelDesc dropModel(DataModelDesc desc) throws IOException {
    logger.info("Dropping model '" + desc.getName() + "'");
    ResourceStore store = getStore();
    store.deleteResource(desc.getResourcePath());

    // clean model cache
    this.afterModelDropped(desc);
    return desc;
}
try {
    String modelName = this.getCubeDescManager().getCubeDesc(newInstance.getDescName()).getModelName();
    dataModelDesc = this.getMetadataManager().getDataModelDesc(modelName);
    Map<String, String> pkToFK = Maps.newHashMap();
    for (LookupDesc lookupDesc : dataModelDesc.getLookups()) {
private static void deployHiveTables() throws Exception {
    MetadataManager metaMgr = MetadataManager.getInstance(config());

    localBufferFile.createNewFile();
    InputStream hbaseDataStream = metaMgr.getStore().getResource("/data/" + tablename + ".csv");
    FileOutputStream localFileStream = new FileOutputStream(localBufferFile);
    IOUtils.copy(hbaseDataStream, localFileStream);

    hiveClient.executeHQL(generateCreateTableHql(metaMgr.getTableDesc(TABLE_CAL_DT.toUpperCase())));
    hiveClient.executeHQL(generateCreateTableHql(metaMgr.getTableDesc(TABLE_CATEGORY_GROUPINGS.toUpperCase())));
    hiveClient.executeHQL(generateCreateTableHql(metaMgr.getTableDesc(TABLE_KYLIN_FACT.toUpperCase())));
    hiveClient.executeHQL(generateCreateTableHql(metaMgr.getTableDesc(TABLE_SELLER_TYPE_DIM.toUpperCase())));
    hiveClient.executeHQL(generateCreateTableHql(metaMgr.getTableDesc(TABLE_SITES.toUpperCase())));