/**
 * Converts an AWS Glue database model into Presto's metastore representation.
 * Glue does not track database ownership, so the result is always reported
 * as owned by the PUBLIC role.
 */
public static Database convertDatabase(com.amazonaws.services.glue.model.Database glueDb)
{
    return Database.builder()
            .setDatabaseName(glueDb.getName())
            .setComment(Optional.ofNullable(glueDb.getDescription()))
            .setLocation(Optional.ofNullable(glueDb.getLocationUri()))
            .setOwnerName(PUBLIC_OWNER)
            .setOwnerType(PrincipalType.ROLE)
            // Glue may return null parameters; normalize to an empty map
            .setParameters(firstNonNull(glueDb.getParameters(), ImmutableMap.of()))
            .build();
}
/**
 * Builds a metastore {@code Database} for the given name and storage location,
 * inheriting the owner and parameter settings configured on this instance.
 */
public Database toDatabase(String databaseName, String location)
{
    return Database.builder()
            .setDatabaseName(databaseName)
            .setLocation(Optional.of(location))
            .setParameters(parameters)
            .setOwnerName(ownerName)
            .setOwnerType(ownerType)
            .build();
} }
/**
 * Resolves a database through the delegate metastore, then rewrites its
 * location to this metastore's base path.
 */
@Override
public Optional<Database> getDatabase(String databaseName)
{
    Optional<Database> resolved = super.getDatabase(databaseName);
    return resolved.map(db -> Database.builder(db)
            .setLocation(Optional.of(basePath.toString()))
            .build());
}
/**
 * Registers a new database in Glue. If the database has no explicit location
 * and a default directory is configured, a location under that directory is
 * assigned. The backing HDFS directory is created only after the Glue entry
 * is successfully registered.
 *
 * @throws SchemaAlreadyExistsException if a database with the same name exists
 * @throws PrestoException on any other Glue service failure
 */
@Override
public void createDatabase(Database database)
{
    // Derive a default location under the configured base directory when none was supplied
    if (!database.getLocation().isPresent() && defaultDir.isPresent()) {
        Path location = new Path(defaultDir.get(), database.getDatabaseName());
        database = Database.builder(database)
                .setLocation(Optional.of(location.toString()))
                .build();
    }

    try {
        DatabaseInput databaseInput = GlueInputConverter.convertDatabase(database);
        glueClient.createDatabase(new CreateDatabaseRequest().withDatabaseInput(databaseInput));
    }
    catch (AlreadyExistsException e) {
        throw new SchemaAlreadyExistsException(database.getDatabaseName());
    }
    catch (AmazonServiceException e) {
        throw new PrestoException(HIVE_METASTORE_ERROR, e);
    }

    // Materialize the database directory on the file system after a successful registration
    if (database.getLocation().isPresent()) {
        HiveWriteUtils.createDirectory(hdfsContext, hdfsEnvironment, new Path(database.getLocation().get()));
    }
}
/**
 * Creates a new schema owned by the current session user. If a location
 * property is supplied, it is validated by resolving its file system before
 * the database is registered.
 *
 * @throws PrestoException with {@code INVALID_SCHEMA_PROPERTY} when the
 *         supplied location URI cannot be resolved
 */
@Override
public void createSchema(ConnectorSession session, String schemaName, Map<String, Object> properties)
{
    Optional<String> location = HiveSchemaProperties.getLocation(properties).map(locationUri -> {
        // Verify the URI is reachable before persisting it in the metastore
        try {
            hdfsEnvironment.getFileSystem(new HdfsContext(session, schemaName), new Path(locationUri));
        }
        catch (IOException e) {
            throw new PrestoException(INVALID_SCHEMA_PROPERTY, "Invalid location URI: " + locationUri, e);
        }
        return locationUri;
    });

    Database database = Database.builder()
            .setDatabaseName(schemaName)
            .setLocation(location)
            .setOwnerName(session.getUser())
            .setOwnerType(USER)
            .build();
    metastore.createDatabase(database);
}
/**
 * Builds a Presto test database fixture with a randomized name, a fixed
 * location of {@code /db}, and PUBLIC role ownership.
 */
public static com.facebook.presto.hive.metastore.Database getPrestoTestDatabase()
{
    String databaseName = "test-db" + generateRandom();
    return com.facebook.presto.hive.metastore.Database.builder()
            .setDatabaseName(databaseName)
            .setLocation(Optional.of("/db"))
            .setComment(Optional.of("database desc"))
            .setOwnerName("PUBLIC")
            .setOwnerType(PrincipalType.ROLE)
            .setParameters(ImmutableMap.of())
            .build();
}
/**
 * Converts a Hive Thrift metastore database into Presto's representation.
 * Databases without an explicit owner default to the PUBLIC role, and a
 * missing parameter map is normalized to an empty one.
 */
public static Database fromMetastoreApiDatabase(org.apache.hadoop.hive.metastore.api.Database database)
{
    String ownerName = "PUBLIC";
    PrincipalType ownerType = PrincipalType.ROLE;
    if (database.getOwnerName() != null) {
        ownerName = database.getOwnerName();
        ownerType = fromMetastoreApiPrincipalType(database.getOwnerType());
    }

    Map<String, String> parameters = database.getParameters();
    return Database.builder()
            .setDatabaseName(database.getName())
            .setLocation(Optional.ofNullable(database.getLocationUri()))
            .setComment(Optional.ofNullable(database.getDescription()))
            // Thrift may leave parameters unset; substitute an empty map
            .setParameters(parameters == null ? ImmutableMap.of() : parameters)
            .setOwnerName(ownerName)
            .setOwnerType(ownerType)
            .build();
}
// Builds a minimal metastore database object for tests, owned by the public role.
private static Database createDatabaseMetastoreObject(String name)
{
    return Database.builder()
            .setDatabaseName(name)
            .setOwnerType(PrincipalType.ROLE)
            .setOwnerName("public")
            .build();
}
/**
 * One-time test setup: provisions a temporary metastore directory, registers
 * the test database, and configures the client with a fixed time zone so
 * results are deterministic across environments.
 */
@BeforeClass
public void initialize()
{
    tempDir = Files.createTempDir();
    ExtendedHiveMetastore metastore = createMetastore(tempDir);

    Database testDatabase = Database.builder()
            .setDatabaseName(testDbName)
            .setOwnerType(PrincipalType.ROLE)
            .setOwnerName("public")
            .build();
    metastore.createDatabase(testDatabase);

    HiveClientConfig hiveConfig = new HiveClientConfig().setTimeZone("America/Los_Angeles");
    setup(testDbName, hiveConfig, metastore);
}
metastore.createDatabase(Database.builder() .setDatabaseName("tpch") .setOwnerName("public")
/**
 * Builds a 4-node distributed query runner backed by a file-based Hive
 * metastore, with a {@code default} schema and the geospatial plugin installed.
 */
private static DistributedQueryRunner createQueryRunner()
        throws Exception
{
    DistributedQueryRunner queryRunner = new DistributedQueryRunner(
            testSessionBuilder()
                    .setSource(TestSpatialJoins.class.getSimpleName())
                    .setCatalog("hive")
                    .setSchema("default")
                    .build(),
            4);
    queryRunner.installPlugin(new GeoPlugin());

    // Stand up a file-based Hive metastore rooted in the coordinator's data directory
    HiveClientConfig hiveConfig = new HiveClientConfig();
    HdfsConfiguration hdfsConfig = new HiveHdfsConfiguration(new HdfsConfigurationUpdater(hiveConfig));
    HdfsEnvironment hdfsEnvironment = new HdfsEnvironment(hdfsConfig, hiveConfig, new NoHdfsAuthentication());
    File baseDir = queryRunner.getCoordinator().getBaseDataDir().resolve("hive_data").toFile();
    FileHiveMetastore metastore = new FileHiveMetastore(hdfsEnvironment, baseDir.toURI().toString(), "test");

    metastore.createDatabase(Database.builder()
            .setDatabaseName("default")
            .setOwnerType(PrincipalType.ROLE)
            .setOwnerName("public")
            .build());

    queryRunner.installPlugin(new HivePlugin("hive", Optional.of(metastore)));
    queryRunner.createCatalog("hive", "hive");
    return queryRunner;
}
/**
 * Delegates database lookup to the parent metastore and forces the returned
 * database to report this metastore's base path as its location.
 */
@Override
public Optional<Database> getDatabase(String databaseName)
{
    return super.getDatabase(databaseName)
            .map(original -> Database.builder(original)
                    .setLocation(Optional.of(basePath.toString()))
                    .build());
}
/**
 * Creates a Presto metastore database fixture for tests. The name carries a
 * random suffix so repeated runs do not collide; everything else is fixed.
 */
public static com.facebook.presto.hive.metastore.Database getPrestoTestDatabase()
{
    return com.facebook.presto.hive.metastore.Database.builder()
            .setDatabaseName("test-db" + generateRandom())
            .setOwnerName("PUBLIC")
            .setOwnerType(PrincipalType.ROLE)
            .setComment(Optional.of("database desc"))
            .setLocation(Optional.of("/db"))
            .setParameters(ImmutableMap.of())
            .build();
}
/**
 * Produces a bare-bones test database with the given name and public role ownership.
 */
private static Database createDatabaseMetastoreObject(String name)
{
    return Database.builder()
            .setOwnerName("public")
            .setOwnerType(PrincipalType.ROLE)
            .setDatabaseName(name)
            .build();
}
/**
 * Class-level test fixture: creates a throwaway metastore in a temp directory,
 * registers the test database under the public role, and pins the Hive client
 * time zone to America/Los_Angeles for reproducible results.
 */
@BeforeClass
public void initialize()
{
    tempDir = Files.createTempDir();

    ExtendedHiveMetastore metastore = createMetastore(tempDir);
    metastore.createDatabase(
            Database.builder()
                    .setOwnerName("public")
                    .setOwnerType(PrincipalType.ROLE)
                    .setDatabaseName(testDbName)
                    .build());

    setup(testDbName, new HiveClientConfig().setTimeZone("America/Los_Angeles"), metastore);
}
metastore.createDatabase(Database.builder() .setDatabaseName("tpch") .setOwnerName("public")
/**
 * Assembles the test query runner: four nodes, the geospatial plugin, and a
 * Hive catalog served by a file-based metastore containing a single
 * {@code default} database.
 */
private static DistributedQueryRunner createQueryRunner()
        throws Exception
{
    DistributedQueryRunner runner = new DistributedQueryRunner(
            testSessionBuilder()
                    .setSource(TestSpatialJoins.class.getSimpleName())
                    .setCatalog("hive")
                    .setSchema("default")
                    .build(),
            4);
    runner.installPlugin(new GeoPlugin());

    // Wire up HDFS access for the file-based metastore
    HiveClientConfig clientConfig = new HiveClientConfig();
    HdfsEnvironment environment = new HdfsEnvironment(
            new HiveHdfsConfiguration(new HdfsConfigurationUpdater(clientConfig)),
            clientConfig,
            new NoHdfsAuthentication());

    File dataDir = runner.getCoordinator().getBaseDataDir().resolve("hive_data").toFile();
    FileHiveMetastore fileMetastore = new FileHiveMetastore(environment, dataDir.toURI().toString(), "test");
    fileMetastore.createDatabase(
            Database.builder()
                    .setOwnerName("public")
                    .setOwnerType(PrincipalType.ROLE)
                    .setDatabaseName("default")
                    .build());

    runner.installPlugin(new HivePlugin("hive", Optional.of(fileMetastore)));
    runner.createCatalog("hive", "hive");
    return runner;
}