@Config("kafka.connect-timeout")
public KafkaConnectorConfig setKafkaConnectTimeout(String kafkaConnectTimeout)
{
    // Parse eagerly so a malformed duration string (e.g. "10 xyz") fails at
    // configuration-binding time rather than at first use.
    Duration parsed = Duration.valueOf(kafkaConnectTimeout);
    this.kafkaConnectTimeout = parsed;
    return this;
}
@Config("redis.connect-timeout")
public RedisConnectorConfig setRedisConnectTimeout(String redisConnectTimeout)
{
    // Parse eagerly so a malformed duration string fails at configuration-binding
    // time rather than at first use.
    Duration parsed = Duration.valueOf(redisConnectTimeout);
    this.redisConnectTimeout = parsed;
    return this;
}
public static Duration getIndexCardinalityCachePollingDuration(ConnectorSession session)
{
    // The session stores the polling interval as a duration string; parse it on each read.
    String raw = session.getProperty(INDEX_CARDINALITY_CACHE_POLLING_DURATION, String.class);
    return Duration.valueOf(raw);
}
public static String preprocessQuery(Optional<String> catalog, Optional<String> schema, String query)
        throws QueryPreprocessorException
{
    // Resolve the timeout: start from the default and let the environment override it.
    // The override is parsed even when no preprocessor is configured, so an invalid
    // value is reported instead of being silently ignored.
    Duration timeout = DEFAULT_PREPROCESSOR_TIMEOUT;
    String timeoutSetting = nullToEmpty(System.getenv(ENV_PREPROCESSOR_TIMEOUT)).trim();
    if (!timeoutSetting.isEmpty()) {
        timeout = Duration.valueOf(timeoutSetting);
    }

    // Without a configured preprocessor command the query passes through untouched.
    String command = System.getenv(ENV_PREPROCESSOR);
    if (command == null || command.isEmpty()) {
        return query;
    }

    return preprocessQuery(catalog, schema, query, ImmutableList.of("/bin/sh", "-c", command), timeout);
}
/**
 * Parses the {@code X-Presto-Resource-Estimate} headers ({@code name=value} pairs)
 * into a {@link ResourceEstimates} instance.
 *
 * @throws the result of {@code badRequest(...)} for an unknown resource name or an
 *         unparsable duration/data-size value
 */
private ResourceEstimates parseResourceEstimate(HttpServletRequest servletRequest)
{
    ResourceEstimateBuilder builder = new ResourceEstimateBuilder();
    for (String header : splitSessionHeader(servletRequest.getHeaders(PRESTO_RESOURCE_ESTIMATE))) {
        List<String> nameValue = Splitter.on('=').limit(2).trimResults().splitToList(header);
        assertRequest(nameValue.size() == 2, "Invalid %s header", PRESTO_RESOURCE_ESTIMATE);
        String name = nameValue.get(0);
        String value = nameValue.get(1);
        try {
            // Use a fixed locale for case folding: the default-locale toUpperCase()
            // mangles 'i' under e.g. the Turkish locale, which would break matching
            // of constant names such as EXECUTION_TIME for those clients.
            switch (name.toUpperCase(java.util.Locale.ENGLISH)) {
                case ResourceEstimates.EXECUTION_TIME:
                    builder.setExecutionTime(Duration.valueOf(value));
                    break;
                case ResourceEstimates.CPU_TIME:
                    builder.setCpuTime(Duration.valueOf(value));
                    break;
                case ResourceEstimates.PEAK_MEMORY:
                    builder.setPeakMemory(DataSize.valueOf(value));
                    break;
                default:
                    throw badRequest(format("Unsupported resource name %s", name));
            }
        }
        catch (IllegalArgumentException e) {
            throw badRequest(format("Unsupported format for resource estimate '%s': %s", value, e));
        }
    }
    return builder.build();
}
@Test
public void testToPagesWithBlockedOperator()
{
    // An operator that stays blocked briefly must still drain to an empty page list.
    Operator blocked = new BlockedOperator(Duration.valueOf("15 ms"));
    List<Page> result = OperatorAssertion.toPages(blocked, emptyIterator());
    Assert.assertEquals(result, ImmutableList.of());
}
/**
 * Creates the file-based system access control, optionally wrapped so the rules
 * are reloaded from {@code security.config-file} at the configured refresh period.
 */
@Override
public SystemAccessControl create(Map<String, String> config)
{
    requireNonNull(config, "config is null");

    String configFileName = config.get(SECURITY_CONFIG_FILE);
    checkState(configFileName != null, "Security configuration must contain the '%s' property", SECURITY_CONFIG_FILE);

    if (!config.containsKey(SECURITY_REFRESH_PERIOD)) {
        // No refresh configured: load the rules exactly once.
        return create(configFileName);
    }

    Duration refreshPeriod;
    try {
        refreshPeriod = Duration.valueOf(config.get(SECURITY_REFRESH_PERIOD));
    }
    catch (IllegalArgumentException e) {
        throw invalidRefreshPeriodException(config, configFileName);
    }
    // A zero period would mean "re-read on every call"; treat it as invalid.
    if (refreshPeriod.toMillis() == 0) {
        throw invalidRefreshPeriodException(config, configFileName);
    }
    return ForwardingSystemAccessControl.of(memoizeWithExpiration(
            () -> {
                log.info("Refreshing system access control from %s", configFileName);
                return create(configFileName);
            },
            refreshPeriod.toMillis(),
            MILLISECONDS));
}
new Duration(0, SECONDS), false, value -> Duration.valueOf((String) value), Duration::toString));
this.stagingDirectory = new File(conf.get(S3_STAGING_DIRECTORY, defaults.getS3StagingDirectory().toString())); this.maxAttempts = conf.getInt(S3_MAX_CLIENT_RETRIES, defaults.getS3MaxClientRetries()) + 1; this.maxBackoffTime = Duration.valueOf(conf.get(S3_MAX_BACKOFF_TIME, defaults.getS3MaxBackoffTime().toString())); this.maxRetryTime = Duration.valueOf(conf.get(S3_MAX_RETRY_TIME, defaults.getS3MaxRetryTime().toString())); int maxErrorRetries = conf.getInt(S3_MAX_ERROR_RETRIES, defaults.getS3MaxErrorRetries()); boolean sslEnabled = conf.getBoolean(S3_SSL_ENABLED, defaults.isS3SslEnabled()); Duration connectTimeout = Duration.valueOf(conf.get(S3_CONNECT_TIMEOUT, defaults.getS3ConnectTimeout().toString())); Duration socketTimeout = Duration.valueOf(conf.get(S3_SOCKET_TIMEOUT, defaults.getS3SocketTimeout().toString())); int maxConnections = conf.getInt(S3_MAX_CONNECTIONS, defaults.getS3MaxConnections()); this.multiPartUploadMinFileSize = conf.getLong(S3_MULTIPART_MIN_FILE_SIZE, defaults.getS3MultipartMinFileSize().toBytes());
@Test
public void testJsonRoundTrip()
{
    // Both a populated and an absent uptime must survive JSON serialization.
    Optional<Duration> uptime = Optional.of(Duration.valueOf("2m"));
    assertJsonRoundTrip(new ServerInfo(UNKNOWN, "test", true, false, uptime));
    assertJsonRoundTrip(new ServerInfo(UNKNOWN, "test", true, false, Optional.empty()));
}
/**
 * Verifies that a stored global property round-trips as a parsed Duration, and that
 * unknown property names are rejected by the table's check constraint on both insert
 * and update.
 */
@Test
public void testGlobalResourceGroupProperties()
{
    H2ResourceGroupsDao dao = setup("global_properties");
    dao.createResourceGroupsGlobalPropertiesTable();
    dao.insertResourceGroupsGlobalProperties("cpu_quota_period", "1h");

    ResourceGroupGlobalProperties globalProperties = new ResourceGroupGlobalProperties(Optional.of(Duration.valueOf("1h")));
    ResourceGroupGlobalProperties records = dao.getResourceGroupGlobalProperties().get(0);
    assertEquals(globalProperties, records);

    // Bug fix: the original try/catch blocks silently passed when no exception was
    // thrown. The AssertionError makes the test fail if the constraint is missing;
    // it is not caught below because it is not an UnableToExecuteStatementException.
    try {
        dao.insertResourceGroupsGlobalProperties("invalid_property", "1h");
        throw new AssertionError("expected insert of an invalid property name to fail");
    }
    catch (UnableToExecuteStatementException ex) {
        assertTrue(ex.getCause() instanceof JdbcSQLException);
        assertTrue(ex.getCause().getMessage().startsWith("Check constraint violation:"));
    }
    try {
        dao.updateResourceGroupsGlobalProperties("invalid_property_name");
        throw new AssertionError("expected update of an invalid property name to fail");
    }
    catch (UnableToExecuteStatementException ex) {
        assertTrue(ex.getCause() instanceof JdbcSQLException);
        assertTrue(ex.getCause().getMessage().startsWith("Check constraint violation:"));
    }
}
@Test
public void testGetServerInfo()
        throws Exception
{
    // Serve a canned ServerInfo payload and check it is fetched and decoded correctly.
    ServerInfo info = new ServerInfo(UNKNOWN, "test", true, false, Optional.of(Duration.valueOf("2m")));
    server.enqueue(new MockResponse()
            .addHeader(CONTENT_TYPE, "application/json")
            .setBody(SERVER_INFO_CODEC.toJson(info)));

    QueryExecutor executor = new QueryExecutor(new OkHttpClient());
    ServerInfo returned = executor.getServerInfo(server.url("/v1/info").uri());

    assertEquals(returned.getEnvironment(), "test");
    assertEquals(returned.getUptime(), Optional.of(Duration.valueOf("2m")));

    // Exactly one request, to the expected path.
    assertEquals(server.getRequestCount(), 1);
    assertEquals(server.takeRequest().getPath(), "/v1/info");
}
}
@Description("convert duration string to an interval")
@ScalarFunction("parse_duration")
@LiteralParameters("x")
@SqlType(StandardTypes.INTERVAL_DAY_TO_SECOND)
public static long parseDuration(@SqlType("varchar(x)") Slice duration)
{
    try {
        String text = duration.toStringUtf8();
        // The interval is represented as a millisecond count.
        return Duration.valueOf(text).toMillis();
    }
    catch (IllegalArgumentException e) {
        // Surface malformed duration strings as a SQL argument error.
        throw new PrestoException(INVALID_FUNCTION_ARGUMENT, e);
    }
}
DateTime.parse("1991-09-06T05:00-05:30"), DateTime.parse("1991-09-06T05:01-05:30"), Duration.valueOf("8m"), Duration.valueOf("7m"), Duration.valueOf("34m"), 13, 14, DataSize.valueOf("24GB"), DataSize.valueOf("25GB"), Duration.valueOf("23m"), Duration.valueOf("24m"), true, ImmutableSet.of(WAITING_FOR_MEMORY),
int maxErrorRetries = config.getInt(S3_MAX_ERROR_RETRIES, defaults.getS3MaxErrorRetries()); boolean sslEnabled = config.getBoolean(S3_SSL_ENABLED, defaults.isS3SslEnabled()); Duration connectTimeout = Duration.valueOf(config.get(S3_CONNECT_TIMEOUT, defaults.getS3ConnectTimeout().toString())); Duration socketTimeout = Duration.valueOf(config.get(S3_SOCKET_TIMEOUT, defaults.getS3SocketTimeout().toString())); int maxConnections = config.getInt(S3_SELECT_PUSHDOWN_MAX_CONNECTIONS, clientConfig.getS3SelectPushdownMaxConnections());
queryRunner, newSessionWithResourceEstimates(new ResourceEstimates( Optional.of(Duration.valueOf("4m")), Optional.empty(), Optional.of(DataSize.valueOf("400MB")))), queryRunner, newSessionWithResourceEstimates(new ResourceEstimates( Optional.of(Duration.valueOf("4m")), Optional.empty(), Optional.of(DataSize.valueOf("600MB")))), queryRunner, newSessionWithResourceEstimates(new ResourceEstimates( Optional.of(Duration.valueOf("4m")), Optional.empty(), Optional.empty())), queryRunner, newSessionWithResourceEstimates(new ResourceEstimates( Optional.of(Duration.valueOf("1s")), Optional.of(Duration.valueOf("1s")), Optional.of(DataSize.valueOf("6TB")))), LONG_LASTING_QUERY, queryRunner, newSessionWithResourceEstimates(new ResourceEstimates( Optional.of(Duration.valueOf("100h")), Optional.empty(),
protected final void setup(String host, int port, String databaseName, String timeZone)
{
    HiveClientConfig config = getHiveClientConfig();
    config.setTimeZone(timeZone);

    // Optional SOCKS proxy for reaching the metastore, taken from a system property.
    String socksProxy = System.getProperty("hive.metastore.thrift.client.socks-proxy");
    if (socksProxy != null) {
        config.setMetastoreSocksProxy(HostAndPort.fromString(socksProxy));
    }

    HiveCluster cluster = new TestingHiveCluster(config, host, port);
    ExtendedHiveMetastore metastore = new CachingHiveMetastore(
            new BridgingHiveMetastore(new ThriftHiveMetastore(cluster)),
            executor,
            Duration.valueOf("1m"),
            Duration.valueOf("15s"),
            10000);

    setup(databaseName, config, metastore);
}
new DbResourceGroupConfig().setMaxRefreshInterval(Duration.valueOf("1ms")), daoProvider.get(), ENVIRONMENT);
@Test(expectedExceptions = PrestoException.class, expectedExceptionsMessageRegExp = "No selectors are configured")
public void testInvalidConfiguration()
{
    H2DaoProvider daoProvider = setup("selectors");
    H2ResourceGroupsDao dao = daoProvider.get();
    dao.createResourceGroupsTable();
    dao.createSelectorsTable();
    // A resource group with no selectors: getSelectors() must reject this setup.
    dao.insertResourceGroup(1, "global", "100%", 100, 100, 100, null, null, null, null, null, null, ENVIRONMENT);

    DbResourceGroupConfigurationManager manager = new DbResourceGroupConfigurationManager(
            (poolId, listener) -> {},
            new DbResourceGroupConfig().setMaxRefreshInterval(Duration.valueOf("1ms")),
            daoProvider.get(),
            ENVIRONMENT);

    manager.getSelectors();
}