/**
 * Wraps epoch millis in a {@code SqlTimestamp}, attaching the session's
 * time zone key only when the session runs in legacy timestamp mode.
 */
public static SqlTimestamp sqlTimestampOf(long millis, ConnectorSession session)
{
    return session.isLegacyTimestamp()
            ? new SqlTimestamp(millis, session.getTimeZoneKey())
            : new SqlTimestamp(millis);
}
/**
 * Creates an HdfsContext scoped to a schema (no table).
 *
 * <p>Captures the session's identity, source, and query id; the table name
 * is left empty because this context is schema-level only.
 *
 * @throws NullPointerException if {@code session}, {@code schemaName},
 *         or the session's identity/source is null
 */
public HdfsContext(ConnectorSession session, String schemaName)
{
    requireNonNull(session, "session is null");
    requireNonNull(schemaName, "schemaName is null");
    this.identity = requireNonNull(session.getIdentity(), "session.getIdentity() is null");
    // Fixed: message was "session.getSource()" — now consistent with the other requireNonNull messages
    this.source = requireNonNull(session.getSource(), "session.getSource() is null");
    this.queryId = Optional.of(session.getQueryId());
    this.schemaName = Optional.of(schemaName);
    this.tableName = Optional.empty();
}
/**
 * Reads the ORC optimized writer's minimum stripe size from the session properties.
 */
public static DataSize getOrcOptimizedWriterMinStripeSize(ConnectorSession session)
{
    DataSize minStripeSize = session.getProperty(ORC_OPTIMIZED_WRITER_MIN_STRIPE_SIZE, DataSize.class);
    return minStripeSize;
}
/**
 * Parses a date/time string with the given format string, anchoring the parse
 * to the session time zone in legacy timestamp mode and to UTC otherwise.
 */
private static long parseMillis(ConnectorSession session, Slice dateTime, Slice formatString)
{
    TimeZoneKey zoneKey = session.isLegacyTimestamp() ? session.getTimeZoneKey() : UTC_KEY;
    return parseMillis(zoneKey, session.getLocale(), dateTime, formatString);
}
@Description("current timestamp without time zone")
@ScalarFunction("localtimestamp")
@SqlType(StandardTypes.TIMESTAMP)
public static long localTimestamp(ConnectorSession session)
{
    long startTime = session.getStartTime();
    if (!session.isLegacyTimestamp()) {
        // Non-legacy semantics: shift the UTC instant into the session zone's wall-clock time.
        ISOChronology chronology = getChronology(session.getTimeZoneKey());
        startTime = chronology.getZone().convertUTCToLocal(startTime);
    }
    return startTime;
}
/**
 * Builds a full engine {@code Session} from a connector-level session, carrying
 * over the transaction id embedded in the connector transaction handle along
 * with the query id, identity, time zone, locale, and start time.
 */
public static Session toSession(ConnectorTransactionHandle transactionHandle, ConnectorSession session)
{
    GlobalSystemTransactionHandle handle = (GlobalSystemTransactionHandle) transactionHandle;
    ConnectorIdentity connectorIdentity = session.getIdentity();
    return Session.builder(new SessionPropertyManager(SYSTEM_SESSION_PROPERTIES))
            .setQueryId(new QueryId(session.getQueryId()))
            .setTransactionId(handle.getTransactionId())
            .setCatalog("catalog")
            .setSchema("schema")
            .setPath(new SqlPath(Optional.of("path")))
            .setIdentity(new Identity(connectorIdentity.getUser(), connectorIdentity.getPrincipal()))
            .setTimeZoneKey(session.getTimeZoneKey())
            .setLocale(session.getLocale())
            .setStartTime(session.getStartTime())
            .build();
}
}
@Description("current timestamp with time zone")
@ScalarFunction(value = "current_timestamp", alias = "now")
@SqlType(StandardTypes.TIMESTAMP_WITH_TIME_ZONE)
public static long currentTimestamp(ConnectorSession session)
{
    // Pack the query start instant together with the session's zone into a single long.
    long startMillis = session.getStartTime();
    return packDateTimeWithZone(startMillis, session.getTimeZoneKey());
}
.put(PRESTO_VIEW_FLAG, "true") .put(PRESTO_VERSION_NAME, prestoVersion) .put(PRESTO_QUERY_ID_NAME, session.getQueryId()) .build(); .setDatabaseName(viewName.getSchemaName()) .setTableName(viewName.getTableName()) .setOwner(session.getUser()) .setTableType(TableType.VIRTUAL_VIEW.name()) .setDataColumns(ImmutableList.of(dummyColumn)) .setLocation(""); Table table = tableBuilder.build(); PrincipalPrivileges principalPrivileges = buildInitialPrivilegeSet(session.getUser());
/**
 * Aborts the in-progress write by delegating to {@code doAbort} under the
 * session user's identity.
 */
@Override
public void abort()
{
    // Must be wrapped in doAs entirely
    // Implicit FileSystem initializations are possible in HiveRecordWriter#rollback -> RecordWriter#close
    hdfsEnvironment.doAs(session.getUser(), this::doAbort);
}
Optional<Long> size; try { size = Optional.of(hdfsEnvironment.getFileSystem(session.getUser(), path, conf).getFileStatus(path).getLen()); session.getQueryId(), path.toString(), schemaName, nodeManager.getCurrentNode().getVersion(), nodeManager.getCurrentNode().getHttpUri().getHost(), session.getIdentity().getPrincipal().map(Principal::getName).orElse(null), nodeManager.getEnvironment(), sessionProperties, FileSystem fileSystem; try { fileSystem = hdfsEnvironment.getFileSystem(session.getUser(), path, conf);
/**
 * Creates a split source for the given Hive table.
 *
 * <p>Captures the query id from the session, validates and stores the
 * collaborating objects, and derives the split sizing limits
 * ({@code maxSplitSize}, {@code maxInitialSplitSize}) from session properties.
 *
 * @param maxInitialSplits number of splits that may use the smaller initial split size
 * @param maxOutstandingSplitsSize memory cap for buffered splits (stored as bytes)
 * @param stateReference shared mutable state of the split source
 * @param highMemorySplitSourceCounter counter bumped when a source exceeds the memory cap
 */
private HiveSplitSource(
        ConnectorSession session,
        String databaseName,
        String tableName,
        TupleDomain<? extends ColumnHandle> compactEffectivePredicate,
        PerBucket queues,
        int maxInitialSplits,
        DataSize maxOutstandingSplitsSize,
        HiveSplitLoader splitLoader,
        AtomicReference<State> stateReference,
        CounterStat highMemorySplitSourceCounter)
{
    requireNonNull(session, "session is null");
    this.queryId = session.getQueryId();
    this.databaseName = requireNonNull(databaseName, "databaseName is null");
    this.tableName = requireNonNull(tableName, "tableName is null");
    this.compactEffectivePredicate = requireNonNull(compactEffectivePredicate, "compactEffectivePredicate is null");
    this.queues = requireNonNull(queues, "queues is null");
    // toIntExact guards against a DataSize too large to track in an int byte counter
    this.maxOutstandingSplitsBytes = toIntExact(maxOutstandingSplitsSize.toBytes());
    this.splitLoader = requireNonNull(splitLoader, "splitLoader is null");
    this.stateReference = requireNonNull(stateReference, "stateReference is null");
    this.highMemorySplitSourceCounter = requireNonNull(highMemorySplitSourceCounter, "highMemorySplitSourceCounter is null");
    // Split sizes come from session properties, not constructor arguments.
    this.maxSplitSize = getMaxSplitSize(session);
    this.maxInitialSplitSize = getMaxInitialSplitSize(session);
    this.remainingInitialSplits = new AtomicInteger(maxInitialSplits);
}
@Description("parses the specified date/time by the given format")
@ScalarFunction
@LiteralParameters({"x", "y"})
@SqlType(StandardTypes.TIMESTAMP_WITH_TIME_ZONE)
public static long parseDatetime(ConnectorSession session, @SqlType("varchar(x)") Slice datetime, @SqlType("varchar(y)") Slice formatString)
{
    String pattern = formatString.toStringUtf8();
    try {
        // Parse in the session chronology/locale, keeping any explicit offset from the input,
        // then pack the resulting instant-with-zone into a single long.
        return packDateTimeWithZone(parseDateTimeHelper(
                DateTimeFormat.forPattern(pattern)
                        .withChronology(getChronology(session.getTimeZoneKey()))
                        .withOffsetParsed()
                        .withLocale(session.getLocale()),
                datetime.toStringUtf8()));
    }
    catch (IllegalArgumentException e) {
        // Invalid pattern or unparseable input surfaces as a user error, preserving the cause.
        throw new PrestoException(INVALID_FUNCTION_ARGUMENT, e);
    }
}
/**
 * Finalizes an insert: converts the serialized fragments into shard metadata,
 * commits the shards for the handle's transaction, and clears the rollback action.
 *
 * @return always {@code Optional.empty()} — no connector output metadata is produced
 */
@Override
public Optional<ConnectorOutputMetadata> finishInsert(ConnectorSession session, ConnectorInsertTableHandle insertHandle, Collection<Slice> fragments, Collection<ComputedStatistics> computedStatistics)
{
    RaptorInsertTableHandle raptorHandle = (RaptorInsertTableHandle) insertHandle;
    Collection<ShardInfo> shards = parseFragments(fragments);
    List<ColumnInfo> columns = raptorHandle.getColumnHandles().stream()
            .map(ColumnInfo::fromHandle)
            .collect(toList());

    log.info("Committing insert into tableId %s (queryId: %s, shards: %s, columns: %s)", raptorHandle.getTableId(), session.getQueryId(), shards.size(), columns.size());
    shardManager.commitShards(
            raptorHandle.getTransactionId(),
            raptorHandle.getTableId(),
            columns,
            shards,
            raptorHandle.getExternalBatchId(),
            session.getStartTime());

    clearRollback();
    return Optional.empty();
}
/**
 * Returns the user name carried by this session's identity.
 */
default String getUser()
{
    return getIdentity().getUser();
}
@Description("Converts a string to a DATE data type")
@ScalarFunction("to_date")
@SqlType(StandardTypes.DATE)
public static long toDate(ConnectorSession session, @SqlType(StandardTypes.VARCHAR) Slice dateTime, @SqlType(StandardTypes.VARCHAR) Slice formatString)
{
    try {
        // DATE has no zone, so the parse is always anchored to UTC regardless of session zone.
        long epochMillis = parseMillis(UTC_KEY, session.getLocale(), dateTime, formatString);
        return MILLISECONDS.toDays(epochMillis);
    }
    catch (Throwable t) {
        // Let fatal errors and already-classified Presto errors propagate unchanged;
        // everything else is wrapped as an internal error with the cause preserved.
        throwIfInstanceOf(t, Error.class);
        throwIfInstanceOf(t, PrestoException.class);
        throw new PrestoException(GENERIC_INTERNAL_ERROR, t);
    }
}
/**
 * Re-anchors a time-with-time-zone value to a different zone while keeping
 * the same instant, then normalizes the result into a single day's range.
 *
 * <p>The offset adjustments use the session start time because a TIME value
 * has no date of its own; the session start supplies the reference date for
 * zone-offset lookups.
 */
private static long timeAtTimeZone(ConnectorSession session, long timeWithTimeZone, TimeZoneKey timeZoneKey)
{
    DateTimeZone sourceTimeZone = getDateTimeZone(unpackZoneKey(timeWithTimeZone));
    DateTimeZone targetTimeZone = getDateTimeZone(timeZoneKey);
    long millis = unpackMillisUtc(timeWithTimeZone);

    // STEP 1. Calculate source UTC millis in session start
    millis += valueToSessionTimeZoneOffsetDiff(session.getStartTime(), sourceTimeZone);

    // STEP 2. Calculate target UTC millis in 1970
    millis -= valueToSessionTimeZoneOffsetDiff(session.getStartTime(), targetTimeZone);

    // STEP 3. Make sure that value + offset is in 0 - 23:59:59.999
    long localMillis = millis + targetTimeZone.getOffset(0);
    // Loops up to 2 times in total
    // NOTE(review): the first loop uses '>' rather than '>=', so localMillis exactly
    // equal to one day (86,400,000) is not wrapped down to 0 — confirm this boundary
    // is intended.
    while (localMillis > TimeUnit.DAYS.toMillis(1)) {
        millis -= TimeUnit.DAYS.toMillis(1);
        localMillis -= TimeUnit.DAYS.toMillis(1);
    }
    while (localMillis < 0) {
        millis += TimeUnit.DAYS.toMillis(1);
        localMillis += TimeUnit.DAYS.toMillis(1);
    }

    return packDateTimeWithZone(millis, timeZoneKey);
}
@ScalarFunction
@LiteralParameters("x")
@SqlType(StandardTypes.VARCHAR)
public static Slice dateFormat(ConnectorSession session, @SqlType(StandardTypes.TIMESTAMP) long timestamp, @SqlType("varchar(x)") Slice formatString)
{
    // Legacy timestamps are rendered in the session zone; otherwise UTC is used.
    ISOChronology chronology = session.isLegacyTimestamp()
            ? getChronology(session.getTimeZoneKey())
            : UTC_CHRONOLOGY;
    return dateFormat(chronology, session.getLocale(), timestamp, formatString);
}
@Description("current timestamp without time zone")
@ScalarFunction("localtimestamp")
@SqlType(StandardTypes.TIMESTAMP)
public static long localTimestamp(ConnectorSession session)
{
    if (session.isLegacyTimestamp()) {
        // Legacy semantics: the raw start instant is returned as-is.
        return session.getStartTime();
    }
    // Convert the UTC start instant into the session zone's local wall-clock time.
    return getChronology(session.getTimeZoneKey())
            .getZone()
            .convertUTCToLocal(session.getStartTime());
}
/**
 * Converts a connector session plus its transaction handle into a full
 * engine {@code Session}, copying the query id, identity, time zone, locale,
 * and start time, with fixed placeholder catalog/schema/path values.
 */
public static Session toSession(ConnectorTransactionHandle transactionHandle, ConnectorSession session)
{
    TransactionId transactionId = ((GlobalSystemTransactionHandle) transactionHandle).getTransactionId();
    ConnectorIdentity connectorIdentity = session.getIdentity();
    Identity identity = new Identity(connectorIdentity.getUser(), connectorIdentity.getPrincipal());
    Session.SessionBuilder builder = Session.builder(new SessionPropertyManager(SYSTEM_SESSION_PROPERTIES));
    builder.setQueryId(new QueryId(session.getQueryId()));
    builder.setTransactionId(transactionId);
    builder.setCatalog("catalog");
    builder.setSchema("schema");
    builder.setPath(new SqlPath(Optional.of("path")));
    builder.setIdentity(identity);
    builder.setTimeZoneKey(session.getTimeZoneKey());
    builder.setLocale(session.getLocale());
    builder.setStartTime(session.getStartTime());
    return builder.build();
}
}
@Description("current timestamp with time zone")
@ScalarFunction(value = "current_timestamp", alias = "now")
@SqlType(StandardTypes.TIMESTAMP_WITH_TIME_ZONE)
public static long currentTimestamp(ConnectorSession session)
{
    // The query start instant and the session zone are packed into one long value.
    return packDateTimeWithZone(session.getStartTime(), session.getTimeZoneKey());
}