public CompareFunctionsProcessor(String functionName) { this.success = false; this.functionName = functionName; this.isEqualityFn = COMPARE_FUNCTIONS_TRANSPOSE_MAP.containsKey(functionName) && COMPARE_FUNCTIONS_TRANSPOSE_MAP.get(functionName).equals(functionName); this.isRowKeyPrefixComparison = false; this.sortOrderAscending = true; }
public static LogicalExpression create(String functionName, List<LogicalExpression> expressions) throws ExpressionValidationException { // logger.debug("Requesting generation of new function with name {}.", // functionName); if (!FUNCTION_MAP.containsKey(functionName)) { throw new ExpressionValidationException(String.format("Unknown function with name '%s'", functionName)); } try { return FUNCTION_MAP.get(functionName).newInstance(expressions); } catch (Exception e) { throw new ExpressionValidationException("Failure while attempting to build type of " + functionName, e); } }
/**
 * Maps an SQL ordering-specification string ("ASC"/"DESC", case-insensitive,
 * or null meaning "unspecified") to a Calcite {@link Direction}, filtered to
 * the directions Drill supports.
 *
 * @throws DrillRuntimeException if the string is non-null but unrecognized
 */
@VisibleForTesting
public static Direction getOrderingSpecFromString(String strDirection) {
  // Null is a legal input: it stands for "no ordering specified".
  if (strDirection == null) {
    return filterDrillSupportedDirections(null);
  }
  Direction dir = DRILL_TO_CALCITE_DIR_MAPPING.get(strDirection.toUpperCase());
  if (dir == null) {
    throw new DrillRuntimeException(
        "Unknown <ordering specification> string (not \"ASC\", \"DESC\", "
        + "or null): \"" + strDirection + "\"");
  }
  return filterDrillSupportedDirections(dir);
}
/**
 * Maps an SQL null-ordering string (case-insensitive, or null meaning
 * "unspecified") to a Calcite {@link NullDirection}, filtered to the
 * null orderings Drill supports.
 *
 * @throws DrillRuntimeException if the string is non-null but unrecognized
 */
@VisibleForTesting
public static NullDirection getNullOrderingFromString( String strNullOrdering ) {
  // Null is a legal input: it stands for "no null ordering specified".
  if (strNullOrdering == null) {
    return filterDrillSupportedNullDirections(null);
  }
  NullDirection nullDir = DRILL_TO_CALCITE_NULL_DIR_MAPPING.get(strNullOrdering.toUpperCase());
  if (nullDir == null) {
    throw new DrillRuntimeException(
        "Internal error: Unknown <null ordering> string (not "
        + "\"" + NULLS_FIRST + "\", "
        + "\"" + NULLS_LAST + "\", or "
        + "\"" + NULLS_UNSPECIFIED + "\" or null): "
        + "\"" + strNullOrdering + "\"");
  }
  return filterDrillSupportedNullDirections(nullDir);
}
private List<KafkaPartitionScanSpec> createScanSpecForTimestamp(String functionName, Long fieldValue) { List<KafkaPartitionScanSpec> scanSpec = Lists.newArrayList(); Map<TopicPartition, Long> timesValMap = Maps.newHashMap(); ImmutableSet<TopicPartition> topicPartitions = fullScanSpec.keySet(); for(TopicPartition partitions : topicPartitions) { timesValMap.put(partitions, functionName.equals("greater_than") ? fieldValue+1 : fieldValue); } Map<TopicPartition, OffsetAndTimestamp> offsetAndTimestamp = kafkaConsumer.offsetsForTimes(timesValMap); for(TopicPartition tp : topicPartitions) { OffsetAndTimestamp value = offsetAndTimestamp.get(tp); //OffsetAndTimestamp is null if there is no offset greater or equal to requested timestamp if(value == null) { scanSpec.add( new KafkaPartitionScanSpec(tp.topic(), tp.partition(), fullScanSpec.get(tp).getEndOffset(), fullScanSpec.get(tp).getEndOffset())); } else { scanSpec.add( new KafkaPartitionScanSpec(tp.topic(), tp.partition(), value.offset(), fullScanSpec.get(tp).getEndOffset())); } } return scanSpec; }
/**
 * Populates the evaluator from a comparison call, normalizing argument order.
 * When the value expression appears first (e.g. {@code 5 < col}), the two
 * arguments are swapped and the function name is replaced by its transposed
 * counterpart from {@code COMPARE_FUNCTIONS_TRANSPOSE_MAP}. Unary calls
 * (no second argument) leave the evaluator untouched.
 */
private static CompareFunctionsProcessor processWithEvaluator(FunctionCall call, CompareFunctionsProcessor evaluator) {
  String functionName = call.getName();
  LogicalExpression nameArg = call.args.get(0);
  LogicalExpression valueArg = call.args.size() >= 2 ? call.args.get(1) : null;
  if (valueArg != null) {
    if (VALUE_EXPRESSION_CLASSES.contains(nameArg.getClass())) {
      // Normalize "literal OP column" into "column transposedOP literal".
      LogicalExpression tmp = nameArg;
      nameArg = valueArg;
      valueArg = tmp;
      evaluator.functionName = COMPARE_FUNCTIONS_TRANSPOSE_MAP.get(functionName);
    }
    evaluator.success = nameArg.accept(evaluator, valueArg);
  }
  return evaluator;
}
/**
 * Builds a {@code KafkaNodeProcessor} for the given comparison call and runs
 * it over the call's arguments.
 * <p>
 * When the value expression appears first (e.g. {@code 5 < col}), the two
 * arguments are swapped and the function name is replaced by its transposed
 * counterpart, mirroring {@code processWithEvaluator}.
 * <p>
 * Fix: the original swapped and visited without checking that a second
 * argument exists, so a unary call whose only argument is a value expression
 * would leave {@code nameArg} null and NPE on {@code accept}. Guarding on
 * {@code valueArg != null} (as the sibling processors do) leaves
 * {@code evaluator.success} at its default for unary calls instead.
 */
public static KafkaNodeProcessor process(FunctionCall call) {
  String functionName = call.getName();
  LogicalExpression nameArg = call.args.get(0);
  LogicalExpression valueArg = call.args.size() >= 2 ? call.args.get(1) : null;
  KafkaNodeProcessor evaluator = new KafkaNodeProcessor(functionName);
  if (valueArg != null) {
    if (VALUE_EXPRESSION_CLASSES.contains(nameArg.getClass())) {
      // Normalize "literal OP column" into "column transposedOP literal".
      LogicalExpression swapArg = valueArg;
      valueArg = nameArg;
      nameArg = swapArg;
      evaluator.functionName = COMPARE_FUNCTIONS_TRANSPOSE_MAP.get(functionName);
    }
    evaluator.success = nameArg.accept(evaluator, valueArg);
  }
  return evaluator;
}
/**
 * Walks the pushdown expression tree and produces the effective per-partition
 * scan specs.
 * <p>
 * First caches the group scan's full partition list in {@code fullScanSpec}
 * (keyed by topic/partition), then lets the logical expression visit this
 * processor to compute the pushed-down spec list.
 *
 * @return the pushed-down scan specs, or null if the visitor produced none
 */
public List<KafkaPartitionScanSpec> parseTree() {
  ImmutableMap.Builder<TopicPartition, KafkaPartitionScanSpec> builder = ImmutableMap.builder();
  for(KafkaPartitionScanSpec scanSpec : groupScan.getPartitionScanSpecList()) {
    builder.put(new TopicPartition(scanSpec.getTopicName(), scanSpec.getPartitionId()), scanSpec);
  }
  fullScanSpec = builder.build();
  List<KafkaPartitionScanSpec> pushdownSpec = le.accept(this, null);
  /*
  Non-existing / invalid partitions may result in empty scan spec.
  This results in a "ScanBatch" with no reader. DRILL currently requires
  at least one reader to be present in a scan batch.
  */
  if(pushdownSpec != null && pushdownSpec.isEmpty()) {
    // Synthesize an empty-range (start == end) spec on partition 0 so that
    // the scan batch still has one reader that yields no rows.
    // NOTE(review): assumes partition 0 is always present in fullScanSpec;
    // if it is not, get(firstPartition) returns null and this NPEs — confirm.
    // NOTE(review): also assumes the visitor's list is mutable — confirm.
    TopicPartition firstPartition = new TopicPartition(groupScan.getKafkaScanSpec().getTopicName(), 0);
    KafkaPartitionScanSpec emptySpec =
        new KafkaPartitionScanSpec(firstPartition.topic(),firstPartition.partition(),
            fullScanSpec.get(firstPartition).getEndOffset(),
            fullScanSpec.get(firstPartition).getEndOffset());
    pushdownSpec.add(emptySpec);
  }
  return pushdownSpec;
}
case "equal": for(TopicPartition tp : topicPartitions) { if(fieldValue < fullScanSpec.get(tp).getStartOffset()) { fullScanSpec.get(tp).getEndOffset(), fullScanSpec.get(tp).getEndOffset())); } else { long val = Math.min(fieldValue, fullScanSpec.get(tp).getEndOffset()); long nextVal = Math.min(val+1, fullScanSpec.get(tp).getEndOffset()); scanSpec.add(new KafkaPartitionScanSpec(tp.topic(), tp.partition(), val, nextVal)); scanSpec.add( new KafkaPartitionScanSpec(tp.topic(), tp.partition(), val, fullScanSpec.get(tp).getEndOffset())); scanSpec.add( new KafkaPartitionScanSpec(tp.topic(), tp.partition(), val, fullScanSpec.get(tp).getEndOffset())); fullScanSpec.get(tp).getStartOffset(), val)); fullScanSpec.get(tp).getStartOffset(), val));
scanSpecList.add( new KafkaPartitionScanSpec(tp.topic(), tp.partition(), fullScanSpec.get(tp).getStartOffset(), fullScanSpec.get(tp).getEndOffset())); scanSpecList.add( new KafkaPartitionScanSpec(tp.topic(), tp.partition(), fullScanSpec.get(tp).getStartOffset(), fullScanSpec.get(tp).getEndOffset())); scanSpecList.add( new KafkaPartitionScanSpec(tp.topic(), tp.partition(), fullScanSpec.get(tp).getStartOffset(), fullScanSpec.get(tp).getEndOffset())); scanSpecList.add( new KafkaPartitionScanSpec(tp.topic(), tp.partition(), fullScanSpec.get(tp).getStartOffset(), fullScanSpec.get(tp).getEndOffset())); scanSpecList.add( new KafkaPartitionScanSpec(tp.topic(), tp.partition(), fullScanSpec.get(tp).getStartOffset(), fullScanSpec.get(tp).getEndOffset())); scanSpecList.add( new KafkaPartitionScanSpec(tp.topic(), tp.partition(), fullScanSpec.get(tp).getStartOffset(), fullScanSpec.get(tp).getEndOffset()));
@Override public void success(SaslMessage value, ByteBuf buffer) { logger.trace("Server responded with message of type: {}", value.getStatus()); final SaslChallengeProcessor processor = CHALLENGE_PROCESSORS.get(value.getStatus()); if (processor == null) { completionListener.failed(RpcException.mapException(
protected static <T extends CompareFunctionsProcessor> T createFunctionsProcessorInstanceInternal(FunctionCall call, boolean nullComparatorSupported, T evaluator) { LogicalExpression nameArg = call.args.get(0); LogicalExpression valueArg = call.args.size() >= 2 ? call.args.get(1) : null; if (valueArg != null) { // binary function if (VALUE_EXPRESSION_CLASSES.contains(nameArg.getClass())) { LogicalExpression swapArg = valueArg; valueArg = nameArg; nameArg = swapArg; evaluator.setFunctionName(COMPARE_FUNCTIONS_TRANSPOSE_MAP.get(evaluator.getFunctionName())); } evaluator.setSuccess(nameArg.accept(evaluator, valueArg)); } else if (nullComparatorSupported && call.args.get(0) instanceof SchemaPath) { evaluator.setSuccess(true); evaluator.setPath((SchemaPath) nameArg); } return evaluator; }
final int width = meta.getPrecision(i); final int scale = meta.getScale(i); MinorType minorType = JDBC_TYPE_MAPPINGS.get(jdbcType); if (minorType == null) {
final long size = getLongArg(call.args.get(2)); queryCond = MapRDBImpl.newCondition() .sizeOf(fieldName, STRING_TO_RELOP.get(relOp), size) .build(); break;
/** * Returns an immutable list of the values for the given key. If no mappings * in the multimap have the provided key, an empty immutable list is * returned. The values are in the same order as the parameters used to build * this multimap. */ @Override public ImmutableList<V> get(@Nullable K key) { // This cast is safe as its type is known in constructor. ImmutableList<V> list = (ImmutableList<V>) map.get(key); return (list == null) ? ImmutableList.<V>of() : list; }
/**
 * Given a Calcite's SqlTypeName, return a Drill's corresponding TypeProtos.MinorType.
 * Unmapped Calcite types fall back to {@code MinorType.LATE}.
 */
public static TypeProtos.MinorType getDrillTypeFromCalciteType(final SqlTypeName sqlTypeName) {
  // Single lookup instead of containsKey() followed by get().
  // NOTE(review): assumes CALCITE_TO_DRILL_MAPPING holds no null values
  // (so null means "absent") — true for Guava immutable maps; confirm.
  TypeProtos.MinorType minorType = CALCITE_TO_DRILL_MAPPING.get(sqlTypeName);
  return minorType == null ? TypeProtos.MinorType.LATE : minorType;
}
public CompareFunctionsProcessor(String functionName) { this.success = false; this.functionName = functionName; this.isEqualityFn = COMPARE_FUNCTIONS_TRANSPOSE_MAP.containsKey(functionName) && COMPARE_FUNCTIONS_TRANSPOSE_MAP.get(functionName).equals(functionName); this.isRowKeyPrefixComparison = false; this.sortOrderAscending = true; }
/**
 * Returns the value mapped to {@code key}, or null when the key is absent.
 */
@Override
public V get(@Nullable Object key) {
  // Resolve the key to its slot index; absent keys have no index.
  Integer index = keyToIndex().get(key);
  if (index == null) {
    return null;
  }
  return getValue(index);
}
@Override public BasicValue newOperation(final AbstractInsnNode insn) throws AnalyzerException { if (insn.getOpcode() == Opcodes.NEW) { final TypeInsnNode t = (TypeInsnNode) insn; // if this is for a holder class, we'll replace it final ValueHolderIden iden = HOLDERS.get(t.desc); if (iden != null) { return ReplacingBasicValue.create(Type.getObjectType(t.desc), iden, index++, valueList); } } return super.newOperation(insn); }