/**
 * Constructs a new tuple source builder that will provide {@link Tuple Tuples} that contain
 * values consistent with the declared {@link Fields}.
 *
 * @param fields the declared fields whose type classes seed this builder
 */
public DataBuilder( Fields fields )
  {
  this( fields, fields.getTypesClasses() );
  }
/**
 * Returns the declared type classes for the given {@link Fields} instance.
 *
 * @param fields the fields to inspect, may be {@code null}
 * @return the field type classes, or {@code null} when types are ignored or no fields were given
 */
public Class[] getTypesFor( Fields fields )
  {
  if( areTypesIgnored() )
    return null;

  if( fields == null )
    return null;

  return fields.getTypesClasses();
  }
/**
 * Resolves the type classes declared on {@code fields}.
 *
 * @param fields the fields whose types are requested, may be {@code null}
 * @return {@code null} if types are ignored or {@code fields} is {@code null}, otherwise the type classes
 */
public Class[] getTypesFor( Fields fields )
  {
  boolean unavailable = areTypesIgnored() || fields == null;

  return unavailable ? null : fields.getTypesClasses();
  }
/**
 * Returns {@code true} unless the element is a {@link CoGroup} whose resolved key fields declare
 * mismatched types across branches. Each branch's key types are compared against the first
 * branch's key types; a mismatch is logged as a warning and disables the optimization.
 *
 * @param processLogger     logger used to warn about mismatched join types
 * @param flowElement       the element under consideration
 * @param resolvedKeyFields key fields per ordinal, may be {@code null}
 * @return {@code true} when key types are common or the check does not apply
 */
private static boolean ifCoGroupAndKeysHaveCommonTypes( ProcessLogger processLogger, FlowElement flowElement, Map<Integer, Fields> resolvedKeyFields )
  {
  if( !( flowElement instanceof CoGroup ) )
    return true;

  if( resolvedKeyFields == null || resolvedKeyFields.size() < 2 )
    return true;

  Iterator<Map.Entry<Integer, Fields>> entries = resolvedKeyFields.entrySet().iterator();
  Fields first = entries.next().getValue();

  while( entries.hasNext() )
    {
    Fields current = entries.next().getValue();

    if( Arrays.equals( first.getTypesClasses(), current.getTypesClasses() ) )
      continue;

    processLogger.logWarn( "unable to perform: {}, on mismatched join types and optimize serialization with type exclusion, fields: {} & {}", flowElement, first, current );

    return false;
    }

  return true;
  }
/**
 * Checks whether a {@link CoGroup} join can use type-exclusion serialization: all resolved key
 * fields must share the same type classes as the first branch. Non-CoGroup elements and joins
 * with fewer than two branches trivially pass.
 *
 * @param processLogger     receives a warning when key types differ between branches
 * @param flowElement       the flow element being tested
 * @param resolvedKeyFields map of ordinal to resolved key fields, may be {@code null}
 * @return {@code false} only when a CoGroup's branch key types disagree
 */
private static boolean ifCoGroupAndKeysHaveCommonTypes( ProcessLogger processLogger, FlowElement flowElement, Map<Integer, Fields> resolvedKeyFields )
  {
  if( !( flowElement instanceof CoGroup ) )
    return true;

  if( resolvedKeyFields == null || resolvedKeyFields.size() < 2 )
    return true;

  Iterator<Map.Entry<Integer, Fields>> iterator = resolvedKeyFields.entrySet().iterator();

  // all subsequent branches are compared against the first branch's key types
  Fields referenceFields = iterator.next().getValue();
  Class[] referenceTypes = referenceFields.getTypesClasses();

  while( iterator.hasNext() )
    {
    Fields candidate = iterator.next().getValue();

    if( !Arrays.equals( referenceTypes, candidate.getTypesClasses() ) )
      {
      processLogger.logWarn( "unable to perform: {}, on mismatched join types and optimize serialization with type exclusion, fields: {} & {}", flowElement, referenceFields, candidate );
      return false;
      }
    }

  return true;
  }
/**
 * Returns a map of ordinal to value field type classes.
 *
 * @return an empty map when types are ignored or no value fields are set
 */
public Map<Integer, Class[]> getValueTypeMap()
  {
  if( areTypesIgnored() )
    return Collections.emptyMap();

  Map<Integer, Fields> valueFieldsMap = getValueFieldsMap();

  if( valueFieldsMap == null )
    return Collections.emptyMap();

  Map<Integer, Class[]> types = new HashMap<>();

  for( Map.Entry<Integer, Fields> entry : valueFieldsMap.entrySet() )
    types.put( entry.getKey(), entry.getValue().getTypesClasses() );

  return types;
  }
/**
 * Returns a map of ordinal to key field type classes.
 *
 * @return an empty map when types are ignored or no key fields are set
 */
public Map<Integer, Class[]> getKeyTypeMap()
  {
  if( areTypesIgnored() )
    return Collections.emptyMap();

  Map<Integer, Fields> keyFieldsMap = getKeyFieldsMap();

  if( keyFieldsMap == null )
    return Collections.emptyMap();

  Map<Integer, Class[]> types = new HashMap<>();

  for( Map.Entry<Integer, Fields> entry : keyFieldsMap.entrySet() )
    types.put( entry.getKey(), entry.getValue().getTypesClasses() );

  return types;
  }
/**
 * Maps each ordinal to the type classes declared on its key fields.
 *
 * @return ordinal to key type classes; empty when types are ignored or key fields are absent
 */
public Map<Integer, Class[]> getKeyTypeMap()
  {
  if( areTypesIgnored() || getKeyFieldsMap() == null )
    return Collections.emptyMap();

  Map<Integer, Class[]> result = new HashMap<>();

  getKeyFieldsMap().forEach( ( ordinal, fields ) -> result.put( ordinal, fields.getTypesClasses() ) );

  return result;
  }
/**
 * Maps each ordinal to the type classes declared on its value fields.
 *
 * @return ordinal to value type classes; empty when types are ignored or value fields are absent
 */
public Map<Integer, Class[]> getValueTypeMap()
  {
  if( areTypesIgnored() || getValueFieldsMap() == null )
    return Collections.emptyMap();

  Map<Integer, Class[]> result = new HashMap<>();

  getValueFieldsMap().forEach( ( ordinal, fields ) -> result.put( ordinal, fields.getTypesClasses() ) );

  return result;
  }
private void registerKryoTypes(Fields fields) { if(fields.hasTypes()) { Class[] fieldTypeClasses = fields.getTypesClasses(); for(Class fieldTypeClass : fieldTypeClasses) { if(!fieldTypeClass.isPrimitive() && !fieldTypeClass.equals(String.class) && !Writable.class.isAssignableFrom(fieldTypeClass)) { // register type if it is neither a primitive, String, or Writable env.getConfig().registerKryoType(fieldTypeClass); } } } }
/**
 * Returns a map of ordinal to value field type classes where positions covered by the
 * corresponding key fields have been masked via {@code maskVoid}.
 *
 * @return empty map when types are ignored or no value fields exist; the plain value type map
 * when there are no key fields to mask against
 */
public Map<Integer, Class[]> getMaskedValueTypeMap()
  {
  if( areTypesIgnored() || getValueFieldsMap() == null )
    return Collections.emptyMap();

  Map<Integer, Fields> keyFieldsMap = getKeyFieldsMap();

  // nothing to mask without key fields
  if( keyFieldsMap == null || keyFieldsMap.isEmpty() )
    return getValueTypeMap();

  Map<Integer, Class[]> result = new HashMap<>();

  for( Map.Entry<Integer, Fields> entry : getValueFieldsMap().entrySet() )
    {
    Integer ordinal = entry.getKey();
    result.put( ordinal, maskVoid( entry.getValue(), keyFieldsMap.get( ordinal ) ).getTypesClasses() );
    }

  return result;
  }
/**
 * Computes the value type classes per ordinal after masking key positions through
 * {@code maskVoid}. Falls back to {@link #getValueTypeMap()} when no key fields are present.
 *
 * @return ordinal to masked value type classes; empty when types are ignored or value fields are unset
 */
public Map<Integer, Class[]> getMaskedValueTypeMap()
  {
  if( areTypesIgnored() || getValueFieldsMap() == null )
    return Collections.emptyMap();

  Map<Integer, Fields> keyFieldsMap = getKeyFieldsMap();
  boolean noKeys = keyFieldsMap == null || keyFieldsMap.isEmpty();

  if( noKeys )
    return getValueTypeMap();

  Map<Integer, Class[]> maskedTypes = new HashMap<>();

  for( Map.Entry<Integer, Fields> entry : getValueFieldsMap().entrySet() )
    {
    Integer ordinal = entry.getKey();
    Fields valueFields = entry.getValue();
    Fields keyFields = keyFieldsMap.get( ordinal );
    Fields masked = maskVoid( valueFields, keyFields );

    maskedTypes.put( ordinal, masked.getTypesClasses() );
    }

  return maskedTypes;
  }
/**
 * Builds a {@code TupleTypeInfo} from a Cascading {@link Fields} schema.
 * A defined schema yields one {@code FieldTypeInfo} per field, indexed by field name.
 * An UNKNOWN schema is modeled with length -1 and a single wildcard field named "0".
 *
 * @param schema the field schema; must be defined or unknown
 * @throws IllegalArgumentException for any other kind of Fields instance
 */
public TupleTypeInfo(Fields schema) {
	super(Tuple.class);

	this.schema = schema;
	this.fieldIndexes = new HashMap<String, Integer>();

	if(schema.isDefined()) {
		this.length = schema.size();
		this.fieldTypes = new LinkedHashMap<String, FieldTypeInfo>(this.length);

		Comparator[] comps = schema.getComparators();
		Class[] typeClasses = schema.getTypesClasses();

		for(int i = 0; i < length; i++) {
			String fieldName = getFieldName(i);
			this.fieldTypes.put(fieldName, getFieldTypeInfo(i, typeClasses, comps));
			this.fieldIndexes.put(fieldName, i);
		}
	}
	else if(schema.isUnknown()) {
		// arity is undetermined; expose a single placeholder field
		this.length = -1;
		this.fieldTypes = new LinkedHashMap<String, FieldTypeInfo>(16);
		this.fieldTypes.put("0", new FieldTypeInfo());
		this.fieldIndexes.put("0", 0);
	}
	else {
		throw new IllegalArgumentException("Unsupported Fields: "+schema);
	}
}
// capture the declared key type classes; presumably consumed by serializer/comparator setup below -- TODO confirm downstream use
Class[] keyTypes = keyFields.getTypesClasses();
private static Pipe addFilter( RelOptCluster cluster, RexProgram program, Pipe pipe ) { final Fields incomingFields = createTypedFields( cluster, program.getInputRowType(), false ); BlockBuilder statements = new BlockBuilder(); Expression condition = RexToLixTranslator.translateCondition( program, (JavaTypeFactory) cluster.getTypeFactory(), statements, new RexToLixTranslator.InputGetter() { public Expression field( BlockBuilder list, int index ) { return Expressions.parameter( incomingFields.getTypeClass( index ), incomingFields.get( index ).toString() ); } } ); // if condition is constant and true, we don't need an expression filter to keep it around boolean keepsAllRecords = condition instanceof ConstantExpression && Boolean.TRUE.equals( ( (ConstantExpression) condition ).value ); if( keepsAllRecords ) return pipe; // create a filter to remove records that don't meet the expression Expression nullToFalse = Expressions.call( Functions.class, "falseIfNull", condition ); Expression not = Expressions.not( nullToFalse ); // matches #isRemove semantics in Filter statements.add( Expressions.return_( null, not ) ); BlockStatement block = statements.toBlock(); String expression = Expressions.toString( block ); LOG.debug( "filter parameters: {}", incomingFields ); LOG.debug( "filter expression: {}", expression ); Filter expressionFilter = new ScriptFilter( expression, incomingFields.getTypesClasses() ); // handles coercions return new Each( pipe, expressionFilter ); }
/**
 * Appends an expression-based {@link Function} to the pipe, derived from the projections of
 * the given {@link RexProgram}. The translated projections are packed into an Object array
 * and wrapped via {@code getConstructor()} before being returned from the generated script.
 *
 * @param cluster the planner cluster supplying the type factory
 * @param program the Rex program whose projections become the function body
 * @param pipe    the upstream pipe
 * @return a new {@link Each} applying the script function, emitting {@link Fields#RESULTS}
 */
private static Pipe addFunction( RelOptCluster cluster, RexProgram program, Pipe pipe )
  {
  final Fields incomingFields = createTypedFields( cluster, program.getInputRowType(), false );

  BlockBuilder builder = new BlockBuilder();

  List<Expression> projections = RexToLixTranslator.translateProjects( program, (JavaTypeFactory) cluster.getTypeFactory(), builder, new RexToLixTranslator.InputGetter()
    {
    public Expression field( BlockBuilder list, int index )
      {
      final Type type = incomingFields.getType( index );
      final String name = incomingFields.get( index ).toString();

      return Expressions.parameter( type, name );
      }
    } );

  // pack the projected values into an Object[] and wrap it in the result type's constructor
  Expression record = Expressions.newArrayInit( Object.class, projections );

  record = Expressions.new_( getConstructor(), record );

  builder.add( Expressions.return_( null, record ) );

  BlockStatement block = builder.toBlock();
  String expression = Expressions.toString( block );

  Fields outgoingFields = createTypedFields( cluster, program.getOutputRowType(), false );

  LOG.debug( "function parameters: {}", program.getInputRowType() );
  LOG.debug( "function results: {}", outgoingFields );
  LOG.debug( "function expression: {}", expression );

  Function scriptFunction = new ScriptTupleFunction( outgoingFields, expression, incomingFields.getTypesClasses() );

  return new Each( pipe, scriptFunction, Fields.RESULTS );
  }
// cache the argument field type classes on the operation context -- presumably reused per-call to avoid re-resolving; TODO confirm
context.parameterTypes = argumentFields.getTypesClasses();
// store the declared types of the argument fields on the context for later parameter handling -- NOTE(review): verify against caller
context.parameterTypes = argumentFields.getTypesClasses();
/**
 * Verifies a simple two-table SQL join planned through {@link SQLPlanner}: two typed delimited
 * sources are joined on EMPID = CUST_ID and the flow must emit exactly two result tuples.
 */
@Test
public void testStatic() throws IOException
  {
  String statement = "select *\n"
    + "from \"example\".\"sales_fact_1997\" as s\n"
    + "join \"example\".\"employee\" as e\n"
    + "on e.\"EMPID\" = s.\"CUST_ID\"";

  Fields employeeFields = new Fields( "EMPID", "NAME" ).applyTypes( Integer.TYPE, String.class );
  Fields salesFields = new Fields( "CUST_ID", "PROD_ID" ).applyTypes( Integer.TYPE, Integer.TYPE );

  Tap employeeTap = getPlatform().getDelimitedFile( employeeFields, true, ",", "\"", employeeFields.getTypesClasses(), SIMPLE_EMPLOYEE_TABLE, SinkMode.KEEP );
  Tap salesTap = getPlatform().getDelimitedFile( salesFields, true, ",", "\"", salesFields.getTypesClasses(), SIMPLE_SALES_FACT_TABLE, SinkMode.KEEP );
  Tap resultsTap = getPlatform().getDelimitedFile( Fields.ALL, true, ",", "\"", null, getOutputPath( getTestName() ), SinkMode.REPLACE );

  SQLPlanner sqlPlanner = new SQLPlanner()
    .setDefaultSchema( "example" )
    .setSql( statement );

  FlowDef flowDef = FlowDef.flowDef()
    .setName( "sql flow" )
    .addSource( "employee", employeeTap )
    .addSource( "sales_fact_1997", salesTap )
    .addSink( "results", resultsTap )
    .addAssemblyPlanner( sqlPlanner );

  Flow flow = getPlatform().getFlowConnector().connect( flowDef );

  validateFlowDescriptor( flow, statement );

  flow.complete();

  validateLength( flow, 2 );
  }
/**
 * Verifies the same two-table join as {@code testStatic} with an added ORDER BY on the join
 * key; the planned flow must still emit exactly two result tuples.
 */
@Test
public void testOrderBy() throws IOException
  {
  String statement = "select *\n"
    + "from \"example\".\"sales_fact_1997\" as s\n"
    + "join \"example\".\"employee\" as e\n"
    + "on e.\"EMPID\" = s.\"CUST_ID\""
    + " ORDER BY s.CUST_ID";

  Fields employeeFields = new Fields( "EMPID", "NAME" ).applyTypes( Integer.TYPE, String.class );
  Fields salesFields = new Fields( "CUST_ID", "PROD_ID" ).applyTypes( Integer.TYPE, Integer.TYPE );

  Tap employeeTap = getPlatform().getDelimitedFile( employeeFields, true, ",", "\"", employeeFields.getTypesClasses(), SIMPLE_EMPLOYEE_TABLE, SinkMode.KEEP );
  Tap salesTap = getPlatform().getDelimitedFile( salesFields, true, ",", "\"", salesFields.getTypesClasses(), SIMPLE_SALES_FACT_TABLE, SinkMode.KEEP );
  Tap resultsTap = getPlatform().getDelimitedFile( Fields.ALL, true, ",", "\"", null, getOutputPath( getTestName() ), SinkMode.REPLACE );

  SQLPlanner sqlPlanner = new SQLPlanner()
    .setDefaultSchema( "example" )
    .setSql( statement );

  FlowDef flowDef = FlowDef.flowDef()
    .setName( "sql flow" )
    .addSource( "employee", employeeTap )
    .addSource( "sales_fact_1997", salesTap )
    .addSink( "results", resultsTap )
    .addAssemblyPlanner( sqlPlanner );

  Flow flow = getPlatform().getFlowConnector().connect( flowDef );

  validateFlowDescriptor( flow, statement );

  flow.complete();

  validateLength( flow, 2 );
  }