/** Delegates to the wrapped instance's declared source {@link Fields}. */
@Override
public Fields getSourceFields()
  {
  return original.getSourceFields();
  }
/** Forwards to the underlying original instance for its source {@link Fields}. */
@Override
public Fields getSourceFields()
  {
  return original.getSourceFields();
  }
// Functional adapter: extracts the declared source Fields from the given Tap.
// NOTE(review): the trailing "} } );" closes an anonymous class and an enclosing
// method call that begin earlier in the file, outside this view — do not rebalance here.
@Override public Fields apply( Tap input ) { return input.getSourceFields(); } } );
/**
 * Reads the {@link Tuple Tuples} from the {@link Tap} and returns them wrapped in a {@link Data} instance whose
 * {@link Fields} conform to those supplied by {@link Tap#getSourceFields()}.
 *
 * @return a {@link Data} instance holding an unmodifiable list of all tuples read from the source tap
 * @throws IOException if reading from the tap fails
 * @throws IllegalArgumentException if the tap's configuration type is neither Hadoop nor local
 */
Data read() throws IOException {
  TupleEntryIterator tuples = null;
  try {
    // Select the iterator implementation based on the tap's configuration class.
    Class<?> tapConfigClass = TapTypeUtil.getTapConfigClass(source);
    if (Configuration.class.equals(tapConfigClass)) {
      tuples = getHadoopTupleEntryIterator();
    } else if (Properties.class.equals(tapConfigClass)) {
      tuples = getLocalTupleEntryIterator();
    } else {
      throw new IllegalArgumentException("Unsupported tap type: " + source.getClass());
    }
    List<Tuple> resultTuples = new ArrayList<>();
    while (tuples.hasNext()) {
      // Copy each tuple out of the entry — the iterator may reuse its backing
      // TupleEntry between calls (hence the defensive new Tuple(...)).
      resultTuples.add(new Tuple(tuples.next().getTuple()));
    }
    return new Data(source.getSourceFields(), Collections.unmodifiableList(resultTuples));
  } finally {
    // Always release the iterator, even when reading throws.
    if (tuples != null) {
      tuples.close();
    }
  }
}
/**
 * Translates the single source {@link Tap} of a {@link FlowNode} into a Flink
 * {@link DataSet} of {@link Tuple Tuples}, wiring the tap's Hadoop configuration
 * into a {@link TapInputFormat} with the requested degree of parallelism.
 */
private DataSet<Tuple> translateSource(FlowProcess flowProcess, ExecutionEnvironment env, FlowNode node, int dop) {

	Tap sourceTap = this.getSingle(node.getSourceTaps());

	// initialize the tap against its own copy of the node configuration
	JobConf initConf = new JobConf(this.getNodeConfig(node));
	sourceTap.sourceConfInit(flowProcess, initConf);
	initConf.set("cascading.step.source", Tap.id(sourceTap));

	Fields sourceFields = sourceTap.getSourceFields();
	registerKryoTypes(sourceFields);

	// fold the tap configuration into a second copy used as the input-format config
	JobConf inputConf = new JobConf(this.getNodeConfig(node));
	MultiInputFormat.addInputFormat(inputConf, initConf);

	return env
		.createInput(new TapInputFormat(node), new TupleTypeInfo(sourceFields))
		.name(sourceTap.getIdentifier())
		.setParallelism(dop)
		.withParameters(FlinkConfigConverter.toFlinkConfig(new Configuration(inputConf)));
}
// NOTE(review): fragment of a larger method whose signature and enclosing condition
// begin earlier in the file (outside this view); the three consecutive returns suggest
// intervening conditionals were lost when this chunk was flattened. Presumably this
// selects the outgoing Scope: incoming fields pass through when a source declares
// Fields.UNKNOWN or a sink declares Fields.ALL, otherwise the declared sink or source
// fields are used — TODO confirm against the full method body.
( isSource() && getSourceFields().equals( Fields.UNKNOWN ) || isSink() && getSinkFields().equals( Fields.ALL ) ) ) return new Scope( incomingFields ); return new Scope( getSinkFields() ); return new Scope( getSourceFields() );
/**
 * Builds an {@link Enumerator} over the rows of the underlying table definition,
 * reading through a platform {@link Tap} after recording the SQL plan.
 */
public Enumerator enumerator()
  {
  PlatformBroker platformBroker = getPlatformBroker();
  Properties properties = platformBroker.getProperties();

  Optiq.writeSQLPlan( properties, Misc.createUniqueName(), getVolcanoPlanner() );

  FlowProcess flowProcess = platformBroker.getFlowProcess();
  SchemaCatalogManager schemaCatalog = platformBroker.getCatalogManager();
  Tap tap = schemaCatalog.createTapFor( getTableDef(), SinkMode.KEEP );

  // resolve a java type for every declared source field
  int fieldCount = tap.getSourceFields().size();
  Type[] fieldTypes = new Type[ fieldCount ];

  for( int index = 0; index < fieldCount; index++ )
    fieldTypes[ index ] = getPhysType().fieldClass( index );

  int maxRows = getMaxRows( properties );

  // single-field rows enumerate bare objects; multi-field rows enumerate arrays
  return fieldCount == 1
    ? new TapObjectEnumerator( maxRows, fieldTypes, flowProcess, tap )
    : new TapArrayEnumerator( maxRows, fieldTypes, flowProcess, tap );
  }
/**
 * Runs the given ensemble assembly over a delimited input file and asserts that the
 * tuples written to the sink match the tuples read from the source within a small
 * numeric tolerance.
 */
protected void performTest( String inputData, Fields predictedFields, Fields expectedFields, EnsembleSpec<TreeSpec> ensembleSpec ) throws IOException
  {
  // pipeline: head -> drop the predicted fields -> ensemble -> tail
  Pipe assembly = new Pipe( "head" );
  assembly = new Discard( assembly, predictedFields );
  assembly = new ParallelEnsembleAssembly( assembly, ensembleSpec );
  assembly = new Pipe( "tail", assembly );

  Tap inputTap = getPlatform().getDelimitedFile( expectedFields.append( predictedFields ), true, ",", "\"", DATA_PATH + inputData, SinkMode.KEEP );
  Tap outputTap = getPlatform().getDelimitedFile( Fields.ALL, true, ",", "\"", getResultPath(), SinkMode.REPLACE );

  FlowDef flowDef = FlowDef.flowDef()
    .addSource( "head", inputTap )
    .addSink( "tail", outputTap )
    .addTail( assembly );

  Flow flow = getPlatform().getFlowConnector().connect( flowDef );

  // dump the plan for debugging, then run to completion
  flow.writeDOT( getFlowPlanPath() + "/plan.dot" );
  flow.complete();

  Fields sourceSelector = inputTap.getSourceFields();
  Fields sinkSelector = outputTap.getSinkFields();

  LOG.debug( "source select = {}", sourceSelector.printVerbose() );
  LOG.debug( "sink select = {}", sinkSelector.printVerbose() );

  List<Tuple> sourceTuples = asList( flow, inputTap, sourceSelector );
  List<Tuple> sinkTuples = asList( flow, outputTap, sinkSelector );

  assertEquals( sourceTuples, sinkTuples, 0.000001d );
  }
}