// Populates the given Properties with this application's identifying metadata:
// id, name, version, tags, frameworks, and the application jar class/path.
// Each setter delegates to an AppProps-style helper declared elsewhere in this class.
@Override protected void addPropertiesTo( Properties properties )
  {
  setApplicationID( properties );
  setApplicationName( properties, name ); // 'name'/'version' are fields of the enclosing class
  setApplicationVersion( properties, version );
  addApplicationTag( properties, getTags() );
  addApplicationFramework( properties, getFrameworks() );
  setApplicationJarClass( properties, jarClass );
  setApplicationJarPath( properties, jarPath );
  }
}
/**
 * Prepares this planner from the connector and property map: builds the default
 * Tez configuration, captures the intermediate scheme class, and resolves (and
 * records) the application jar path so it can be shipped with the flow.
 */
@Override
public void initialize( FlowConnector flowConnector, Map<Object, Object> properties )
  {
  super.initialize( flowConnector, properties );

  defaultConfiguration = TezUtil.createTezConf( properties, createConfiguration( properties ) );
  intermediateSchemeClass = flowConnector.getIntermediateSchemeClass( properties );

  String jarPath = AppProps.getApplicationJarPath( properties );

  if( jarPath == null )
    {
    jarPath = resolveJarPathFromClass( properties );
    AppProps.setApplicationJarPath( properties, jarPath ); // record the result, even if still null
    }

  if( jarPath == null )
    LOG.info( "using application jar not provided, see cascading.property.AppProps for more information" );
  else
    LOG.info( "using application jar: {}", jarPath );
  }

/** Locates the jar containing the declared app jar class, or a discovered main class; may return null. */
private String resolveJarPathFromClass( Map<Object, Object> properties )
  {
  Class type = AppProps.getApplicationJarClass( properties );

  if( type == null )
    type = HadoopUtil.findMainClass( Hadoop2TezPlanner.class );

  return type == null ? null : Util.findContainingJar( type );
  }
/**
 * Derives the application name: the explicitly configured name if present,
 * otherwise a name inferred from the application jar path; null when no
 * properties are supplied.
 */
private String makeAppName( Map<Object, Object> properties )
  {
  if( properties == null )
    return null;

  String appName = AppProps.getApplicationName( properties );

  return appName != null ? appName : Util.findName( AppProps.getApplicationJarPath( properties ) );
  }
/**
 * Derives the application version: the explicitly configured version if
 * present, otherwise a version inferred from the application jar path; null
 * when no properties are supplied.
 */
private String makeAppVersion( Map<Object, Object> properties )
  {
  if( properties == null )
    return null;

  String version = AppProps.getApplicationVersion( properties );

  return version != null ? version : Util.findVersion( AppProps.getApplicationJarPath( properties ) );
  }
// Record the application jar's decoded URI and the declared jar class on the
// shared props so downstream Flow planners can locate and ship the correct jar.
AppProps.setApplicationJarPath(props, ResourceUtils.decode(jar.getURI().toString()));
AppProps.setApplicationJarClass(props, jarClass);
/** Verifies the application jar class round-trips through both Map and Properties stores. */
@Test
public void testApplicationJarPath()
  {
  Class<?> expected = AppPropsTest.class;

  // Map-backed storage
  Map<Object, Object> map = new HashMap<>();
  AppProps.setApplicationJarClass( map, expected );
  assertEquals( expected, AppProps.getApplicationJarClass( map ) );

  // Properties-backed storage
  Properties properties = new Properties();
  AppProps.setApplicationJarClass( properties, expected );
  assertEquals( expected, AppProps.getApplicationJarClass( properties ) );
  }
}
/**
 * Builds the base Properties for test flows: disables speculative execution for
 * both map and reduce tasks, registers this class as the application jar class
 * and name, and pins gather partitions to one.
 */
protected Properties createProperties()
  {
  Properties props = new Properties();

  // speculative task attempts would produce duplicate side effects in tests
  props.setProperty( "mapred.map.tasks.speculative.execution", "false" );
  props.setProperty( "mapred.reduce.tasks.speculative.execution", "false" );

  Class<?> owner = getClass();
  AppProps.setApplicationJarClass( props, owner );
  AppProps.setApplicationName( props, owner.getName() );

  props.setProperty( FlowRuntimeProps.GATHER_PARTITIONS, "1" );

  return props;
  }
@Test public void testCopyConfig() throws Exception { Tap source = new Lfs( new TextLine(), "input/path" ); Tap sink = new Hfs( new TextLine(), "output/path", SinkMode.REPLACE ); Pipe pipe = new Pipe( "test" ); Configuration conf = ( (BaseHadoopPlatform) getPlatform() ).getConfiguration(); conf.set( AppProps.APP_NAME, "testname" ); AppProps props = AppProps.appProps().setVersion( "1.2.3" ); Properties properties = props.buildProperties( conf ); // convert job conf to properties instance Flow flow = getPlatform().getFlowConnector( properties ).connect( source, sink, pipe ); assertEquals( "testname", flow.getProperty( AppProps.APP_NAME ) ); assertEquals( "1.2.3", flow.getProperty( AppProps.APP_VERSION ) ); }
/**
 * Entry point. Joins the users and transactions TSV inputs and writes the
 * workflow's result to the output path.
 *
 * <p>Expected arguments: {@code <usersPath> <transactionsPath> <outputPath>}.
 * Exits with status 1 and a usage message when any argument is missing,
 * instead of failing with a raw ArrayIndexOutOfBoundsException.
 */
public static void main( String[] args )
  {
  if( args.length < 3 )
    {
    System.err.println( "usage: <usersPath> <transactionsPath> <outputPath>" );
    System.exit( 1 );
    }

  String usersPath = args[ 0 ];
  String transactionsPath = args[ 1 ];
  String outputPath = args[ 2 ];

  Properties properties = new Properties();
  AppProps.setApplicationJarClass( properties, LocationsNumForAProduct.class );
  FlowConnector flowConnector = new Hadoop2MR1FlowConnector( properties );

  // tab-delimited sources with declared field names (no header lines)
  Fields users = new Fields( "id", "email", "language", "location" );
  Tap usersTap = new Hfs( new TextDelimited( users, false, "\t" ), usersPath );

  Fields transactions = new Fields( "transaction-id", "product-id", "user-id", "purchase-amount", "item-description" );
  Tap transactionsTap = new Hfs( new TextDelimited( transactions, false, "\t" ), transactionsPath );

  Tap outputTap = new Hfs( new TextDelimited( false, "\t" ), outputPath );

  FlowDef flowDef = createWorkflow( usersTap, transactionsTap, outputTap );
  flowConnector.connect( flowDef ).complete();
  }
}
/**
 * Instantiates a new Lingual flow factory.
 *
 * <p>Copies the broker's properties into this factory, captures the catalog
 * manager, and stamps the Lingual framework name/version and any configured
 * driver tags onto the flow's AppProps.
 *
 * @param platformBroker    the platform broker supplying properties and the catalog manager
 * @param lingualConnection the Lingual connection this flow is created under
 * @param name              the flow name
 * @param tail              the tail pipe of the assembly to be planned
 */
public LingualFlowFactory( PlatformBroker platformBroker, LingualConnection lingualConnection, String name, Pipe tail )
  {
  super( new Properties( platformBroker.getProperties() ), name );
  this.platformBroker = platformBroker;
  this.catalog = platformBroker.getCatalogManager();
  this.lingualConnection = lingualConnection;
  this.tail = tail;

  // advertise the framework and any user-supplied tags for app-level reporting
  AppProps.addApplicationFramework( getProperties(), Version.getName() + ":" + Version.getVersionString() );
  AppProps.addApplicationTag( getProperties(), getProperties().getProperty( Driver.TAGS_PROP ) );
  }
@Test public void testCopyConfig() throws Exception { Tap source = new Lfs( new TextLine(), "input/path" ); Tap sink = new Hfs( new TextLine(), "output/path", SinkMode.REPLACE ); Pipe pipe = new Pipe( "test" ); Configuration conf = ( (BaseHadoopPlatform) getPlatform() ).getConfiguration(); conf.set( AppProps.APP_NAME, "testname" ); AppProps props = AppProps.appProps().setVersion( "1.2.3" ); Properties properties = props.buildProperties( conf ); // convert job conf to properties instance Flow flow = getPlatform().getFlowConnector( properties ).connect( source, sink, pipe ); assertEquals( "testname", flow.getProperty( AppProps.APP_NAME ) ); assertEquals( "1.2.3", flow.getProperty( AppProps.APP_VERSION ) ); }
// Declare Main as the application jar class, then build a local-mode connector
// from the same props so the flow runs in-process.
AppProps.setApplicationJarClass(props, Main.class);
FlowConnector flowConnector = new LocalFlowConnector(props);
/**
 * Initializes this planner: builds the default Tez configuration from the
 * property map, captures the intermediate scheme class, then ensures an
 * application jar path is resolved and recorded in the properties.
 */
@Override
public void initialize( FlowConnector flowConnector, Map<Object, Object> properties )
  {
  super.initialize( flowConnector, properties );

  defaultConfiguration = TezUtil.createTezConf( properties, createConfiguration( properties ) );
  intermediateSchemeClass = flowConnector.getIntermediateSchemeClass( properties );

  String applicationJarPath = AppProps.getApplicationJarPath( properties );

  if( applicationJarPath == null )
    {
    // fall back to the declared jar class, then to a discovered main class
    Class jarClass = AppProps.getApplicationJarClass( properties );

    if( jarClass == null )
      jarClass = HadoopUtil.findMainClass( Hadoop2TezPlanner.class );

    if( jarClass != null )
      applicationJarPath = Util.findContainingJar( jarClass );

    AppProps.setApplicationJarPath( properties, applicationJarPath ); // may record null
    }

  if( applicationJarPath == null )
    LOG.info( "using application jar not provided, see cascading.property.AppProps for more information" );
  else
    LOG.info( "using application jar: {}", applicationJarPath );
  }
/**
 * Initializes this planner: builds the default JobConf from the property map,
 * verifies the platform, captures the intermediate scheme class, and resolves
 * the application jar (declared class, then explicit path, then a discovered
 * main class), recording the result back into the properties.
 */
@Override
public void initialize( FlowConnector flowConnector, Map<Object, Object> properties )
  {
  super.initialize( flowConnector, properties );

  defaultJobConf = HadoopUtil.createJobConf( properties, createJobConf( properties ) );

  checkPlatform( defaultJobConf );
  intermediateSchemeClass = flowConnector.getIntermediateSchemeClass( properties );

  Class type = AppProps.getApplicationJarClass( properties );

  if( defaultJobConf.getJar() == null && type != null )
    defaultJobConf.setJarByClass( type );

  String path = AppProps.getApplicationJarPath( properties );

  if( defaultJobConf.getJar() == null && path != null )
    defaultJobConf.setJar( path );

  if( defaultJobConf.getJar() == null )
    {
    // findMainClass may return null (e.g. no discoverable main class);
    // guard against passing null to setJarByClass
    Class mainClass = HadoopUtil.findMainClass( HadoopPlanner.class );

    if( mainClass != null )
      defaultJobConf.setJarByClass( mainClass );
    }

  AppProps.setApplicationJarPath( properties, defaultJobConf.getJar() );

  // match the Tez planner's messaging instead of logging "jar: null"
  if( defaultJobConf.getJar() != null )
    LOG.info( "using application jar: {}", defaultJobConf.getJar() );
  else
    LOG.info( "using application jar not provided, see cascading.property.AppProps for more information" );
  }
/**
 * Initializes this planner from the connector and property map: creates the
 * default JobConf, checks the platform, captures the intermediate scheme
 * class, and resolves the application jar in precedence order (declared jar
 * class, explicit jar path, discovered main class), writing the final path
 * back into the properties.
 */
@Override
public void initialize( FlowConnector flowConnector, Map<Object, Object> properties )
  {
  super.initialize( flowConnector, properties );

  defaultJobConf = HadoopUtil.createJobConf( properties, createJobConf( properties ) );

  checkPlatform( defaultJobConf );
  intermediateSchemeClass = flowConnector.getIntermediateSchemeClass( properties );

  Class type = AppProps.getApplicationJarClass( properties );

  if( defaultJobConf.getJar() == null && type != null )
    defaultJobConf.setJarByClass( type );

  String path = AppProps.getApplicationJarPath( properties );

  if( defaultJobConf.getJar() == null && path != null )
    defaultJobConf.setJar( path );

  if( defaultJobConf.getJar() == null )
    {
    // HadoopUtil.findMainClass can return null; calling setJarByClass( null )
    // risks a NullPointerException, so only set when a class was found
    Class mainClass = HadoopUtil.findMainClass( HadoopPlanner.class );

    if( mainClass != null )
      defaultJobConf.setJarByClass( mainClass );
    }

  AppProps.setApplicationJarPath( properties, defaultJobConf.getJar() );

  // log the informative no-jar message (as the Tez planner does) rather than "jar: null"
  if( defaultJobConf.getJar() != null )
    LOG.info( "using application jar: {}", defaultJobConf.getJar() );
  else
    LOG.info( "using application jar not provided, see cascading.property.AppProps for more information" );
  }