/**
 * Gets the global job parameter value associated with the given key as a string.
 *
 * @param key key pointing to the associated value
 * @param defaultValue default value which is returned in case global job parameter is null
 *     or there is no value associated with the given key
 * @return (default) value associated with the given key
 */
public String getJobParameter(String key, String defaultValue) {
    final GlobalJobParameters conf = context.getExecutionConfig().getGlobalJobParameters();
    // No global parameters registered at all: fall straight back to the default.
    if (conf == null) {
        return defaultValue;
    }
    return conf.toMap().containsKey(key) ? conf.toMap().get(key) : defaultValue;
}
}
public ArchivedExecutionConfig(ExecutionConfig ec) { executionMode = ec.getExecutionMode().name(); if (ec.getRestartStrategy() != null) { restartStrategyDescription = ec.getRestartStrategy().getDescription(); } else { restartStrategyDescription = "default"; } parallelism = ec.getParallelism(); objectReuseEnabled = ec.isObjectReuseEnabled(); if (ec.getGlobalJobParameters() != null && ec.getGlobalJobParameters().toMap() != null) { globalJobParameters = ec.getGlobalJobParameters().toMap(); } else { globalJobParameters = Collections.emptyMap(); } }
// Resolve the Python script name from the job-wide parameters attached to the execution config.
// NOTE(review): assumes PythonJobParameters populated the global job parameters at submission
// time — confirm against the environment setup code.
String scriptName = PythonStreamExecutionEnvironment.PythonJobParameters.getScriptName(context.getExecutionConfig().getGlobalJobParameters());
// Emit every global job parameter as a string field of the JSON object being written.
// NOTE(review): getGlobalJobParameters() can return null when no parameters were set —
// verify a null/empty guard exists upstream, otherwise this loop header throws an NPE.
for (Map.Entry<String, String> entry : env.getConfig().getGlobalJobParameters().toMap().entrySet()) { json.writeStringField(entry.getKey(), entry.getValue());
/**
 * Checks whether metric collection was switched on via the global job parameters.
 *
 * @return {@code true} iff the metrics config key is present and matches the expected
 *     value, compared case-insensitively; {@code false} when no parameters are set
 */
private boolean isCollectMetricEnabled() {
    final ExecutionConfig.GlobalJobParameters parameters =
            getExecutionConfig().getGlobalJobParameters();
    // Without global parameters there is nothing to enable metrics with.
    if (parameters == null) {
        return false;
    }
    final String metricsConfig = parameters.toMap().get(METRICS_CONF_KEY);
    return metricsConfig != null && metricsConfig.equalsIgnoreCase(METRICS_CONF_VALUE);
}
@Override public TypeSerializer<T> createSerializer(ExecutionConfig config) { if (config.getGlobalJobParameters() != null) { Configuration parameters = new Configuration(); parameters.addAll(config.getGlobalJobParameters().toMap()); if (parameters.getBoolean(StateUtil.STATE_BACKEND_ON_HEAP, true)) { // using heap statebackend // use non-ordered serialization here when String/BinaryString/byte[] if (heapSerializer != null) { return this.heapSerializer; } } } return this.serializer; }
/**
 * Gets the global job parameter value associated with the given key as a string.
 *
 * @param key key pointing to the associated value
 * @param defaultValue default value which is returned in case the global job parameters
 *     are null or there is no value associated with the given key
 * @return (default) value associated with the given key
 */
@Override
public String getJobParameter(String key, String defaultValue) {
    // Fixed a botched "default" -> "void" find-and-replace that had produced the
    // unreadable identifier "voidaultValue" and garbled Javadoc.
    ExecutionConfig.GlobalJobParameters conf = context.getExecutionConfig().getGlobalJobParameters();
    if (conf != null && conf.toMap().containsKey(key)) {
        return conf.toMap().get(key);
    } else {
        return defaultValue;
    }
}
}
public ArchivedExecutionConfig(ExecutionConfig ec) { executionMode = ec.getExecutionMode().name(); if (ec.getRestartStrategy() != null) { restartStrategyDescription = ec.getRestartStrategy().getDescription(); } else { restartStrategyDescription = "default"; } parallelism = ec.getParallelism(); objectReuseEnabled = ec.isObjectReuseEnabled(); if (ec.getGlobalJobParameters() != null && ec.getGlobalJobParameters().toMap() != null) { globalJobParameters = ec.getGlobalJobParameters().toMap(); } else { globalJobParameters = Collections.emptyMap(); } }
public ArchivedExecutionConfig(ExecutionConfig ec) { executionMode = ec.getExecutionMode().name(); if (ec.getRestartStrategy() != null) { restartStrategyDescription = ec.getRestartStrategy().getDescription(); } else { restartStrategyDescription = "default"; } parallelism = ec.getParallelism(); objectReuseEnabled = ec.isObjectReuseEnabled(); if (ec.getGlobalJobParameters() != null && ec.getGlobalJobParameters().toMap() != null) { globalJobParameters = ec.getGlobalJobParameters().toMap(); } else { globalJobParameters = Collections.emptyMap(); } }
/**
 * Builds a Metrics source, reading the topic and the start time from the job-wide
 * {@code ParameterTool} registered as global job parameters.
 *
 * @param env the stream execution environment carrying the global job parameters
 * @return a Kafka-backed source of Metrics events
 * @throws IllegalAccessException propagated from the underlying source builder
 */
public static DataStreamSource<Metrics> buildSource(StreamExecutionEnvironment env) throws IllegalAccessException {
    final ParameterTool params = (ParameterTool) env.getConfig().getGlobalJobParameters();
    final Long fromTime = params.getLong(PropertiesConstants.CONSUMER_FROM_TIME, 0L);
    final String topic = params.getRequired(PropertiesConstants.METRICS_TOPIC);
    return buildSource(env, topic, fromTime);
}
/**
 * Lazily resolves the SQL configuration: the task's job configuration overlaid with the
 * global job parameters, cached in {@code sqlConf} after the first call.
 *
 * <p>NOTE(review): {@code conf.addAll(...)} mutates the object returned by
 * {@code getContainingTask().getJobConfiguration()} — confirm that object is a private
 * copy, otherwise the task's shared job configuration is modified in place.
 *
 * <p>NOTE(review): the lazy initialization is unsynchronized — presumably only ever
 * called from the single operator thread; verify if accessed concurrently.
 *
 * @return the merged configuration (same instance on every subsequent call)
 */
public Configuration getSqlConf() {
    if (sqlConf != null) {
        return sqlConf;
    }
    Configuration conf = getContainingTask().getJobConfiguration();
    ExecutionConfig.GlobalJobParameters paras = getExecutionConfig().getGlobalJobParameters();
    if (paras != null) {
        conf.addAll(paras.toMap());
    }
    this.sqlConf = conf;
    return conf;
}
}
// Merge the supplied globalJobParameters into the execution config: install them wholesale
// when none are set yet; otherwise, when the existing parameters are Configuration-backed,
// add the new entries into that existing object.
// NOTE(review): parameters of any other GlobalJobParameters subtype appear to be handled
// (or dropped) by a branch outside this excerpt — confirm.
if(exeConfig.getGlobalJobParameters() == null){ exeConfig.setGlobalJobParameters(globalJobParameters); }else if(exeConfig.getGlobalJobParameters() instanceof Configuration){ ((Configuration) exeConfig.getGlobalJobParameters()).addAll(globalJobParameters);
// Fetch the job-wide parameters and prepare an empty StormConfig to populate from them.
// NOTE(review): config is null when no global parameters were registered — presumably
// checked by the code that follows this excerpt; verify.
GlobalJobParameters config = getExecutionConfig().getGlobalJobParameters(); StormConfig stormConfig = new StormConfig();
/** * @param env * @param topic * @param time 订阅的时间 * @return * @throws IllegalAccessException */ public static DataStreamSource<Metrics> buildSource(StreamExecutionEnvironment env, String topic, Long time) throws IllegalAccessException { ParameterTool parameterTool = (ParameterTool) env.getConfig().getGlobalJobParameters(); Properties props = buildKafkaProps(parameterTool); FlinkKafkaConsumer011<Metrics> consumer = new FlinkKafkaConsumer011<>( topic, new MetricSchema(), props); //重置offset到time时刻 if (time != 0L) { Map<KafkaTopicPartition, Long> partitionOffset = buildOffsetByTime(props, parameterTool, time); consumer.setStartFromSpecificOffsets(partitionOffset); } return env.addSource(consumer); }
@Override
public final void run(final SourceContext<OUT> ctx) throws Exception {
    // Pull the job-wide parameters; the Storm-specific configuration is extracted from
    // them below (rest of the method body is outside this excerpt).
    // NOTE(review): config may be null when no global parameters were set — verify the
    // following code guards against that before reading from it.
    final GlobalJobParameters config = super.getRuntimeContext().getExecutionConfig()
            .getGlobalJobParameters();
    StormConfig stormConfig = new StormConfig();