ExecutionConfig.setCodeAnalysisMode

How to use the setCodeAnalysisMode method in org.apache.flink.api.common.ExecutionConfig

Best Java code snippets using org.apache.flink.api.common.ExecutionConfig.setCodeAnalysisMode (Showing top 11 results out of 315)

origin: apache/flink

public TestEnvironment(
    JobExecutor jobExecutor,
    int parallelism,
    boolean isObjectReuseEnabled,
    Collection<Path> jarFiles,
    Collection<URL> classPaths) {
  this.jobExecutor = Preconditions.checkNotNull(jobExecutor);
  this.jarFiles = Preconditions.checkNotNull(jarFiles);
  this.classPaths = Preconditions.checkNotNull(classPaths);
  setParallelism(parallelism);
  // disabled to improve build time
  getConfig().setCodeAnalysisMode(CodeAnalysisMode.DISABLE);
  if (isObjectReuseEnabled) {
    getConfig().enableObjectReuse();
  } else {
    getConfig().disableObjectReuse();
  }
  lastEnv = null;
}
origin: apache/flink

@Test
public void testFunctionAnalyzerPrecedence() {
  ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
  env.getConfig().setCodeAnalysisMode(CodeAnalysisMode.OPTIMIZE);
  @SuppressWarnings("unchecked")
  DataSet<Tuple3<Long, String, Integer>> input = env.fromElements(Tuple3.of(3L, "test", 42));
  input
      .map(new WildcardForwardedMapper<Tuple3<Long, String, Integer>>())
      .output(new DiscardingOutputFormat<Tuple3<Long, String, Integer>>());
  Plan plan = env.createProgramPlan();
  GenericDataSinkBase<?> sink = plan.getDataSinks().iterator().next();
  MapOperatorBase<?, ?, ?> mapper = (MapOperatorBase<?, ?, ?>) sink.getInput();
  SingleInputSemanticProperties semantics = mapper.getSemanticProperties();
  FieldSet fw1 = semantics.getForwardingTargetFields(0, 0);
  FieldSet fw2 = semantics.getForwardingTargetFields(0, 1);
  FieldSet fw3 = semantics.getForwardingTargetFields(0, 2);
  assertNotNull(fw1);
  assertNotNull(fw2);
  assertNotNull(fw3);
  assertTrue(fw1.contains(0));
  assertTrue(fw2.contains(1));
  assertTrue(fw3.contains(2));
}
origin: apache/flink

@Test
public void testFunctionForwardedAnnotationPrecedence() {
  ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
  env.getConfig().setCodeAnalysisMode(CodeAnalysisMode.OPTIMIZE);
  @SuppressWarnings("unchecked")
  DataSet<Tuple3<Long, String, Integer>> input = env.fromElements(Tuple3.of(3L, "test", 42));
  input
      .map(new WildcardForwardedMapperWithForwardAnnotation<Tuple3<Long, String, Integer>>())
      .output(new DiscardingOutputFormat<Tuple3<Long, String, Integer>>());
  Plan plan = env.createProgramPlan();
  GenericDataSinkBase<?> sink = plan.getDataSinks().iterator().next();
  MapOperatorBase<?, ?, ?> mapper = (MapOperatorBase<?, ?, ?>) sink.getInput();
  SingleInputSemanticProperties semantics = mapper.getSemanticProperties();
  FieldSet fw1 = semantics.getForwardingTargetFields(0, 0);
  FieldSet fw2 = semantics.getForwardingTargetFields(0, 1);
  FieldSet fw3 = semantics.getForwardingTargetFields(0, 2);
  assertNotNull(fw1);
  assertNotNull(fw2);
  assertNotNull(fw3);
  assertTrue(fw1.contains(0));
  assertFalse(fw2.contains(1));
  assertFalse(fw3.contains(2));
}
origin: apache/flink

@Test
public void testFunctionSkipCodeAnalysisAnnotationPrecedence() {
  ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
  env.getConfig().setCodeAnalysisMode(CodeAnalysisMode.OPTIMIZE);
  @SuppressWarnings("unchecked")
  DataSet<Tuple3<Long, String, Integer>> input = env.fromElements(Tuple3.of(3L, "test", 42));
  input
      .map(new WildcardForwardedMapperWithSkipAnnotation<Tuple3<Long, String, Integer>>())
      .output(new DiscardingOutputFormat<Tuple3<Long, String, Integer>>());
  Plan plan = env.createProgramPlan();
  GenericDataSinkBase<?> sink = plan.getDataSinks().iterator().next();
  MapOperatorBase<?, ?, ?> mapper = (MapOperatorBase<?, ?, ?>) sink.getInput();
  SingleInputSemanticProperties semantics = mapper.getSemanticProperties();
  FieldSet fw1 = semantics.getForwardingTargetFields(0, 0);
  FieldSet fw2 = semantics.getForwardingTargetFields(0, 1);
  FieldSet fw3 = semantics.getForwardingTargetFields(0, 2);
  assertNotNull(fw1);
  assertNotNull(fw2);
  assertNotNull(fw3);
  assertFalse(fw1.contains(0));
  assertFalse(fw2.contains(1));
  assertFalse(fw3.contains(2));
}
origin: apache/flink

@Test
public void testFunctionApiPrecedence() {
  ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
  env.getConfig().setCodeAnalysisMode(CodeAnalysisMode.OPTIMIZE);
  @SuppressWarnings("unchecked")
  DataSet<Tuple3<Long, String, Integer>> input = env.fromElements(Tuple3.of(3L, "test", 42));
  input
      .map(new WildcardForwardedMapper<Tuple3<Long, String, Integer>>())
      .withForwardedFields("f0")
      .output(new DiscardingOutputFormat<Tuple3<Long, String, Integer>>());
  Plan plan = env.createProgramPlan();
  GenericDataSinkBase<?> sink = plan.getDataSinks().iterator().next();
  MapOperatorBase<?, ?, ?> mapper = (MapOperatorBase<?, ?, ?>) sink.getInput();
  SingleInputSemanticProperties semantics = mapper.getSemanticProperties();
  FieldSet fw1 = semantics.getForwardingTargetFields(0, 0);
  FieldSet fw2 = semantics.getForwardingTargetFields(0, 1);
  FieldSet fw3 = semantics.getForwardingTargetFields(0, 2);
  assertNotNull(fw1);
  assertNotNull(fw2);
  assertNotNull(fw3);
  assertTrue(fw1.contains(0));
  assertFalse(fw2.contains(1));
  assertFalse(fw3.contains(2));
}
origin: org.apache.flink/flink-test-utils_2.10

public TestEnvironment(
    LocalFlinkMiniCluster miniCluster,
    int parallelism,
    boolean isObjectReuseEnabled,
    Collection<Path> jarFiles,
    Collection<URL> classPaths) {
  this.miniCluster = Preconditions.checkNotNull(miniCluster);
  this.jarFiles = Preconditions.checkNotNull(jarFiles);
  this.classPaths = Preconditions.checkNotNull(classPaths);
  setParallelism(parallelism);
  // disabled to improve build time
  getConfig().setCodeAnalysisMode(CodeAnalysisMode.DISABLE);
  if (isObjectReuseEnabled) {
    getConfig().enableObjectReuse();
  } else {
    getConfig().disableObjectReuse();
  }
  lastEnv = null;
}
The same TestEnvironment(JobExecutor, ...) constructor is also indexed, verbatim, under org.apache.flink/flink-test-utils_2.11, org.apache.flink/flink-test-utils, and com.alibaba.blink/flink-test-utils.
origin: seznam/euphoria (indexed twice)

env.getConfig().setCodeAnalysisMode(CodeAnalysisMode.OPTIMIZE);
org.apache.flink.api.common.ExecutionConfig.setCodeAnalysisMode

Javadoc

Sets the CodeAnalysisMode of the program. This specifies to what extent user-defined functions are analyzed in order to give the Flink optimizer insight into UDF internals and to inform the user about common implementation mistakes. The static code analyzer pre-interprets user-defined functions to gain implementation insights for program improvements, which can be printed to the log, applied automatically, or disabled.
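
A minimal usage sketch, not taken from the snippet index above: it applies setCodeAnalysisMode to a simple DataSet program. The class name, input data, and mapper are illustrative only. OPTIMIZE and DISABLE are the modes used in the snippets on this page; in Flink versions that ship this API, a HINT mode only prints the analyzer's findings to the log without applying them.

import org.apache.flink.api.common.CodeAnalysisMode;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;

public class CodeAnalysisModeExample {

  public static void main(String[] args) throws Exception {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    // Let the static code analyzer apply its findings automatically;
    // CodeAnalysisMode.DISABLE would switch the analyzer off entirely.
    env.getConfig().setCodeAnalysisMode(CodeAnalysisMode.OPTIMIZE);

    DataSet<String> words = env.fromElements("to", "be", "or", "not", "to", "be");
    words
        .map(new MapFunction<String, String>() {
          @Override
          public String map(String value) {
            return value.toUpperCase();
          }
        })
        .print();
  }
}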

Popular methods of ExecutionConfig

  • <init>
  • isObjectReuseEnabled
    Returns whether object reuse has been enabled or disabled. See enableObjectReuse().
  • disableSysoutLogging
    Disables the printing of progress update messages to System.out
  • getAutoWatermarkInterval
    Returns the interval of the automatic watermark emission.
  • setGlobalJobParameters
    Register a custom, serializable user configuration object.
  • enableObjectReuse
    Enables reusing objects that Flink internally uses for deserialization and passing data to user-code functions.
  • setAutoWatermarkInterval
    Sets the interval of the automatic watermark emission. Watermarks are used throughout the streaming API to keep track of the progress of time.
  • disableObjectReuse
    Disables reusing objects that Flink internally uses for deserialization and passing data to user-code functions.
  • getRestartStrategy
    Returns the restart strategy which has been set for the current job.
  • isSysoutLoggingEnabled
    Gets whether progress update messages should be printed to System.out
  • registerKryoType
    Registers the given type with the serialization stack. If the type is eventually serialized as a POJO, then the type is registered with the POJO serializer.
  • registerTypeWithKryoSerializer
    Registers the given Serializer via its class as a serializer for the given type at the KryoSerializer.
  • setRestartStrategy
  • getParallelism
  • addDefaultKryoSerializer
  • getGlobalJobParameters
  • getNumberOfExecutionRetries
  • getRegisteredKryoTypes
  • setParallelism
  • getDefaultKryoSerializerClasses
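
A short sketch exercising several of the methods listed above on a standalone ExecutionConfig; the parallelism, restart strategy, watermark interval, and registered type are arbitrary example values, not recommendations.

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;

public class ExecutionConfigTour {

  public static void main(String[] args) {
    ExecutionConfig config = new ExecutionConfig();

    // Parallelism and restart behaviour (example values only).
    config.setParallelism(4);
    config.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 10_000L));

    // Enable object reuse and read the flag back.
    config.enableObjectReuse();
    System.out.println("object reuse enabled: " + config.isObjectReuseEnabled());

    // Watermark interval for streaming jobs, in milliseconds.
    config.setAutoWatermarkInterval(200L);
    System.out.println("watermark interval: " + config.getAutoWatermarkInterval());

    // Register a type with Kryo up front (type chosen arbitrarily for illustration).
    config.registerKryoType(java.util.ArrayList.class);
  }
}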
