ExecutionConfig.setCodeAnalysisMode

How to use the setCodeAnalysisMode method in org.apache.flink.api.common.ExecutionConfig

Best Java code snippets using org.apache.flink.api.common.ExecutionConfig.setCodeAnalysisMode (Showing top 11 results out of 315)

  • Common ways to obtain ExecutionConfig:
    ExecutionConfig e = new ExecutionConfig();
    StreamExecutionEnvironment env; ExecutionConfig e = env.getConfig().disableSysoutLogging();
    ExecutionEnvironment env; ExecutionConfig e = env.getConfig().disableSysoutLogging();
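
A minimal, self-contained sketch of the two routes above (the class name ObtainExecutionConfig is illustrative, not from the page):

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.java.ExecutionEnvironment;

public class ObtainExecutionConfig {
  public static void main(String[] args) {
    // Standalone config object, e.g. for unit tests:
    ExecutionConfig standalone = new ExecutionConfig();

    // Config attached to an execution environment (the usual route):
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    ExecutionConfig attached = env.getConfig();

    System.out.println(standalone.getParallelism());
    System.out.println(attached.getParallelism());
  }
}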
origin: apache/flink

public TestEnvironment(
    JobExecutor jobExecutor,
    int parallelism,
    boolean isObjectReuseEnabled,
    Collection<Path> jarFiles,
    Collection<URL> classPaths) {
  this.jobExecutor = Preconditions.checkNotNull(jobExecutor);
  this.jarFiles = Preconditions.checkNotNull(jarFiles);
  this.classPaths = Preconditions.checkNotNull(classPaths);
  setParallelism(parallelism);
  // disabled to improve build time
  getConfig().setCodeAnalysisMode(CodeAnalysisMode.DISABLE);
  if (isObjectReuseEnabled) {
    getConfig().enableObjectReuse();
  } else {
    getConfig().disableObjectReuse();
  }
  lastEnv = null;
}
origin: apache/flink

@Test
public void testFunctionAnalyzerPrecedence() {
  ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
  env.getConfig().setCodeAnalysisMode(CodeAnalysisMode.OPTIMIZE);
  @SuppressWarnings("unchecked")
  DataSet<Tuple3<Long, String, Integer>> input = env.fromElements(Tuple3.of(3L, "test", 42));
  input
      .map(new WildcardForwardedMapper<Tuple3<Long, String, Integer>>())
      .output(new DiscardingOutputFormat<Tuple3<Long, String, Integer>>());
  Plan plan = env.createProgramPlan();
  GenericDataSinkBase<?> sink = plan.getDataSinks().iterator().next();
  MapOperatorBase<?, ?, ?> mapper = (MapOperatorBase<?, ?, ?>) sink.getInput();
  SingleInputSemanticProperties semantics = mapper.getSemanticProperties();
  FieldSet fw1 = semantics.getForwardingTargetFields(0, 0);
  FieldSet fw2 = semantics.getForwardingTargetFields(0, 1);
  FieldSet fw3 = semantics.getForwardingTargetFields(0, 2);
  assertNotNull(fw1);
  assertNotNull(fw2);
  assertNotNull(fw3);
  assertTrue(fw1.contains(0));
  assertTrue(fw2.contains(1));
  assertTrue(fw3.contains(2));
}
origin: apache/flink

@Test
public void testFunctionForwardedAnnotationPrecedence() {
  ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
  env.getConfig().setCodeAnalysisMode(CodeAnalysisMode.OPTIMIZE);
  @SuppressWarnings("unchecked")
  DataSet<Tuple3<Long, String, Integer>> input = env.fromElements(Tuple3.of(3L, "test", 42));
  input
      .map(new WildcardForwardedMapperWithForwardAnnotation<Tuple3<Long, String, Integer>>())
      .output(new DiscardingOutputFormat<Tuple3<Long, String, Integer>>());
  Plan plan = env.createProgramPlan();
  GenericDataSinkBase<?> sink = plan.getDataSinks().iterator().next();
  MapOperatorBase<?, ?, ?> mapper = (MapOperatorBase<?, ?, ?>) sink.getInput();
  SingleInputSemanticProperties semantics = mapper.getSemanticProperties();
  FieldSet fw1 = semantics.getForwardingTargetFields(0, 0);
  FieldSet fw2 = semantics.getForwardingTargetFields(0, 1);
  FieldSet fw3 = semantics.getForwardingTargetFields(0, 2);
  assertNotNull(fw1);
  assertNotNull(fw2);
  assertNotNull(fw3);
  assertTrue(fw1.contains(0));
  assertFalse(fw2.contains(1));
  assertFalse(fw3.contains(2));
}
origin: apache/flink

@Test
public void testFunctionSkipCodeAnalysisAnnotationPrecedence() {
  ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
  env.getConfig().setCodeAnalysisMode(CodeAnalysisMode.OPTIMIZE);
  @SuppressWarnings("unchecked")
  DataSet<Tuple3<Long, String, Integer>> input = env.fromElements(Tuple3.of(3L, "test", 42));
  input
      .map(new WildcardForwardedMapperWithSkipAnnotation<Tuple3<Long, String, Integer>>())
      .output(new DiscardingOutputFormat<Tuple3<Long, String, Integer>>());
  Plan plan = env.createProgramPlan();
  GenericDataSinkBase<?> sink = plan.getDataSinks().iterator().next();
  MapOperatorBase<?, ?, ?> mapper = (MapOperatorBase<?, ?, ?>) sink.getInput();
  SingleInputSemanticProperties semantics = mapper.getSemanticProperties();
  FieldSet fw1 = semantics.getForwardingTargetFields(0, 0);
  FieldSet fw2 = semantics.getForwardingTargetFields(0, 1);
  FieldSet fw3 = semantics.getForwardingTargetFields(0, 2);
  assertNotNull(fw1);
  assertNotNull(fw2);
  assertNotNull(fw3);
  assertFalse(fw1.contains(0));
  assertFalse(fw2.contains(1));
  assertFalse(fw3.contains(2));
}
origin: apache/flink

@Test
public void testFunctionApiPrecedence() {
  ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
  env.getConfig().setCodeAnalysisMode(CodeAnalysisMode.OPTIMIZE);
  @SuppressWarnings("unchecked")
  DataSet<Tuple3<Long, String, Integer>> input = env.fromElements(Tuple3.of(3L, "test", 42));
  input
      .map(new WildcardForwardedMapper<Tuple3<Long, String, Integer>>())
      .withForwardedFields("f0")
      .output(new DiscardingOutputFormat<Tuple3<Long, String, Integer>>());
  Plan plan = env.createProgramPlan();
  GenericDataSinkBase<?> sink = plan.getDataSinks().iterator().next();
  MapOperatorBase<?, ?, ?> mapper = (MapOperatorBase<?, ?, ?>) sink.getInput();
  SingleInputSemanticProperties semantics = mapper.getSemanticProperties();
  FieldSet fw1 = semantics.getForwardingTargetFields(0, 0);
  FieldSet fw2 = semantics.getForwardingTargetFields(0, 1);
  FieldSet fw3 = semantics.getForwardingTargetFields(0, 2);
  assertNotNull(fw1);
  assertNotNull(fw2);
  assertNotNull(fw3);
  assertTrue(fw1.contains(0));
  assertFalse(fw2.contains(1));
  assertFalse(fw3.contains(2));
}
origin: org.apache.flink/flink-test-utils_2.10

public TestEnvironment(
    LocalFlinkMiniCluster miniCluster,
    int parallelism,
    boolean isObjectReuseEnabled,
    Collection<Path> jarFiles,
    Collection<URL> classPaths) {
  this.miniCluster = Preconditions.checkNotNull(miniCluster);
  this.jarFiles = Preconditions.checkNotNull(jarFiles);
  this.classPaths = Preconditions.checkNotNull(classPaths);
  setParallelism(parallelism);
  // disabled to improve build time
  getConfig().setCodeAnalysisMode(CodeAnalysisMode.DISABLE);
  if (isObjectReuseEnabled) {
    getConfig().enableObjectReuse();
  } else {
    getConfig().disableObjectReuse();
  }
  lastEnv = null;
}
origin: org.apache.flink/flink-test-utils_2.11, org.apache.flink/flink-test-utils, com.alibaba.blink/flink-test-utils

public TestEnvironment(
    JobExecutor jobExecutor,
    int parallelism,
    boolean isObjectReuseEnabled,
    Collection<Path> jarFiles,
    Collection<URL> classPaths) {
  this.jobExecutor = Preconditions.checkNotNull(jobExecutor);
  this.jarFiles = Preconditions.checkNotNull(jarFiles);
  this.classPaths = Preconditions.checkNotNull(classPaths);
  setParallelism(parallelism);
  // disabled to improve build time
  getConfig().setCodeAnalysisMode(CodeAnalysisMode.DISABLE);
  if (isObjectReuseEnabled) {
    getConfig().enableObjectReuse();
  } else {
    getConfig().disableObjectReuse();
  }
  lastEnv = null;
}
origin: seznam/euphoria

env.getConfig().setCodeAnalysisMode(CodeAnalysisMode.OPTIMIZE);
org.apache.flink.api.common.ExecutionConfig.setCodeAnalysisMode

Javadoc

Sets the CodeAnalysisMode of the program. Specifies to what extent user-defined functions are analyzed in order to give the Flink optimizer insight into UDF internals and to inform the user about common implementation mistakes. The static code analyzer pre-interprets user-defined functions to gather implementation insights for program improvements, which can be printed to the log, applied automatically, or disabled.
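
In the Flink releases these snippets target, CodeAnalysisMode has three constants: DISABLE, HINT, and OPTIMIZE (DISABLE being the default, to the best of our knowledge). A minimal sketch of switching the mode, using the batch ExecutionEnvironment as the tests above do:

import org.apache.flink.api.common.CodeAnalysisMode;
import org.apache.flink.api.java.ExecutionEnvironment;

public class CodeAnalysisModeExample {
  public static void main(String[] args) {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    // DISABLE:  skip static code analysis entirely (the default).
    // HINT:     analyze UDFs and log hints about possible improvements.
    // OPTIMIZE: analyze UDFs and apply findings (e.g. forwarded fields) automatically.
    env.getConfig().setCodeAnalysisMode(CodeAnalysisMode.HINT);
  }
}

Note that explicit forwarded-field annotations and withForwardedFields(...) calls take precedence over the analyzer's findings, which is exactly what the precedence tests above verify.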

Popular methods of ExecutionConfig

  • <init>
  • isObjectReuseEnabled
Returns whether object reuse has been enabled or disabled (see enableObjectReuse()).
  • disableSysoutLogging
    Disables the printing of progress update messages to System.out
  • getAutoWatermarkInterval
    Returns the interval of the automatic watermark emission.
  • setGlobalJobParameters
    Register a custom, serializable user configuration object.
  • enableObjectReuse
    Enables reusing objects that Flink internally uses for deserialization and passing data to user-code functions.
  • setAutoWatermarkInterval
    Sets the interval of the automatic watermark emission. Watermarks are used throughout the streaming API to track the progress of event time.
  • disableObjectReuse
    Disables reusing objects that Flink internally uses for deserialization and passing data to user-code functions.
  • getRestartStrategy
    Returns the restart strategy which has been set for the current job.
  • isSysoutLoggingEnabled
    Gets whether progress update messages should be printed to System.out
  • registerKryoType
    Registers the given type with the serialization stack. If the type is eventually serialized as a POJO, it is registered with the POJO serializer; if it ends up being serialized with Kryo, it is registered with Kryo.
  • registerTypeWithKryoSerializer
    Registers the given Serializer via its class as a serializer for the given type at the KryoSerializer.
  • setRestartStrategy
  • getParallelism
  • addDefaultKryoSerializer
  • getGlobalJobParameters
  • getNumberOfExecutionRetries
  • getRegisteredKryoTypes
  • setParallelism
  • getDefaultKryoSerializerClasses
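
A short sketch (not taken from the page's results) exercising a few of the listed methods together; the class name and the concrete values are illustrative only:

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;

public class ExecutionConfigTour {
  public static void main(String[] args) {
    ExecutionConfig config = new ExecutionConfig();

    config.setParallelism(4);               // default operator parallelism
    config.enableObjectReuse();             // reuse objects handed to user code
    config.setAutoWatermarkInterval(200L);  // emit watermarks every 200 ms
    config.setRestartStrategy(
        RestartStrategies.fixedDelayRestart(3, 1000L)); // 3 attempts, 1 s apart

    System.out.println(config.isObjectReuseEnabled());     // true
    System.out.println(config.getAutoWatermarkInterval()); // 200
  }
}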
