TaskInfo.<init>

How to use the org.apache.flink.api.common.TaskInfo constructor

Best Java code snippets using org.apache.flink.api.common.TaskInfo.<init> (Showing top 20 results out of 315)
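Most of the snippets below use the five-argument constructor, whose parameters (in order) are the task name, the maximum number of parallel subtasks (the number of key groups / max parallelism), the index of this subtask, the number of parallel subtasks, and the attempt number. The following is a minimal, self-contained sketch of that pattern; the class name and the values are illustrative and not taken from any of the snippets:

import org.apache.flink.api.common.TaskInfo;

public class TaskInfoConstructorExample {

  public static void main(String[] args) {
    // TaskInfo(taskName, maxNumberOfParallelSubtasks, indexOfThisSubtask,
    //          numberOfParallelSubtasks, attemptNumber)
    TaskInfo taskInfo = new TaskInfo("My Operator", 4, 1, 4, 0);

    System.out.println(taskInfo.getTaskNameWithSubtasks());     // e.g. "My Operator (2/4)"
    System.out.println(taskInfo.getIndexOfThisSubtask());       // 1
    System.out.println(taskInfo.getNumberOfParallelSubtasks()); // 4
    System.out.println(taskInfo.getAttemptNumber());            // 0
  }
}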

origin: apache/flink

private <OUT> List<OUT> executeDataSource(GenericDataSourceBase<?, ?> source, int superStep)
    throws Exception {
  @SuppressWarnings("unchecked")
  GenericDataSourceBase<OUT, ?> typedSource = (GenericDataSourceBase<OUT, ?>) source;
  // build the runtime context and compute broadcast variables, if necessary
  TaskInfo taskInfo = new TaskInfo(typedSource.getName(), 1, 0, 1, 0);
  
  RuntimeUDFContext ctx;
  MetricGroup metrics = new UnregisteredMetricsGroup();
  if (RichInputFormat.class.isAssignableFrom(typedSource.getUserCodeWrapper().getUserCodeClass())) {
    ctx = superStep == 0 ? new RuntimeUDFContext(taskInfo, userCodeClassLoader, executionConfig, cachedFiles, accumulators, metrics) :
        new IterationRuntimeUDFContext(taskInfo, userCodeClassLoader, executionConfig, cachedFiles, accumulators, metrics);
  } else {
    ctx = null;
  }
  return typedSource.executeOnCollections(ctx, executionConfig);
}

origin: apache/flink

private <IN, OUT> List<OUT> executeUnaryOperator(SingleInputOperator<?, ?, ?> operator, int superStep) throws Exception {
  Operator<?> inputOp = operator.getInput();
  if (inputOp == null) {
    throw new InvalidProgramException("The unary operation " + operator.getName() + " has no input.");
  }
  
  @SuppressWarnings("unchecked")
  List<IN> inputData = (List<IN>) execute(inputOp, superStep);
  
  @SuppressWarnings("unchecked")
  SingleInputOperator<IN, OUT, ?> typedOp = (SingleInputOperator<IN, OUT, ?>) operator;
  
  // build the runtime context and compute broadcast variables, if necessary
  TaskInfo taskInfo = new TaskInfo(typedOp.getName(), 1, 0, 1, 0);
  RuntimeUDFContext ctx;
  MetricGroup metrics = new UnregisteredMetricsGroup();
  if (RichFunction.class.isAssignableFrom(typedOp.getUserCodeWrapper().getUserCodeClass())) {
    ctx = superStep == 0 ? new RuntimeUDFContext(taskInfo, userCodeClassLoader, executionConfig, cachedFiles, accumulators, metrics) :
        new IterationRuntimeUDFContext(taskInfo, userCodeClassLoader, executionConfig, cachedFiles, accumulators, metrics);
    
    for (Map.Entry<String, Operator<?>> bcInputs : operator.getBroadcastInputs().entrySet()) {
      List<?> bcData = execute(bcInputs.getValue());
      ctx.setBroadcastVariable(bcInputs.getKey(), bcData);
    }
  } else {
    ctx = null;
  }
  return typedOp.executeOnCollections(inputData, ctx, executionConfig);
}

origin: apache/flink

private void testExecuteOnCollection(FlatMapFunction<String, String> udf, List<String> input, boolean mutableSafe) throws Exception {
  ExecutionConfig executionConfig = new ExecutionConfig();
  if (mutableSafe) {
    executionConfig.disableObjectReuse();
  } else {
    executionConfig.enableObjectReuse();
  }
  final TaskInfo taskInfo = new TaskInfo("Test UDF", 4, 0, 4, 0);
  // run on collections
  final List<String> result = getTestFlatMapOperator(udf)
      .executeOnCollections(input,
          new RuntimeUDFContext(
            taskInfo,  null, executionConfig, new HashMap<String, Future<Path>>(),
            new HashMap<String, Accumulator<?, ?>>(), new UnregisteredMetricsGroup()),
          executionConfig);
  Assert.assertEquals(input.size(), result.size());
  Assert.assertEquals(input, result);
}
origin: apache/flink

@Test
public void testCheckRuntimeContextAccess() {
  final SerializedOutputFormat<Value> inputFormat = new SerializedOutputFormat<Value>();
  final TaskInfo taskInfo = new TaskInfo("test name", 3, 1, 3, 0);
  
  inputFormat.setRuntimeContext(new RuntimeUDFContext(
      taskInfo, getClass().getClassLoader(), new ExecutionConfig(),
      new HashMap<String, Future<Path>>(),
      new HashMap<String, Accumulator<?, ?>>(),
      new UnregisteredMetricsGroup()));
  assertEquals(inputFormat.getRuntimeContext().getIndexOfThisSubtask(), 1);
  assertEquals(inputFormat.getRuntimeContext().getNumberOfParallelSubtasks(), 3);
}
origin: apache/flink

@Test
public void testCheckRuntimeContextAccess() {
  final SerializedInputFormat<Value> inputFormat = new SerializedInputFormat<Value>();
  final TaskInfo taskInfo = new TaskInfo("test name", 3, 1, 3, 0);
  inputFormat.setRuntimeContext(
      new RuntimeUDFContext(
          taskInfo, getClass().getClassLoader(), new ExecutionConfig(),
          new HashMap<String, Future<Path>>(),
          new HashMap<String, Accumulator<?, ?>>(),
          new UnregisteredMetricsGroup()));
  assertEquals(inputFormat.getRuntimeContext().getIndexOfThisSubtask(), 1);
  assertEquals(inputFormat.getRuntimeContext().getNumberOfParallelSubtasks(), 3);
}
origin: apache/flink

private <IN> void executeDataSink(GenericDataSinkBase<?> sink, int superStep) throws Exception {
  Operator<?> inputOp = sink.getInput();
  if (inputOp == null) {
    throw new InvalidProgramException("The data sink " + sink.getName() + " has no input.");
  }
  
  @SuppressWarnings("unchecked")
  List<IN> input = (List<IN>) execute(inputOp);
  
  @SuppressWarnings("unchecked")
  GenericDataSinkBase<IN> typedSink = (GenericDataSinkBase<IN>) sink;
  // build the runtime context and compute broadcast variables, if necessary
  TaskInfo taskInfo = new TaskInfo(typedSink.getName(), 1, 0, 1, 0);
  RuntimeUDFContext ctx;
  MetricGroup metrics = new UnregisteredMetricsGroup();
    
  if (RichOutputFormat.class.isAssignableFrom(typedSink.getUserCodeWrapper().getUserCodeClass())) {
    ctx = superStep == 0 ? new RuntimeUDFContext(taskInfo, userCodeClassLoader, executionConfig, cachedFiles, accumulators, metrics) :
        new IterationRuntimeUDFContext(taskInfo, userCodeClassLoader, executionConfig, cachedFiles, accumulators, metrics);
  } else {
    ctx = null;
  }
  typedSink.executeOnCollections(input, ctx, executionConfig);
}

origin: apache/flink

TaskInfo taskInfo = new TaskInfo(typedOp.getName(), 1, 0, 1, 0);
RuntimeUDFContext ctx;
origin: apache/flink

final TaskInfo taskInfo = new TaskInfo(taskName, 1, 0, 1, 0);
origin: apache/flink

@SuppressWarnings({"rawtypes", "unchecked"})
@Before
public void setup() {
  joiner = new MockRichFlatJoinFunction();
  baseOperator =
    new OuterJoinOperatorBase(joiner,
      new BinaryOperatorInformation(BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO,
        BasicTypeInfo.STRING_TYPE_INFO), new int[0], new int[0], "TestJoiner", null);
  executionConfig = new ExecutionConfig();
  String taskName = "Test rich outer join function";
  TaskInfo taskInfo = new TaskInfo(taskName, 1, 0, 1, 0);
  HashMap<String, Accumulator<?, ?>> accumulatorMap = new HashMap<>();
  HashMap<String, Future<Path>> cpTasks = new HashMap<>();
  runtimeContext = new RuntimeUDFContext(taskInfo, null, executionConfig, cpTasks,
    accumulatorMap, new UnregisteredMetricsGroup());
}
origin: apache/flink

final TaskInfo taskInfo = new TaskInfo(taskName, 1, 0, 1, 0);
final HashMap<String, Accumulator<?, ?>> accumulatorMap = new HashMap<String, Accumulator<?, ?>>();
final HashMap<String, Future<Path>> cpTasks = new HashMap<>();
origin: apache/flink

final HashMap<String, Accumulator<?, ?>> accumulatorMap = new HashMap<String, Accumulator<?, ?>>();
final HashMap<String, Future<Path>> cpTasks = new HashMap<>();
final TaskInfo taskInfo = new TaskInfo(taskName, 1, 0, 1, 0);
ExecutionConfig executionConfig = new ExecutionConfig();
executionConfig.disableObjectReuse();
origin: apache/flink

    new Tuple2<>("bar", 4)));
final TaskInfo taskInfo = new TaskInfo(taskName, 1, 0, 1, 0);
origin: apache/flink

    new Tuple2<>("bar", 4)));
final TaskInfo taskInfo = new TaskInfo(taskName, 1, 0, 1, 0);
origin: apache/flink

final HashMap<String, Accumulator<?, ?>> accumulatorMap = new HashMap<String, Accumulator<?, ?>>();
final HashMap<String, Future<Path>> cpTasks = new HashMap<>();
final TaskInfo taskInfo = new TaskInfo("test_sink", 1, 0, 1, 0);
executionConfig.disableObjectReuse();
in.reset();
origin: apache/flink

final TaskInfo taskInfo = new TaskInfo("op", 1, 0, 1, 0);
ExecutionConfig executionConfig = new ExecutionConfig();
origin: apache/flink

this.taskInfo = new TaskInfo(
  "", /* task name */
  1, /* num key groups / max parallelism */
origin: apache/flink

final HashMap<String, Accumulator<?, ?>> accumulators = new HashMap<String, Accumulator<?, ?>>();
final HashMap<String, Future<Path>> cpTasks = new HashMap<>();
final TaskInfo taskInfo = new TaskInfo("Test UDF", 4, 0, 4, 0);
final RuntimeContext ctx = new RuntimeUDFContext(
    taskInfo, null, executionConfig, cpTasks, accumulators, new UnregisteredMetricsGroup());
origin: apache/flink

final TaskInfo taskInfo = new TaskInfo("test_source", 1, 0, 1, 0);
origin: com.alibaba.blink/flink-core

private <OUT> List<OUT> executeDataSource(GenericDataSourceBase<?, ?> source, int superStep)
    throws Exception {
  @SuppressWarnings("unchecked")
  GenericDataSourceBase<OUT, ?> typedSource = (GenericDataSourceBase<OUT, ?>) source;
  // build the runtime context and compute broadcast variables, if necessary
  TaskInfo taskInfo = new TaskInfo(typedSource.getName(), 1, 0, 1, 0);
  
  RuntimeUDFContext ctx;
  MetricGroup metrics = new UnregisteredMetricsGroup();
  if (RichInputFormat.class.isAssignableFrom(typedSource.getUserCodeWrapper().getUserCodeClass())) {
    ctx = superStep == 0 ? new RuntimeUDFContext(taskInfo, classLoader, executionConfig, cachedFiles, accumulatorRegistry, metrics) :
        new IterationRuntimeUDFContext(taskInfo, classLoader, executionConfig, cachedFiles, accumulatorRegistry, metrics);
  } else {
    ctx = null;
  }
  return typedSource.executeOnCollections(ctx, executionConfig);
}

origin: org.apache.flink/flink-core

private <OUT> List<OUT> executeDataSource(GenericDataSourceBase<?, ?> source, int superStep)
    throws Exception {
  @SuppressWarnings("unchecked")
  GenericDataSourceBase<OUT, ?> typedSource = (GenericDataSourceBase<OUT, ?>) source;
  // build the runtime context and compute broadcast variables, if necessary
  TaskInfo taskInfo = new TaskInfo(typedSource.getName(), 1, 0, 1, 0);
  
  RuntimeUDFContext ctx;
  MetricGroup metrics = new UnregisteredMetricsGroup();
  if (RichInputFormat.class.isAssignableFrom(typedSource.getUserCodeWrapper().getUserCodeClass())) {
    ctx = superStep == 0 ? new RuntimeUDFContext(taskInfo, userCodeClassLoader, executionConfig, cachedFiles, accumulators, metrics) :
        new IterationRuntimeUDFContext(taskInfo, userCodeClassLoader, executionConfig, cachedFiles, accumulators, metrics);
  } else {
    ctx = null;
  }
  return typedSource.executeOnCollections(ctx, executionConfig);
}

org.apache.flink.api.common.TaskInfo.<init>

Popular methods of TaskInfo

  • getIndexOfThisSubtask
    Gets the number of this parallel subtask. The numbering starts from 0 and goes up to parallelism-1 (parallelism as returned by getNumberOfParallelSubtasks()).
  • getNumberOfParallelSubtasks
    Gets the parallelism with which the parallel task runs.
  • getTaskNameWithSubtasks
    Returns the name of the task, appended with the subtask indicator, such as "MyTask (3/6)", where 3 would be (getIndexOfThisSubtask() + 1) and 6 would be getNumberOfParallelSubtasks().
  • getTaskName
    Returns the name of the task
  • getMaxNumberOfParallelSubtasks
    Gets the max parallelism aka the max number of subtasks.
  • getAttemptNumber
    Gets the attempt number of this parallel subtask. First attempt is numbered 0. The attempt number corresponds to the number of times this task has been restarted (after failure/cancellation) since the job was initially started.
  • getAllocationIDAsString
    Returns the allocation id for where this task is executed.
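
Nearly all of the snippets above hand the freshly built TaskInfo straight to a RuntimeUDFContext, together with a class loader, an ExecutionConfig, the distributed-cache entries, the accumulator map, and a metrics group. The sketch below condenses that recurring pattern into a runnable form; it assumes the six-argument RuntimeUDFContext constructor used in the snippets, and the import paths are the usual flink-core locations rather than anything shown on this page:

import java.util.HashMap;
import java.util.concurrent.Future;

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.TaskInfo;
import org.apache.flink.api.common.accumulators.Accumulator;
import org.apache.flink.api.common.functions.util.RuntimeUDFContext;
import org.apache.flink.core.fs.Path;
import org.apache.flink.metrics.groups.UnregisteredMetricsGroup;

public class RuntimeUDFContextSketch {

  public static void main(String[] args) {
    ExecutionConfig executionConfig = new ExecutionConfig();

    // Single subtask: max parallelism 1, subtask index 0, parallelism 1, first attempt.
    TaskInfo taskInfo = new TaskInfo("sketch task", 1, 0, 1, 0);

    RuntimeUDFContext ctx = new RuntimeUDFContext(
        taskInfo,
        RuntimeUDFContextSketch.class.getClassLoader(),
        executionConfig,
        new HashMap<String, Future<Path>>(),       // distributed cache entries
        new HashMap<String, Accumulator<?, ?>>(),  // accumulators
        new UnregisteredMetricsGroup());           // no-op metrics

    // The context exposes the TaskInfo values to user functions.
    System.out.println(ctx.getTaskName());                  // "sketch task"
    System.out.println(ctx.getNumberOfParallelSubtasks());  // 1
    System.out.println(ctx.getIndexOfThisSubtask());        // 0
  }
}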
