FieldTransformOperation

How to use FieldTransformOperation in co.cask.cdap.etl.api.lineage.field

Best Java code snippets using co.cask.cdap.etl.api.lineage.field.FieldTransformOperation (Showing top 6 results out of 315)

origin: co.cask.hydrator/core-plugins

// Record a join as a single transform from all join input fields to the joined output fields
FieldOperation joinOperation = new FieldTransformOperation("Join", JOIN_OPERATION_DESCRIPTION, joinInputs,
                                                           new ArrayList<>(joinOutputs));
operations.add(joinOperation);

// Identity transform: the staged input field is carried over to the output unchanged
FieldOperation identity = new FieldTransformOperation(operationName, IDENTITY_OPERATION_DESCRIPTION,
                                                      Collections.singletonList(stagedInputField),
                                                      outputFieldInfo.name);

// Rename transform: the staged input field is emitted under the new output field name
FieldOperation transform = new FieldTransformOperation(operationName, RENAME_OPERATION_DESCRIPTION,
                                                       Collections.singletonList(stagedInputField),
                                                       outputFieldInfo.name);
origin: co.cask.cdap/cdap-data-pipeline

case TRANSFORM:
 FieldTransformOperation transform = (FieldTransformOperation) fieldOperation;
 // Convert the plugin-level input field names into platform-level InputFields for this stage
 List<InputField> inputFields = createInputFields(transform.getInputFields(), stageName,
                                                  processedOperations);
 newOperation = new TransformOperation(newOperationName, transform.getDescription(), inputFields,
                                       transform.getOutputFields());
 currentOperationOutputs.addAll(transform.getOutputFields());
 break;
case WRITE:
origin: co.cask.cdap/cdap-data-pipeline

case TRANSFORM:
 FieldTransformOperation transform = (FieldTransformOperation) pipelineOperation;
 // Every input of this transform must have been produced by an earlier operation
 validateInputs(pipelineOperation.getName(), transform.getInputFields(), validInputsSoFar);
 updateInvalidOutputs(transform.getInputFields(), unusedOutputs, redundantOutputs);
 validInputsSoFar.addAll(transform.getOutputFields());
 // Track which operation produced each output field so unused outputs can be reported later
 for (String field : transform.getOutputFields()) {
  List<String> origins = unusedOutputs.computeIfAbsent(field, k -> new ArrayList<>());
  origins.add(pipelineOperation.getName());
 }
 break;
origin: co.cask.hydrator/core-plugins

@Override
public void prepareRun(StageSubmitterContext context) throws Exception {
 super.prepareRun(context);
 List<String> inputFields = new ArrayList<>();
 List<String> outputFields = new ArrayList<>();
 Schema inputSchema = context.getInputSchema();
 if (SchemaValidator.canRecordLineage(inputSchema, "input")) {
  //noinspection ConstantConditions
  inputFields = inputSchema.getFields().stream().map(Schema.Field::getName).collect(Collectors.toList());
 }
 Schema outputSchema = context.getOutputSchema();
 if (SchemaValidator.canRecordLineage(outputSchema, "output")) {
  //noinspection ConstantConditions
  outputFields = outputSchema.getFields().stream().map(Schema.Field::getName).collect(Collectors.toList());
 }
 FieldOperation dataPrepOperation = new FieldTransformOperation("Python", config.script, inputFields, outputFields);
 context.record(Collections.singletonList(dataPrepOperation));
}
origin: co.cask.hydrator/core-plugins

@Override
public void prepareRun(StageSubmitterContext context) throws Exception {
 super.prepareRun(context);
 List<String> inputFields = new ArrayList<>();
 List<String> outputFields = new ArrayList<>();
 Schema inputSchema = context.getInputSchema();
 if (SchemaValidator.canRecordLineage(inputSchema, "input")) {
  //noinspection ConstantConditions
  inputFields = inputSchema.getFields().stream().map(Schema.Field::getName).collect(Collectors.toList());
 }
 Schema outputSchema = context.getOutputSchema();
 if (SchemaValidator.canRecordLineage(outputSchema, "output")) {
  //noinspection ConstantConditions
  outputFields = outputSchema.getFields().stream().map(Schema.Field::getName).collect(Collectors.toList());
 }
 FieldOperation dataPrepOperation = new FieldTransformOperation("JavaScript", config.script, inputFields,
                                 outputFields);
 context.record(Collections.singletonList(dataPrepOperation));
}
origin: co.cask.hydrator/core-plugins

@Override
public void prepareRun(BatchAggregatorContext context) throws Exception {
 super.prepareRun(context);
 LinkedList<FieldOperation> fllOperations = new LinkedList<>();
 // in configurePipeline all the necessary checks have been performed already to set output schema
 if (SchemaValidator.canRecordLineage(context.getOutputSchema(), "output")) {
  Schema inputSchema = context.getInputSchema();
  // for every function record the field level operation details
  for (GroupByConfig.FunctionInfo functionInfo : conf.getAggregates()) {
   Schema.Field outputSchemaField = getOutputSchemaField(functionInfo, inputSchema);
   String operationName = String.format("Group %s", functionInfo.getField());
   String description = String.format("Aggregate function applied: '%s'.", functionInfo.getFunction());
   FieldOperation operation = new FieldTransformOperation(operationName, description,
                               Collections.singletonList(functionInfo.getField()),
                               outputSchemaField.getName());
   fllOperations.add(operation);
  }
 }
 context.record(fllOperations);
}
co.cask.cdap.etl.api.lineage.field.FieldTransformOperation

Javadoc

Represents a transform operation from a collection of input fields to a collection of output fields. A minimal usage sketch follows the method list below.

Most used methods

  • <init>
    Creates an instance of a transform operation.
  • getDescription
  • getInputFields
  • getOutputFields
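
The snippet below is a minimal, self-contained sketch of the constructor and getters listed above; the operation name, description, and field names ("first_name", "last_name", "full_name") are hypothetical and chosen purely for illustration.

import co.cask.cdap.etl.api.lineage.field.FieldTransformOperation;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

public class FieldTransformOperationExample {
 public static void main(String[] args) {
  // Hypothetical transform: concatenate "first_name" and "last_name" into "full_name"
  List<String> inputFields = Arrays.asList("first_name", "last_name");
  FieldTransformOperation concat =
   new FieldTransformOperation("Concat", "Concatenated first and last name into full name",
                               inputFields, Collections.singletonList("full_name"));

  // Read the recorded lineage information back through the getters
  System.out.println(concat.getDescription());  // Concatenated first and last name into full name
  System.out.println(concat.getInputFields());  // [first_name, last_name]
  System.out.println(concat.getOutputFields()); // [full_name]
 }
}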
