/**
 * Registers a text-file output for this batch run.
 * The output directory comes from {@code config.dirName}. When {@code config.name}
 * is set it names the output and {@code config.alias} becomes its alias; otherwise
 * {@code config.alias} alone names the output.
 */
@Override
public void prepareRun(BatchSinkContext context) throws Exception {
  OutputFormatProvider formatProvider = new BasicOutputFormatProvider(
    TextOutputFormat.class.getCanonicalName(),
    ImmutableMap.of(TextOutputFormat.OUTDIR, config.dirName));
  if (config.name == null) {
    // No explicit name configured: the alias doubles as the output name.
    context.addOutput(Output.of(config.alias, formatProvider));
  } else {
    Output namedOutput = Output.of(config.name, formatProvider);
    namedOutput.alias(config.alias);
    context.addOutput(namedOutput);
  }
}
/**
 * Registers the configured table as this run's output and lazily creates the
 * runtime KeyValueTable dataset when it does not already exist.
 */
@Override
public void prepareRun(BatchSinkContext context) throws Exception {
  // Output always targets the configured table dataset.
  context.addOutput(Output.ofDataset(config.tableName));
  boolean runtimeDatasetMissing = !context.datasetExists(config.runtimeDatasetName);
  if (runtimeDatasetMissing) {
    context.createDataset(config.runtimeDatasetName, KeyValueTable.class.getName(), DatasetProperties.EMPTY);
  }
}
/**
 * Ensures the target "table" dataset exists — creating it with empty properties
 * when absent — and registers it as this run's output.
 */
@Override
public void prepareRun(BatchSinkContext context) throws Exception {
  String table = config.tableName;
  if (!context.datasetExists(table)) {
    context.createDataset(table, "table", DatasetProperties.EMPTY);
  }
  context.addOutput(Output.ofDataset(table));
}
/**
 * Creates the configured dataset on demand and registers it as this run's output.
 * The dataset name and type are read from the plugin properties; all properties
 * are forwarded as dataset properties on creation.
 *
 * @throws DatasetManagementException if dataset existence checks or creation fail
 */
@Override
public void prepareRun(BatchSinkContext context) throws DatasetManagementException {
  Map<String, String> properties = getProperties();
  // Hoist the repeated map lookups: the NAME key was previously read three times
  // (and TYPE once) via properties.get(...); locals make the intent explicit.
  String datasetName = properties.get(Properties.BatchReadableWritable.NAME);
  String datasetType = properties.get(Properties.BatchReadableWritable.TYPE);
  if (!context.datasetExists(datasetName)) {
    context.createDataset(datasetName, datasetType,
                          DatasetProperties.builder().addAll(properties).build());
  }
  context.addOutput(Output.ofDataset(datasetName));
}
}
@Override public final void prepareRun(BatchSinkContext context) throws InstantiationException { config.validate(); // set format specific properties. OutputFormatProvider outputFormatProvider = context.newPluginInstance(FORMAT_PLUGIN_ID); // record field level lineage information // needs to happen before context.addOutput(), otherwise an external dataset without schema will be created. Schema schema = config.getSchema(); if (schema == null) { schema = context.getInputSchema(); } LineageRecorder lineageRecorder = new LineageRecorder(context, config.getReferenceName()); lineageRecorder.createExternalDataset(schema); if (schema != null && schema.getFields() != null && !schema.getFields().isEmpty()) { recordLineage(lineageRecorder, schema.getFields().stream().map(Schema.Field::getName).collect(Collectors.toList())); } Map<String, String> outputProperties = new HashMap<>(outputFormatProvider.getOutputFormatConfiguration()); outputProperties.putAll(getFileSystemProperties(context)); outputProperties.put(FileOutputFormat.OUTDIR, getOutputDir(context.getLogicalStartTime())); context.addOutput(Output.of(config.getReferenceName(), new SinkOutputFormatProvider(outputFormatProvider.getOutputFormatClassName(), outputProperties))); }
tpfsSinkConfig.timeZone); context.addOutput(Output.ofDataset(tpfsSinkConfig.name, sinkArgs));
@Override public void prepareRun(BatchSinkContext context) throws DatasetManagementException, InstantiationException { // if macros were provided, the dataset still needs to be created config.validate(); OutputFormatProvider outputFormatProvider = context.newPluginInstance(FORMAT_PLUGIN_ID); DatasetProperties datasetProperties = createProperties(outputFormatProvider); if (!context.datasetExists(config.getName())) { context.createDataset(config.getName(), PartitionedFileSet.class.getName(), datasetProperties); } PartitionedFileSet files = context.getDataset(config.getName()); snapshotFileSet = new SnapshotFileSet(files); // need to use all the dataset properties as arguments in case the dataset already exists, // created by the previous version of this plugin before output format plugins were used. // in that scenario, the output format attached to the dataset properties will be incorrect, // and must be overridden here. Map<String, String> arguments = new HashMap<>(datasetProperties.getProperties()); if (config.getFileProperties() != null) { arguments = GSON.fromJson(config.getFileProperties(), MAP_TYPE); } context.addOutput(Output.ofDataset(config.getName(), snapshotFileSet.getOutputArguments(context.getLogicalStartTime(), arguments))); }