@Override public State getFinalState() { State state = new State(); state.setProp("RecordsWritten", recordsWritten()); try { state.setProp("BytesWritten", bytesWritten()); } catch (Exception exception) { // If Writer fails to return bytesWritten, it might not be implemented, or implemented incorrectly. // Omit property instead of failing. } return state; }
/**
 * Whether data should be committed by the job (as opposed to being committed by the tasks).
 *
 * <p>Data should be committed by the job if any of the following holds:
 * <ul>
 *   <li>{@link ConfigurationKeys#JOB_COMMIT_POLICY_KEY} is set to "full"
 *       ({@link JobCommitPolicy#COMMIT_ON_FULL_SUCCESS}),</li>
 *   <li>{@link ConfigurationKeys#PUBLISH_DATA_AT_JOB_LEVEL} is set to true, or</li>
 *   <li>a job-level data publisher is specified via
 *       {@link ConfigurationKeys#JOB_DATA_PUBLISHER_TYPE}.</li>
 * </ul>
 */
private static boolean shouldCommitDataInJob(State state) {
  boolean jobCommitPolicyIsFull =
      JobCommitPolicy.getCommitPolicy(state.getProperties()) == JobCommitPolicy.COMMIT_ON_FULL_SUCCESS;
  boolean publishDataAtJobLevel = state.getPropAsBoolean(ConfigurationKeys.PUBLISH_DATA_AT_JOB_LEVEL,
      ConfigurationKeys.DEFAULT_PUBLISH_DATA_AT_JOB_LEVEL);
  boolean jobDataPublisherSpecified =
      !Strings.isNullOrEmpty(state.getProp(ConfigurationKeys.JOB_DATA_PUBLISHER_TYPE));
  return jobCommitPolicyIsFull || publishDataAtJobLevel || jobDataPublisherSpecified;
}
/**
 * Sets this builder's configuration from a defensive copy of {@code props},
 * so later mutations of the argument do not leak into this builder.
 *
 * @param props source of configuration properties
 * @return this builder, typed as {@code T} for fluent chaining
 */
@SuppressWarnings("unchecked")
public T withProps(State props) {
  State copy = new State(props.getProperties());
  this.props = copy;
  return (T) this;
}
/**
 * Builds a cache key from the encryption-related properties of {@code state};
 * states yielding equal fields map to the same cached instance.
 */
public CachedInstanceKey(State state) { this.numOfEncryptionKeys = state.getPropAsInt(ConfigurationKeys.NUMBER_OF_ENCRYPT_KEYS, ConfigurationKeys.DEFAULT_NUMBER_OF_MASTER_PASSWORDS); this.useStrongEncryptor = shouldUseStrongEncryptor(state);
  // NOTE(review): both fsURI and masterPasswordFile are read from the same
  // property (ENCRYPT_KEY_LOC) — this looks like a copy/paste defect. Confirm
  // whether fsURI should come from a dedicated filesystem-URI property.
  this.fsURI = state.getProp(ConfigurationKeys.ENCRYPT_KEY_LOC); this.masterPasswordFile = state.getProp(ConfigurationKeys.ENCRYPT_KEY_LOC); }
/**
 * Finds all {@link HivePartitionVersion}s for the given dataset.
 *
 * <p>Copies this object's state, narrows the dataset whitelist to the configured
 * version whitelist, and delegates to {@code setVersions} to populate
 * {@code this.versions}.
 *
 * @param name dataset name passed through to {@code setVersions}
 * @param urn  dataset URN, used only for logging
 * @return the versions discovered for the dataset
 * @throws IOException if version discovery fails
 */
private List<HivePartitionVersion> findVersions(String name, String urn) throws IOException {
  // Fail fast when the mandatory whitelist property is absent.
  Preconditions.checkArgument(this.state.contains(ComplianceConfigurationKeys.HIVE_VERSIONS_WHITELIST),
      "Missing required property " + ComplianceConfigurationKeys.HIVE_VERSIONS_WHITELIST);
  State versionState = new State(this.state);
  versionState.setProp(ComplianceConfigurationKeys.HIVE_DATASET_WHITELIST,
      this.state.getProp(ComplianceConfigurationKeys.HIVE_VERSIONS_WHITELIST));
  setVersions(name, versionState);
  log.info("Found " + this.versions.size() + " versions for the dataset " + urn);
  return this.versions;
}
/**
 * Instantiates the configured {@link HiveUnitUpdateProvider}.
 *
 * <p>The provider class is taken from {@code OPTIONAL_HIVE_UNIT_UPDATE_PROVIDER_CLASS_KEY},
 * falling back to {@code DEFAULT_HIVE_UNIT_UPDATE_PROVIDER_CLASS}, and constructed via its
 * first matching constructor, preferring one that accepts a {@code FileSystem}.
 *
 * @throws RuntimeException wrapping any reflection or I/O failure
 */
public static HiveUnitUpdateProvider create(State state) {
  String providerClassName =
      state.getProp(OPTIONAL_HIVE_UNIT_UPDATE_PROVIDER_CLASS_KEY, DEFAULT_HIVE_UNIT_UPDATE_PROVIDER_CLASS);
  try {
    Class<?> providerClass = Class.forName(providerClassName);
    return (HiveUnitUpdateProvider) GobblinConstructorUtils.invokeFirstConstructor(providerClass,
        ImmutableList.<Object>of(getFileSystem(state.getProperties())), ImmutableList.of());
  } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException | InstantiationException
      | ClassNotFoundException | IOException e) {
    throw new RuntimeException(e);
  }
}
/**
 * Configures {@code state} so filesystem access is proxied as the configured
 * compliance proxy user, with the super user's Kerberos principal. No-op when
 * proxying is disabled.
 */
public static void setProxySettingsForFs(State state) {
  boolean shouldProxy = state.getPropAsBoolean(ComplianceConfigurationKeys.GOBBLIN_COMPLIANCE_SHOULD_PROXY,
      ComplianceConfigurationKeys.GOBBLIN_COMPLIANCE_DEFAULT_SHOULD_PROXY);
  if (!shouldProxy) {
    return;
  }
  String proxyUser = state.getProp(ComplianceConfigurationKeys.GOBBLIN_COMPLIANCE_PROXY_USER);
  String superUser = state.getProp(ComplianceConfigurationKeys.GOBBLIN_COMPLIANCE_SUPER_USER);
  String realm = state.getProp(ConfigurationKeys.KERBEROS_REALM);
  state.setProp(ConfigurationKeys.SHOULD_FS_PROXY_AS_USER, true);
  state.setProp(ConfigurationKeys.FS_PROXY_AS_USER_NAME, proxyUser);
  // The super user proxies on behalf of others using its host-qualified principal.
  state.setProp(ConfigurationKeys.SUPER_USER_NAME_TO_PROXY_AS_OTHERS,
      HostUtils.getPrincipalUsingHostname(superUser, realm));
  state.setProp(ConfigurationKeys.FS_PROXY_AUTH_METHOD, ConfigurationKeys.KERBEROS_AUTH);
}
/**
 * Returns the wrapped writer's final state (when it implements {@link FinalState})
 * plus this wrapper's accumulated throttled time.
 */
@Override
public State getFinalState() {
  State finalState = new State();
  if (this.writer instanceof FinalState) {
    finalState.addAll(((FinalState) this.writer).getFinalState());
  } else {
    // Best effort: the wrapped writer exposes no final state of its own.
    LOG.warn("Wrapped writer does not implement FinalState: " + this.writer.getClass());
  }
  finalState.setProp(THROTTLED_TIME_KEY, this.throttledTime);
  return finalState;
}
/**
 * Verifies that {@code existingState} contains every entry present in
 * {@code newState}, updating {@link #result} accordingly.
 *
 * @param existingState the previously recorded state
 * @param newState      the newly computed state whose entries must all be present
 */
protected void checkExistingIsSuperstate(State existingState, State newState) {
  checkExistingIsSuperset(
      existingState.getProperties().entrySet(),
      newState.getProperties().entrySet());
}
/**
 * TODO: Figure out what this means for checkpointing.
 *
 * Returns the final state for this object. The default implementation reports
 * nothing; concrete subclasses can override to contribute information to the
 * task state.
 *
 * @return an empty {@link org.apache.gobblin.configuration.State}
 */
@Override
public State getFinalState() {
  // No object-specific state to report by default.
  return new State();
}
public HivePartitionVersionRetentionReaper(CleanableDataset dataset, DatasetVersion version, List<String> nonDeletableVersionLocations, State state) { super(dataset, version, nonDeletableVersionLocations, state); this.versionOwner = ((HivePartitionVersion) this.datasetVersion).getOwner(); Preconditions.checkArgument(this.state.contains(ComplianceConfigurationKeys.BACKUP_OWNER), "Missing required property " + ComplianceConfigurationKeys.BACKUP_OWNER); this.backUpOwner = Optional.fromNullable(this.state.getProp(ComplianceConfigurationKeys.BACKUP_OWNER)); this.simulate = this.state.getPropAsBoolean(ComplianceConfigurationKeys.COMPLIANCE_JOB_SIMULATE, ComplianceConfigurationKeys.DEFAULT_COMPLIANCE_JOB_SIMULATE); }
/**
 * Creates a retention policy for reaping Hive partition versions.
 *
 * <p>Requires {@code REAPER_RETENTION_DAYS} to be present in the state.
 */
public HivePartitionVersionRetentionReaperPolicy(State state, HivePartitionDataset dataset) {
  super(state, dataset);
  Preconditions.checkArgument(state.contains(ComplianceConfigurationKeys.REAPER_RETENTION_DAYS),
      "Missing required property " + ComplianceConfigurationKeys.REAPER_RETENTION_DAYS);
  this.retentionDays = state.getPropAsInt(ComplianceConfigurationKeys.REAPER_RETENTION_DAYS);
}
/**
 * Records the number of schema-evolution DDL statements as SLA event metadata.
 *
 * @param state         state to write the metadata into
 * @param evolutionDDLs the DDL statements applied; {@code null} counts as zero
 */
public static void setEvolutionMetadata(State state, List<String> evolutionDDLs) {
  int ddlCount = (evolutionDDLs == null) ? 0 : evolutionDDLs.size();
  state.setProp(EventConstants.SCHEMA_EVOLUTION_DDLS_NUM, ddlCount);
}
/**
 * Returns true if the current publisher can be skipped.
 *
 * <p>A skippable publisher has no effect on state persistence. It is skipped
 * when a job is cancelled and all finished tasks are configured to be committed.
 */
public boolean canBeSkipped() {
  boolean skippable = this.state.getPropAsBoolean(ConfigurationKeys.DATA_PUBLISHER_CAN_BE_SKIPPED,
      ConfigurationKeys.DEFAULT_DATA_PUBLISHER_CAN_BE_SKIPPED);
  return skippable;
}
/**
 * Populates SLA event metadata onto the dataset's job properties.
 *
 * @deprecated use {@link #getEventSubmitterBuilder(Dataset, Optional, FileSystem)}
 */
@Deprecated
public static void populateState(Dataset dataset, Optional<Job> job, FileSystem fs) {
  dataset.jobProps().setProp(SlaEventKeys.DATASET_URN_KEY, dataset.getUrn());
  dataset.jobProps().setProp(SlaEventKeys.PARTITION_KEY,
      dataset.jobProps().getProp(MRCompactor.COMPACTION_JOB_DEST_PARTITION, ""));
  dataset.jobProps().setProp(SlaEventKeys.DEDUPE_STATUS_KEY, getOutputDedupeStatus(dataset.jobProps()));
  dataset.jobProps().setProp(SlaEventKeys.PREVIOUS_PUBLISH_TS_IN_MILLI_SECS_KEY,
      getPreviousPublishTime(dataset, fs));
  dataset.jobProps().setProp(SlaEventKeys.RECORD_COUNT_KEY, getRecordCount(job));
}
/**
 * Reads the configured key names, if any.
 *
 * @return the list configured under {@code CONF_PREFIX + "keys"}, or
 *         {@link Optional#empty()} when the property is absent
 */
private Optional<Iterable<String>> getKeys (State state) {
  String keysProp = CONF_PREFIX + "keys";
  if (!state.contains(keysProp)) {
    return Optional.empty();
  }
  Iterable<String> configuredKeys = state.getPropAsList(keysProp);
  return Optional.ofNullable(configuredKeys);
}
private void createFileSystem(String uri) throws IOException, InterruptedException, URISyntaxException { if (this.state.getPropAsBoolean(ConfigurationKeys.SHOULD_FS_PROXY_AS_USER, ConfigurationKeys.DEFAULT_SHOULD_FS_PROXY_AS_USER)) { // Initialize file system as a proxy user. this.fs = new ProxiedFileSystemWrapper().getProxiedFileSystem(this.state, ProxiedFileSystemWrapper.AuthType.TOKEN, this.state.getProp(ConfigurationKeys.FS_PROXY_AS_USER_TOKEN_FILE), uri, configuration); } else { // Initialize file system as the current user. this.fs = FileSystem.newInstance(URI.create(uri), this.configuration); } }
/**
 * Builds the composite {@link JobListener} for this job.
 *
 * <p>Listener aliases are read from {@code GOBBLIN_CUSTOM_JOB_LISTENERS}, defaulting to
 * {@link EmailNotificationJobListener}; each alias is resolved to a {@link JobListener}
 * class and instantiated with its no-arg constructor.
 *
 * @return a {@link CompositeJobListener} wrapping all configured listeners
 * @throws IllegalArgumentException if an alias cannot be resolved or instantiated
 */
protected JobListener initJobListener() {
  CompositeJobListener compositeJobListener = new CompositeJobListener();
  List<String> listeners = new State(props).getPropAsList(GOBBLIN_CUSTOM_JOB_LISTENERS,
      EmailNotificationJobListener.class.getSimpleName());
  try {
    // The resolver is loop-invariant; build it once instead of once per alias.
    ClassAliasResolver<JobListener> resolver = new ClassAliasResolver<>(JobListener.class);
    for (String listenerAlias : listeners) {
      compositeJobListener.addJobListener(resolver.resolveClass(listenerAlias).newInstance());
    }
  } catch (IllegalAccessException | InstantiationException | ClassNotFoundException e) {
    throw new IllegalArgumentException(e);
  }
  return compositeJobListener;
}