/** Forwards the user-provided uid hash to the wrapped transformation; fluent API. */
@Override
public DataStreamSink<T> setUidHash(String uidHash) {
    // Delegate to the underlying transformation, then return this sink for call chaining.
    this.transformation.setUidHash(uidHash);
    return this;
}
/**
 * Sets a user-provided hash for this operator. This will be used as-is to create the
 * JobVertexID.
 *
 * <p>The user-provided hash is an alternative to the generated hashes, that is considered in
 * case identifying an operator through the default hash mechanics fails (e.g. because of
 * changes between Flink versions).
 *
 * <p><strong>Important</strong>: this should be used as a workaround or for troubleshooting.
 * The provided hash needs to be unique per transformation and job. Otherwise, job submission
 * will fail. Furthermore, you cannot assign a user-specified hash to intermediate nodes in an
 * operator chain and trying so will let your job fail.
 *
 * <p>A use case for this is in migration between Flink versions or changing the jobs in a way
 * that changes the automatically generated hashes. In this case, providing the previous hashes
 * directly through this method (e.g. obtained from old logs) can help to re-establish a lost
 * mapping from states to their target operator.
 *
 * @param uidHash The user-provided hash for this operator. This will become the JobVertexID,
 *     which is shown in the logs and web ui.
 * @return The operator with the user-provided hash.
 */
@PublicEvolving
public DataStreamSink<T> setUidHash(String uidHash) {
    // Delegates to the underlying transformation; returns this sink for fluent chaining.
    transformation.setUidHash(uidHash);
    return this;
}
/**
 * Sets a user-provided hash for this operator. This will be used as-is to create the
 * JobVertexID.
 *
 * <p>The user-provided hash is an alternative to the generated hashes, that is considered in
 * case identifying an operator through the default hash mechanics fails (e.g. because of
 * changes between Flink versions).
 *
 * <p><strong>Important</strong>: this should be used as a workaround or for troubleshooting.
 * The provided hash needs to be unique per transformation and job. Otherwise, job submission
 * will fail. Furthermore, you cannot assign a user-specified hash to intermediate nodes in an
 * operator chain and trying so will let your job fail.
 *
 * <p>A use case for this is in migration between Flink versions or changing the jobs in a way
 * that changes the automatically generated hashes. In this case, providing the previous hashes
 * directly through this method (e.g. obtained from old logs) can help to re-establish a lost
 * mapping from states to their target operator.
 *
 * @param uidHash The user-provided hash for this operator. This will become the JobVertexID,
 *     which is shown in the logs and web ui.
 * @return The operator with the user-provided hash.
 */
@PublicEvolving
public CassandraSink<IN> setUidHash(String uidHash) {
    // The sink is backed by either a DataStreamSink transformation or a plain stream
    // transformation; apply the hash to whichever one is in use.
    if (useDataStreamSink) {
        getSinkTransformation().setUidHash(uidHash);
        return this;
    }
    getStreamTransformation().setUidHash(uidHash);
    return this;
}
/** Applies the user-provided uid hash to the backing transformation; fluent API. */
@Override
public DataStreamSink<T> setUidHash(String uidHash) {
    // Hand the hash through to the wrapped transformation and keep the fluent contract.
    this.transformation.setUidHash(uidHash);
    return this;
}
/** Passes the user-provided uid hash down to the underlying transformation; fluent API. */
@Override
public DataStreamSink<T> setUidHash(String uidHash) {
    // Simple delegation; return this sink so callers can chain further configuration.
    this.transformation.setUidHash(uidHash);
    return this;
}
/**
 * Sets a user-provided hash for this operator. This will be used as-is to create the
 * JobVertexID.
 *
 * <p>The user-provided hash is an alternative to the generated hashes, that is considered in
 * case identifying an operator through the default hash mechanics fails (e.g. because of
 * changes between Flink versions).
 *
 * <p><strong>Important</strong>: this should be used as a workaround or for troubleshooting.
 * The provided hash needs to be unique per transformation and job. Otherwise, job submission
 * will fail. Furthermore, you cannot assign a user-specified hash to intermediate nodes in an
 * operator chain and trying so will let your job fail.
 *
 * <p>A use case for this is in migration between Flink versions or changing the jobs in a way
 * that changes the automatically generated hashes. In this case, providing the previous hashes
 * directly through this method (e.g. obtained from old logs) can help to re-establish a lost
 * mapping from states to their target operator.
 *
 * @param uidHash The user-provided hash for this operator. This will become the JobVertexID,
 *     which is shown in the logs and web ui.
 * @return The operator with the user-provided hash.
 */
@PublicEvolving
public DataStreamSink<T> setUidHash(String uidHash) {
    // Delegates to the underlying transformation; returns this sink for fluent chaining.
    transformation.setUidHash(uidHash);
    return this;
}
/**
 * Sets a user-provided hash for this operator. This will be used as-is to create the
 * JobVertexID.
 *
 * <p>The user-provided hash is an alternative to the generated hashes, that is considered in
 * case identifying an operator through the default hash mechanics fails (e.g. because of
 * changes between Flink versions).
 *
 * <p><strong>Important</strong>: this should be used as a workaround or for troubleshooting.
 * The provided hash needs to be unique per transformation and job. Otherwise, job submission
 * will fail. Furthermore, you cannot assign a user-specified hash to intermediate nodes in an
 * operator chain and trying so will let your job fail.
 *
 * <p>A use case for this is in migration between Flink versions or changing the jobs in a way
 * that changes the automatically generated hashes. In this case, providing the previous hashes
 * directly through this method (e.g. obtained from old logs) can help to re-establish a lost
 * mapping from states to their target operator.
 *
 * @param uidHash The user-provided hash for this operator. This will become the JobVertexID,
 *     which is shown in the logs and web ui.
 * @return The operator with the user-provided hash.
 */
@PublicEvolving
public DataStreamSink<T> setUidHash(String uidHash) {
    // Delegates to the underlying transformation; returns this sink for fluent chaining.
    transformation.setUidHash(uidHash);
    return this;
}
/**
 * Sets a user-provided hash for this operator. This will be used as-is to create the
 * JobVertexID.
 *
 * <p>The user-provided hash is an alternative to the generated hashes, that is considered in
 * case identifying an operator through the default hash mechanics fails (e.g. because of
 * changes between Flink versions).
 *
 * <p><strong>Important</strong>: this should be used as a workaround or for troubleshooting.
 * The provided hash needs to be unique per transformation and job. Otherwise, job submission
 * will fail. Furthermore, you cannot assign a user-specified hash to intermediate nodes in an
 * operator chain and trying so will let your job fail.
 *
 * <p>A use case for this is in migration between Flink versions or changing the jobs in a way
 * that changes the automatically generated hashes. In this case, providing the previous hashes
 * directly through this method (e.g. obtained from old logs) can help to re-establish a lost
 * mapping from states to their target operator.
 *
 * @param uidHash The user-provided hash for this operator. This will become the JobVertexID,
 *     which is shown in the logs and web ui.
 * @return The operator with the user-provided hash.
 */
@PublicEvolving
public DataStreamSink<T> setUidHash(String uidHash) {
    // Delegates to the underlying transformation; returns this sink for fluent chaining.
    transformation.setUidHash(uidHash);
    return this;
}