/**
 * Cipher transformation applied when encrypting session data.
 *
 * @return the configured transformation, defaulting to AES in CTR mode without padding.
 */
public String cipherTransformation() {
  String transformation = conf.get("spark.network.crypto.cipher", "AES/CTR/NoPadding");
  return transformation;
}
/**
 * Whether strong encryption is enabled. Turning this on also enables the new auth
 * protocol, which is used to negotiate session keys.
 *
 * @return true when AES-based encryption is enabled; defaults to false.
 */
public boolean encryptionEnabled() {
  boolean enabled = conf.getBoolean("spark.network.crypto.enabled", false);
  return enabled;
}
/**
 * Length of the initialization vector, in bytes.
 *
 * @return the configured IV length; defaults to 16 bytes.
 */
public int ivLength() {
  int length = conf.getInt("spark.network.crypto.ivLength", 16);
  return length;
}
/**
 * Builds the commons-crypto configuration for this module by extracting every entry
 * under the "spark.network.crypto.config." prefix.
 *
 * @return a {@link Properties} view of the commons-crypto settings.
 */
public Properties cryptoConf() {
  final String prefix = "spark.network.crypto.config.";
  return CryptoUtils.toCryptoConf(prefix, conf.getAll());
}
/**
 * Upper bound on the number of chunks that may be in flight simultaneously on the
 * shuffle service. Once the bound is reached, new incoming connections are closed;
 * clients then retry per the shuffle retry settings (`spark.shuffle.io.maxRetries`
 * and `spark.shuffle.io.retryWait`), and if those are exhausted the task fails
 * with a fetch failure.
 *
 * @return the configured limit; effectively unlimited (Long.MAX_VALUE) by default.
 */
public long maxChunksBeingTransferred() {
  long limit = conf.getLong("spark.shuffle.maxChunksBeingTransferred", Long.MAX_VALUE);
  return limit;
}
}
// NOTE(review): byte-for-byte duplicate of the cryptoConf() definition earlier in this
// file — duplicate method definitions will not compile; this looks like an extraction
// artifact and one copy should be removed.
/** * The commons-crypto configuration for the module. */ public Properties cryptoConf() { return CryptoUtils.toCryptoConf("spark.network.crypto.config.", conf.getAll()); }
// NOTE(review): byte-for-byte duplicate of the maxChunksBeingTransferred() definition
// earlier in this file, including the trailing closing brace — duplicate methods (and a
// second class-closing brace) will not compile; likely an extraction artifact.
/** * The max number of chunks allowed to be transferred at the same time on shuffle service. * Note that new incoming connections will be closed when the max number is hit. The client will * retry according to the shuffle retry configs (see `spark.shuffle.io.maxRetries` and * `spark.shuffle.io.retryWait`), if those limits are reached the task will fail with fetch * failure. */ public long maxChunksBeingTransferred() { return conf.getLong("spark.shuffle.maxChunksBeingTransferred", Long.MAX_VALUE); } }
/**
 * How many times to retry binding to a port before giving up.
 *
 * @return the configured retry count; defaults to 16.
 */
public int portMaxRetries() {
  int retries = conf.getInt("spark.port.maxRetries", 16);
  return retries;
}
/**
 * Algorithm used for generated secret keys. Nobody should really need to change
 * this, but it is configurable just in case.
 *
 * @return the configured key algorithm; defaults to "AES".
 */
public String keyAlgorithm() {
  String algorithm = conf.get("spark.network.crypto.keyAlgorithm", "AES");
  return algorithm;
}
/**
 * Whether to fall back to SASL when the new auth protocol fails. On by default
 * for backwards compatibility.
 *
 * @return true when SASL fallback is enabled; defaults to true.
 */
public boolean saslFallback() {
  boolean fallback = conf.getBoolean("spark.network.crypto.saslFallback", true);
  return fallback;
}
// NOTE(review): third byte-for-byte copy of the cryptoConf() definition in this file —
// duplicate method definitions will not compile; likely an extraction artifact.
/** * The commons-crypto configuration for the module. */ public Properties cryptoConf() { return CryptoUtils.toCryptoConf("spark.network.crypto.config.", conf.getAll()); }
/**
 * Maximum number of retries for IO-level failures (e.g. connection timeouts) per
 * request. A value of 0 disables retries entirely.
 *
 * @return the configured retry count; defaults to 3.
 */
public int maxIORetries() {
  int retries = conf.getInt(SPARK_NETWORK_IO_MAXRETRIES_KEY, 3);
  return retries;
}
// NOTE(review): byte-for-byte duplicate of the cipherTransformation() definition earlier
// in this file — duplicate method definitions will not compile; likely an extraction
// artifact.
/** * The cipher transformation to use for encrypting session data. */ public String cipherTransformation() { return conf.get("spark.network.crypto.cipher", "AES/CTR/NoPadding"); }
/**
 * Whether the server enforces encryption on connections authenticated via SASL.
 *
 * @return true when the server always requires SASL encryption; defaults to false.
 */
public boolean saslServerAlwaysEncrypt() {
  boolean alwaysEncrypt = conf.getBoolean("spark.network.sasl.serverAlwaysEncrypt", false);
  return alwaysEncrypt;
}
/**
 * Length of the encryption key, in bits.
 *
 * @return the configured key length; defaults to 128 bits.
 */
public int encryptionKeyLength() {
  int keyBits = conf.getInt("spark.network.crypto.keyLength", 128);
  return keyBits;
}
// NOTE(review): byte-for-byte duplicate of the keyAlgorithm() definition earlier in this
// file — duplicate method definitions will not compile; likely an extraction artifact.
/** * The algorithm for generated secret keys. Nobody should really need to change this, * but configurable just in case. */ public String keyAlgorithm() { return conf.get("spark.network.crypto.keyAlgorithm", "AES"); }
// NOTE(review): byte-for-byte duplicate of the encryptionEnabled() definition earlier in
// this file — duplicate method definitions will not compile; likely an extraction artifact.
/** * Enables strong encryption. Also enables the new auth protocol, used to negotiate keys. */ public boolean encryptionEnabled() { return conf.getBoolean("spark.network.crypto.enabled", false); }
/**
 * How many iterations to run when generating keys.
 *
 * See some discussion about this at: http://security.stackexchange.com/q/3959
 * The default value was picked for speed, since it assumes that the secret has good entropy
 * (128 bits by default), which is not generally the case with user passwords.
 *
 * @return the configured iteration count; defaults to 1024.
 */
public int keyFactoryIterations() {
  // Fix for a config-key typo: every other key in this module uses the
  // "spark.network.crypto." prefix, but this one was spelled "spark.networy.".
  // Read the correctly spelled key first, and fall back to the misspelled key so
  // that existing configurations that set it keep working.
  return conf.getInt("spark.network.crypto.keyFactoryIterations",
    conf.getInt("spark.networy.crypto.keyFactoryIterations", 1024));
}
/**
 * Key-derivation algorithm; must accept a "PBEKeySpec" as input. The default
 * (PBKDF2WithHmacSHA1) is available starting with Java 7.
 *
 * @return the configured key factory algorithm.
 */
public String keyFactoryAlgorithm() {
  String algorithm = conf.get("spark.network.crypto.keyFactoryAlgorithm", "PBKDF2WithHmacSHA1");
  return algorithm;
}
// NOTE(review): byte-for-byte duplicate of the saslFallback() definition earlier in this
// file — duplicate method definitions will not compile; likely an extraction artifact.
/** * Whether to fall back to SASL if the new auth protocol fails. Enabled by default for * backwards compatibility. */ public boolean saslFallback() { return conf.getBoolean("spark.network.crypto.saslFallback", true); }