/**
 * Factory hook for the {@link KerberosProperties} used by this component.
 * Declared {@code protected} so subclasses (e.g. tests) can override it.
 *
 * @param kerberosConfigFile the Kerberos configuration file supplied by the framework
 * @return a {@link KerberosProperties} backed by the given configuration file
 */
protected KerberosProperties getKerberosProperties(File kerberosConfigFile) {
    final KerberosProperties kerberosProps = new KerberosProperties(kerberosConfigFile);
    return kerberosProps;
}
/**
 * Creates a new SPNEGO auth scheme that performs Kerberos authentication
 * using a keytab.
 *
 * @param context the current HTTP context (not used when constructing the scheme)
 * @return a freshly constructed {@link KerberosKeytabSPNegoScheme}
 */
public AuthScheme create(HttpContext context) {
    final AuthScheme scheme = new KerberosKeytabSPNegoScheme();
    return scheme;
}
/**
 * Initializes the list of supported property descriptors, including the
 * Kerberos principal/keytab descriptors derived from the framework-provided
 * Kerberos configuration file.
 *
 * @param context the service initialization context supplied by the framework
 */
@Override
protected void init(final ControllerServiceInitializationContext context) {
    final List<PropertyDescriptor> props = new ArrayList<>();
    props.add(DATABASE_URL);
    props.add(HIVE_CONFIGURATION_RESOURCES);
    props.add(DB_USER);
    props.add(DB_PASSWORD);
    props.add(MAX_WAIT_TIME);
    props.add(MAX_TOTAL_CONNECTIONS);
    props.add(VALIDATION_QUERY);
    props.add(KERBEROS_CREDENTIALS_SERVICE);
    kerberosConfigFile = context.getKerberosConfigurationFile();
    kerberosProperties = new KerberosProperties(kerberosConfigFile);
    props.add(kerberosProperties.getKerberosPrincipal());
    props.add(kerberosProperties.getKerberosKeytab());
    // Expose an unmodifiable view so the descriptor list cannot be mutated
    // after initialization — consistent with the other init() methods in
    // this module that use Collections.unmodifiableList.
    properties = Collections.unmodifiableList(props);
}
// NOTE(review): fragment — the enclosing method and the closing braces for the
// synchronized/if/else blocks are outside this view.
UserGroupInformation ugi;
synchronized (RESOURCES_LOCK) {
    if (SecurityUtil.isSecurityEnabled(config)) {
        // Secure cluster: resolve principal and keytab (expression language is
        // evaluated) and perform a Kerberos login before getting the FileSystem.
        String principal = context.getProperty(kerberosProperties.getKerberosPrincipal()).evaluateAttributeExpressions().getValue();
        String keyTab = context.getProperty(kerberosProperties.getKerberosKeytab()).evaluateAttributeExpressions().getValue();
        ugi = SecurityUtil.loginKerberos(config, principal, keyTab);
        fs = getFileSystemAsUser(config, ugi);
    } else {
        // Insecure cluster: force simple authentication and allow fallback so a
        // client configured for Kerberos can still talk to an insecure server.
        config.set("ipc.client.fallback-to-simple-auth-allowed", "true");
        config.set("hadoop.security.authentication", "simple");
        ugi = SecurityUtil.loginSimple(config);
        fs = getFileSystemAsUser(config, ugi);
/**
 * Registers the supported property descriptors for this service, including
 * the Kerberos principal/keytab descriptors built from the framework-provided
 * Kerberos configuration file and any subclass-supplied additions.
 *
 * @param config the service initialization context
 * @throws InitializationException declared by the superclass contract
 */
@Override
protected void init(ControllerServiceInitializationContext config) throws InitializationException {
    kerberosConfigFile = config.getKerberosConfigurationFile();
    kerberosProperties = getKerberosProperties(kerberosConfigFile);

    final List<PropertyDescriptor> descriptors = new ArrayList<>();
    descriptors.add(HADOOP_CONF_FILES);
    descriptors.add(KERBEROS_CREDENTIALS_SERVICE);
    descriptors.add(kerberosProperties.getKerberosPrincipal());
    descriptors.add(kerberosProperties.getKerberosKeytab());
    descriptors.add(ZOOKEEPER_QUORUM);
    descriptors.add(ZOOKEEPER_CLIENT_PORT);
    descriptors.add(ZOOKEEPER_ZNODE_PARENT);
    descriptors.add(HBASE_CLIENT_RETRIES);
    descriptors.add(PHOENIX_CLIENT_JAR_LOCATION);
    descriptors.addAll(getAdditionalProperties());
    this.properties = Collections.unmodifiableList(descriptors);
}
/**
 * Authenticates the given principal with Kerberos via
 * {@link SecurityUtil#loginKerberos(Configuration, String, String)} and returns the
 * resulting {@link UserGroupInformation}.
 * <p/>
 * As of Apache NiFi 1.5.0, due to changes made to
 * {@link SecurityUtil#loginKerberos(Configuration, String, String)}, Hive controller
 * services no longer attempt explicit relogins. In earlier versions a
 * {@link org.apache.nifi.hadoop.KerberosTicketRenewer} was started by
 * {@link HiveConfigurator#authenticate(Configuration, String, String, long)} when the
 * service was enabled; that separate relogin thread could race with the implicit
 * relogins performed by hadoop/Hive code sharing the same {@link UserGroupInformation},
 * leaving the {@link javax.security.auth.Subject} cleared or in an unexpected state and
 * the controller service unrecoverable after failed authentication attempts.
 *
 * @param hiveConfig the Hive/Hadoop configuration to authenticate against
 * @param principal  the Kerberos principal to log in as
 * @param keyTab     path to the keytab for the principal
 * @return the logged-in {@link UserGroupInformation}
 * @throws AuthenticationFailedException if the Kerberos login fails
 * @see SecurityUtil#loginKerberos(Configuration, String, String)
 */
public UserGroupInformation authenticate(final Configuration hiveConfig, String principal, String keyTab) throws AuthenticationFailedException {
    try {
        return SecurityUtil.loginKerberos(hiveConfig, principal, keyTab);
    } catch (IOException ioe) {
        // Preserve the underlying cause for diagnostics.
        throw new AuthenticationFailedException("Kerberos Authentication for Hive failed", ioe);
    }
}
public Collection<ValidationResult> validate(String configFiles, String principal, String keyTab, AtomicReference<ValidationResources> validationResourceHolder, ComponentLog log) { final List<ValidationResult> problems = new ArrayList<>(); ValidationResources resources = validationResourceHolder.get(); // if no resources in the holder, or if the holder has different resources loaded, // then load the Configuration and set the new resources in the holder if (resources == null || !configFiles.equals(resources.getConfigResources())) { log.debug("Reloading validation resources"); resources = new ValidationResources(configFiles, getConfigurationFromFiles(configFiles)); validationResourceHolder.set(resources); } final Configuration hiveConfig = resources.getConfiguration(); problems.addAll(KerberosProperties.validatePrincipalAndKeytab(this.getClass().getSimpleName(), hiveConfig, principal, keyTab, log)); return problems; }
public static List<ValidationResult> validatePrincipalAndKeytab(final String subject, final Configuration config, final String principal, final String keytab, final ComponentLog logger) { final List<ValidationResult> results = new ArrayList<>(); // if security is enabled then the keytab and principal are required final boolean isSecurityEnabled = SecurityUtil.isSecurityEnabled(config); final boolean blankPrincipal = (principal == null || principal.isEmpty()); if (isSecurityEnabled && blankPrincipal) { results.add(new ValidationResult.Builder() .valid(false) .subject(subject) .explanation("Kerberos Principal must be provided when using a secure configuration") .build()); } final boolean blankKeytab = (keytab == null || keytab.isEmpty()); if (isSecurityEnabled && blankKeytab) { results.add(new ValidationResult.Builder() .valid(false) .subject(subject) .explanation("Kerberos Keytab must be provided when using a secure configuration") .build()); } if (!isSecurityEnabled && (!blankPrincipal || !blankKeytab)) { logger.warn("Configuration does not have security enabled, Keytab and Principal will be ignored"); } return results; }
/**
 * Generates a SPNEGO token for the given auth server by logging in with the
 * keytab carried by {@code credentials} and initiating a GSS security context
 * as the logged-in subject.
 */
@Override
public byte[] generateToken(byte[] input, String authServer, Credentials credentials) {
    // Seed a Subject with the user principal; 'false' = subject is not read-only.
    Set<Principal> principals = new HashSet<>();
    principals.add(credentials.getUserPrincipal());
    Subject subject = new Subject(false, principals, new HashSet<>(), new HashSet<>());
    try {
        // JAAS login with an empty entry name; the KerberosConfiguration supplies
        // the keytab-based login module configuration programmatically.
        LoginContext loginContext = new LoginContext("", subject, null,
                new KerberosConfiguration(credentials.getUserPrincipal().getName(),
                        ((KerberosKeytabCredentials) credentials).getKeytab()));
        loginContext.login();
        Subject loggedInSubject = loginContext.getSubject();
        // Build the GSS context and produce the token while running as the
        // authenticated subject.
        return Subject.doAs(loggedInSubject, new PrivilegedExceptionAction<byte[]>() {
            public byte[] run() throws UnknownHostException, ClassNotFoundException, GSSException, IllegalAccessException, NoSuchFieldException {
                GSSManager gssManager = GSSManager.getInstance();
                // Target service principal of the form HTTP/<authServer>.
                String servicePrincipal = KerberosUtil.getServicePrincipal("HTTP", authServer);
                Oid serviceOid = KerberosUtil.getOidInstance("NT_GSS_KRB5_PRINCIPAL");
                GSSName serviceName = gssManager.createName(servicePrincipal, serviceOid);
                Oid mechOid = KerberosUtil.getOidInstance("GSS_KRB5_MECH_OID");
                GSSContext gssContext = gssManager.createContext(serviceName, mechOid, null, 0);
                // Request credential delegation and mutual authentication before
                // producing the initial security-context token.
                gssContext.requestCredDeleg(true);
                gssContext.requestMutualAuth(true);
                return gssContext.initSecContext(input, 0, input.length);
            }
        });
    } catch (PrivilegedActionException | LoginException e) {
        // Wrap checked login/privileged-action failures; cause is preserved.
        throw new RuntimeException(e);
    }
}
/**
 * Builds an {@link HttpClient} configured with optional TLS (from the SSL
 * context service), optional SPNEGO/Kerberos-keytab credentials (from the
 * credentials service), and a single timeout applied to connect, connection
 * request, and socket waits.
 *
 * @return the configured client
 * @throws IOException if the SSL context cannot be constructed
 */
private HttpClient openConnection() throws IOException {
    final HttpClientBuilder builder = HttpClientBuilder.create();

    // TLS, when an SSL context service is configured.
    if (sslContextService != null) {
        try {
            builder.setSSLContext(getSslSocketFactory(sslContextService));
        } catch (KeyStoreException | CertificateException | NoSuchAlgorithmException | UnrecoverableKeyException | KeyManagementException e) {
            throw new IOException(e);
        }
    }

    // SPNEGO auth backed by a Kerberos keytab, when a credentials service is configured.
    if (credentialsService != null) {
        final CredentialsProvider provider = new BasicCredentialsProvider();
        provider.setCredentials(new AuthScope(null, -1, null),
                new KerberosKeytabCredentials(credentialsService.getPrincipal(), credentialsService.getKeytab()));
        builder.setDefaultCredentialsProvider(provider);
        final Lookup<AuthSchemeProvider> schemeRegistry = RegistryBuilder.<AuthSchemeProvider> create()
                .register(AuthSchemes.SPNEGO, new KerberosKeytabSPNegoAuthSchemeProvider())
                .build();
        builder.setDefaultAuthSchemeRegistry(schemeRegistry);
    }

    // One timeout value governs connect, request-from-pool, and socket reads.
    final RequestConfig requestConfig = RequestConfig.custom()
            .setConnectTimeout(connectTimeout)
            .setConnectionRequestTimeout(connectTimeout)
            .setSocketTimeout(connectTimeout)
            .build();
    builder.setDefaultRequestConfig(requestConfig);
    return builder.build();
}
// NOTE(review): fragment — the closing brace / else branch is outside this view.
// Secure HBase: resolve principal and keytab (expression language is evaluated)
// and perform the Kerberos login, then record it at INFO level.
if (SecurityUtil.isSecurityEnabled(hbaseConfig)) {
    String principal = context.getProperty(kerberosProperties.getKerberosPrincipal()).evaluateAttributeExpressions().getValue();
    String keyTab = context.getProperty(kerberosProperties.getKerberosKeytab()).evaluateAttributeExpressions().getValue();
    ugi = SecurityUtil.loginKerberos(hbaseConfig, principal, keyTab);
    getLogger().info("Successfully logged in as principal {} with keytab {}", new Object[] {principal, keyTab});
/**
 * Initializes the list of supported property descriptors, including the
 * Kerberos principal/keytab descriptors derived from the framework-provided
 * Kerberos configuration file.
 *
 * @param context the service initialization context supplied by the framework
 */
@Override
protected void init(final ControllerServiceInitializationContext context) {
    final List<PropertyDescriptor> props = new ArrayList<>();
    props.add(DATABASE_URL);
    props.add(HIVE_CONFIGURATION_RESOURCES);
    props.add(DB_USER);
    props.add(DB_PASSWORD);
    props.add(MAX_WAIT_TIME);
    props.add(MAX_TOTAL_CONNECTIONS);
    props.add(VALIDATION_QUERY);
    props.add(KERBEROS_CREDENTIALS_SERVICE);
    kerberosConfigFile = context.getKerberosConfigurationFile();
    kerberosProperties = new KerberosProperties(kerberosConfigFile);
    props.add(kerberosProperties.getKerberosPrincipal());
    props.add(kerberosProperties.getKerberosKeytab());
    // Expose an unmodifiable view so the descriptor list cannot be mutated
    // after initialization — consistent with the other init() methods in
    // this module that use Collections.unmodifiableList.
    properties = Collections.unmodifiableList(props);
}
/**
 * Sets up the property descriptor list for this service: Kerberos
 * principal/keytab descriptors (created from the framework-supplied Kerberos
 * configuration file), the fixed HBase/Phoenix descriptors, and any
 * subclass-provided additions.
 *
 * @param config the service initialization context
 * @throws InitializationException declared by the superclass contract
 */
@Override
protected void init(ControllerServiceInitializationContext config) throws InitializationException {
    kerberosConfigFile = config.getKerberosConfigurationFile();
    kerberosProperties = getKerberosProperties(kerberosConfigFile);

    final List<PropertyDescriptor> supported = new ArrayList<>();
    supported.add(HADOOP_CONF_FILES);
    supported.add(KERBEROS_CREDENTIALS_SERVICE);
    supported.add(kerberosProperties.getKerberosPrincipal());
    supported.add(kerberosProperties.getKerberosKeytab());
    supported.add(ZOOKEEPER_QUORUM);
    supported.add(ZOOKEEPER_CLIENT_PORT);
    supported.add(ZOOKEEPER_ZNODE_PARENT);
    supported.add(HBASE_CLIENT_RETRIES);
    supported.add(PHOENIX_CLIENT_JAR_LOCATION);
    supported.addAll(getAdditionalProperties());
    this.properties = Collections.unmodifiableList(supported);
}
/**
 * Builds the {@link KerberosProperties} for this component from the given
 * configuration file; {@code protected} so subclasses (e.g. tests) may override.
 *
 * @param kerberosConfigFile the Kerberos configuration file supplied by the framework
 * @return a new {@link KerberosProperties} instance
 */
protected KerberosProperties getKerberosProperties(File kerberosConfigFile) {
    final KerberosProperties result = new KerberosProperties(kerberosConfigFile);
    return result;
}
/**
 * Performs a Kerberos login for the given principal/keytab against the supplied
 * Hive configuration and returns the resulting {@link UserGroupInformation}.
 * <p/>
 * As of Apache NiFi 1.5.0, due to changes made to
 * {@link SecurityUtil#loginKerberos(Configuration, String, String)}, Hive controller
 * services no longer attempt explicit relogins. Previously a
 * {@link org.apache.nifi.hadoop.KerberosTicketRenewer} was started by
 * {@link HiveConfigurator#authenticate(Configuration, String, String, long)} when the
 * service was enabled; that dedicated relogin thread could race with the implicit
 * relogins performed by hadoop/Hive code sharing the same {@link UserGroupInformation},
 * leaving the {@link javax.security.auth.Subject} cleared or in an unexpected state and
 * the controller service unrecoverable after failed authentication attempts.
 *
 * @param hiveConfig the Hive/Hadoop configuration to authenticate against
 * @param principal  the Kerberos principal to log in as
 * @param keyTab     path to the keytab for the principal
 * @return the logged-in {@link UserGroupInformation}
 * @throws AuthenticationFailedException if the Kerberos login fails
 * @see SecurityUtil#loginKerberos(Configuration, String, String)
 */
public UserGroupInformation authenticate(final Configuration hiveConfig, String principal, String keyTab) throws AuthenticationFailedException {
    try {
        return SecurityUtil.loginKerberos(hiveConfig, principal, keyTab);
    } catch (IOException ioe) {
        // Preserve the underlying cause for diagnostics.
        throw new AuthenticationFailedException("Kerberos Authentication for Hive failed", ioe);
    }
}
public Collection<ValidationResult> validate(String configFiles, String principal, String keyTab, AtomicReference<ValidationResources> validationResourceHolder, ComponentLog log) { final List<ValidationResult> problems = new ArrayList<>(); ValidationResources resources = validationResourceHolder.get(); // if no resources in the holder, or if the holder has different resources loaded, // then load the Configuration and set the new resources in the holder if (resources == null || !configFiles.equals(resources.getConfigResources())) { log.debug("Reloading validation resources"); resources = new ValidationResources(configFiles, getConfigurationFromFiles(configFiles)); validationResourceHolder.set(resources); } final Configuration hiveConfig = resources.getConfiguration(); problems.addAll(KerberosProperties.validatePrincipalAndKeytab(this.getClass().getSimpleName(), hiveConfig, principal, keyTab, log)); return problems; }
// NOTE(review): fragment — the closing brace / else branch is outside this view.
// Secure HBase: resolve principal and keytab (expression language is evaluated)
// and perform the Kerberos login, then record it at INFO level.
if (SecurityUtil.isSecurityEnabled(hbaseConfig)) {
    String principal = context.getProperty(kerberosProperties.getKerberosPrincipal()).evaluateAttributeExpressions().getValue();
    String keyTab = context.getProperty(kerberosProperties.getKerberosKeytab()).evaluateAttributeExpressions().getValue();
    ugi = SecurityUtil.loginKerberos(hbaseConfig, principal, keyTab);
    getLogger().info("Successfully logged in as principal {} with keytab {}", new Object[] {principal, keyTab});
/**
 * Initializes this processor's property descriptors (including the
 * Kerberos principal/keytab descriptors built from the framework-supplied
 * Kerberos configuration file) and its relationships.
 *
 * @param context the processor initialization context
 */
@Override
protected void init(ProcessorInitializationContext context) {
    // Kerberos setup first; the resulting descriptors are appended below.
    kerberosConfigFile = context.getKerberosConfigurationFile();
    kerberosProperties = new KerberosProperties(kerberosConfigFile);

    final List<PropertyDescriptor> descriptors = new ArrayList<>();
    descriptors.add(METASTORE_URI);
    descriptors.add(HIVE_CONFIGURATION_RESOURCES);
    descriptors.add(DB_NAME);
    descriptors.add(TABLE_NAME);
    descriptors.add(PARTITION_COLUMNS);
    descriptors.add(AUTOCREATE_PARTITIONS);
    descriptors.add(MAX_OPEN_CONNECTIONS);
    descriptors.add(HEARTBEAT_INTERVAL);
    descriptors.add(TXNS_PER_BATCH);
    descriptors.add(RECORDS_PER_TXN);
    descriptors.add(CALL_TIMEOUT);
    descriptors.add(ROLLBACK_ON_FAILURE);
    descriptors.add(KERBEROS_CREDENTIALS_SERVICE);
    descriptors.add(kerberosProperties.getKerberosPrincipal());
    descriptors.add(kerberosProperties.getKerberosKeytab());
    propertyDescriptors = Collections.unmodifiableList(descriptors);

    final Set<Relationship> rels = new HashSet<>();
    rels.add(REL_SUCCESS);
    rels.add(REL_FAILURE);
    rels.add(REL_RETRY);
    relationships = Collections.unmodifiableSet(rels);
}
/**
 * Initializes this processor: resets the shared HDFS resources holder and
 * registers the supported property descriptors, including the Kerberos
 * principal/keytab descriptors built from the framework-supplied Kerberos
 * configuration file.
 *
 * @param context the processor initialization context
 */
@Override
protected void init(ProcessorInitializationContext context) {
    // Start from an empty resources triple; populated later during scheduling.
    hdfsResources.set(new HdfsResources(null, null, null));

    kerberosConfigFile = context.getKerberosConfigurationFile();
    kerberosProperties = getKerberosProperties(kerberosConfigFile);

    final List<PropertyDescriptor> descriptors = new ArrayList<>();
    descriptors.add(HADOOP_CONFIGURATION_RESOURCES);
    descriptors.add(KERBEROS_CREDENTIALS_SERVICE);
    descriptors.add(kerberosProperties.getKerberosPrincipal());
    descriptors.add(kerberosProperties.getKerberosKeytab());
    descriptors.add(KERBEROS_RELOGIN_PERIOD);
    descriptors.add(ADDITIONAL_CLASSPATH_RESOURCES);
    properties = Collections.unmodifiableList(descriptors);
}
/**
 * Returns the {@link KerberosProperties} for the given Kerberos configuration
 * file; {@code protected} so subclasses (e.g. tests) may override.
 *
 * @param kerberosConfigFile the Kerberos configuration file supplied by the framework
 * @return a new {@link KerberosProperties} instance
 */
protected KerberosProperties getKerberosProperties(File kerberosConfigFile) {
    return new KerberosProperties(
            kerberosConfigFile);
}