UserProvider provider = UserProvider.instantiate(conf); if (provider.isHBaseSecurityEnabled() && provider.isHadoopSecurityEnabled()) { try { if (provider.shouldLoginFromKeytab()) { String host = Strings.domainNamePointerToHostName(DNS.getDefaultHost( conf.get("hbase.client.dns.interface", "default"), conf.get("hbase.client.dns.nameserver", "default"))); provider.login(HBASE_CLIENT_KEYTAB_FILE, HBASE_CLIENT_KERBEROS_PRINCIPAL, host); return provider.getCurrent();
/**
 * Logs the configured client principal in from its keytab and returns the resulting user.
 *
 * @param provider the user provider used to perform the keytab login
 * @return the current user after a successful keytab login
 * @throws IOException if the keytab login fails
 */
private static User loginFromKeytabAndReturnUser(UserProvider provider) throws IOException {
  try {
    provider.login(HBASE_CLIENT_KEYTAB_FILE, HBASE_CLIENT_KERBEROS_PRINCIPAL);
  } catch (IOException ioe) {
    // Pass the exception itself as the last SLF4J argument so the full stack trace is
    // logged; the previous getMessage()-only form dropped it even though we rethrow.
    LOG.error("Error while trying to login as user {} through {}.",
      HBASE_CLIENT_KERBEROS_PRINCIPAL, HBASE_CLIENT_KEYTAB_FILE, ioe);
    throw ioe;
  }
  return provider.getCurrent();
}
/**
 * Creates a connection for this server that short-circuits the RPC layer, talking to the
 * local admin/client services directly on behalf of the current user.
 *
 * @param conf configuration used to resolve the current user and build the connection
 * @return a short-circuit connection bound to this server
 * @throws IOException if the current user cannot be obtained
 */
@Override
public Connection createConnection(Configuration conf) throws IOException {
  // Resolve the caller identity first, then wire the connection straight to the
  // in-process RPC services (no serialization/networking).
  final User currentUser = UserProvider.instantiate(conf).getCurrent();
  return ConnectionUtils.createShortCircuitConnection(conf, null, currentUser, this.serverName,
    this.rpcServices, this.rpcServices);
}
/**
 * Obtain an authentication token, for the specified cluster, on behalf of the current user
 * and add it to the credentials for the given map reduce job.
 *
 * @param job The job that requires the permission.
 * @param conf The configuration to use in connecting to the peer cluster
 * @throws IOException When the authentication token cannot be obtained.
 */
public static void initCredentialsForCluster(Job job, Configuration conf) throws IOException {
  UserProvider userProvider = UserProvider.instantiate(job.getConfiguration());
  if (userProvider.isHBaseSecurityEnabled()) {
    // try-with-resources replaces the manual try/finally close of the peer connection.
    try (Connection peerConn = ConnectionFactory.createConnection(conf)) {
      TokenUtil.addTokenForJob(peerConn, userProvider.getCurrent(), job);
    } catch (InterruptedException e) {
      LOG.info("Interrupted obtaining user authentication token");
      // Restore the interrupt status. The old Thread.interrupted() call CLEARED the
      // flag, silently swallowing the interruption for callers up the stack.
      Thread.currentThread().interrupt();
    }
  }
}
/**
 * Constructs the handler, delegating to the superclass with a UserProvider built from
 * the supplied configuration.
 *
 * @param c configuration passed to the superclass and used to instantiate the provider
 * @throws IOException if superclass initialization fails
 */
protected MySlowHBaseHandler(Configuration c) throws IOException { super(c, UserProvider.instantiate(c)); }
/**
 * Returns the name of the currently logged-in user, or {@code null} when no current
 * user can be resolved.
 *
 * @return the userName for the current logged-in user, or null if there is none
 * @throws IOException if the underlying user cannot be obtained
 */
public String getCurrentUserName() throws IOException {
  final User current = getCurrent();
  if (current == null) {
    return null;
  }
  return current.getName();
}
/**
 * Verifies that a client can log in from a keytab/principal pair supplied via
 * configuration, and that the resulting user reports the expected short and full names.
 */
@Test
public void testLoginWithUserKeytabAndPrincipal() throws Exception {
  final String keytabPath = getClientKeytabForTesting();
  final String principal = getClientPrincipalForTesting();
  assertNotNull("Path for client keytab is not specified.", keytabPath);
  assertNotNull("Client principal is not specified.", principal);

  // Wire the keytab/principal into a secured configuration and hand it to UGI.
  final Configuration securedConf = getSecuredConfiguration();
  securedConf.set(AuthUtil.HBASE_CLIENT_KEYTAB_FILE, keytabPath);
  securedConf.set(AuthUtil.HBASE_CLIENT_KERBEROS_PRINCIPAL, principal);
  UserGroupInformation.setConfiguration(securedConf);

  final UserProvider userProvider = UserProvider.instantiate(securedConf);
  assertTrue("Client principal or keytab is empty", userProvider.shouldLoginFromKeytab());
  userProvider.login(AuthUtil.HBASE_CLIENT_KEYTAB_FILE, AuthUtil.HBASE_CLIENT_KERBEROS_PRINCIPAL);

  final User loggedIn = userProvider.getCurrent();
  assertEquals(CLIENT_NAME, loggedIn.getShortName());
  assertEquals(getClientPrincipalForTesting(), loggedIn.getName());
}
/**
 * When security is enabled, logs in the REST server principal from its keytab and, if a
 * REST authentication type is configured, builds the authentication filter to install.
 *
 * @param userProvider provider used to perform the Kerberos keytab login
 * @param conf configuration holding the DNS, keytab, principal and auth-type settings
 * @return a possibly-null auth filter paired with the servlet container class to use
 * @throws Exception if host name resolution or the login fails
 */
private static Pair<FilterHolder, Class<? extends ServletContainer>> loginServerPrincipal(
    UserProvider userProvider, Configuration conf) throws Exception {
  // Guard clause: without full security there is nothing to log in or filter.
  if (!userProvider.isHadoopSecurityEnabled() || !userProvider.isHBaseSecurityEnabled()) {
    return new Pair<>(null, ServletContainer.class);
  }
  // Resolve the local host name used as the instance component of the principal.
  String machineName = Strings.domainNamePointerToHostName(DNS.getDefaultHost(
    conf.get(REST_DNS_INTERFACE, "default"), conf.get(REST_DNS_NAMESERVER, "default")));
  String keytabFilename = conf.get(REST_KEYTAB_FILE);
  Preconditions.checkArgument(keytabFilename != null && !keytabFilename.isEmpty(),
    REST_KEYTAB_FILE + " should be set if security is enabled");
  String principalConfig = conf.get(REST_KERBEROS_PRINCIPAL);
  Preconditions.checkArgument(principalConfig != null && !principalConfig.isEmpty(),
    REST_KERBEROS_PRINCIPAL + " should be set if security is enabled");
  userProvider.login(REST_KEYTAB_FILE, REST_KERBEROS_PRINCIPAL, machineName);
  // No configured auth type: logged in, but the plain container with no filter is used.
  if (conf.get(REST_AUTHENTICATION_TYPE) == null) {
    return new Pair<>(null, ServletContainer.class);
  }
  FilterHolder authFilter = new FilterHolder();
  authFilter.setClassName(AuthFilter.class.getName());
  authFilter.setName("AuthenticationFilter");
  return new Pair<>(authFilter, RESTServletContainer.class);
}
private User getActiveUser() throws IOException { // for non-rpc handling, fallback to system user User user = RpcServer.getRequestUser().orElse(userProvider.getCurrent()); // this is for testing if (userProvider.isHadoopSecurityEnabled() && "simple".equalsIgnoreCase(conf.get(User.HBASE_SECURITY_CONF_KEY))) { return User.createUserForTesting(conf, user.getShortName(), new String[] {}); } return user; }
synchronized (HBaseSecurityUtil.class) { if (legacyProvider == null) { legacyProvider = UserProvider.instantiate(hbaseConfig); String keytab = (String) conf.get(STORM_KEYTAB_FILE_KEY); if (keytab != null) { hbaseConfig.set(STORM_USER_NAME_KEY, userName); legacyProvider.login(STORM_KEYTAB_FILE_KEY, STORM_USER_NAME_KEY, InetAddress.getLocalHost().getCanonicalHostName());
/**
 * Checks that a test user created with explicit groups reports the same group names when
 * re-created from a remote UGI through the UserProvider.
 */
@Test
public void testCreateUserForTestingGroupCache() throws Exception {
  final Configuration conf = HBaseConfiguration.create();
  final User created = User.createUserForTesting(conf, "group_user", new String[] { "MYGROUP" });
  final UserProvider provider = UserProvider.instantiate(conf);
  final User provided = provider.create(UserGroupInformation.createRemoteUser("group_user"));
  // Both paths must resolve through the same (cached) group mapping.
  assertArrayEquals(created.getGroupNames(), provided.getGroupNames());
}
this.userProvider = UserProvider.instantiate(conf); this.isSecurityEnabled = userProvider.isHBaseSecurityEnabled(); if (isSecurityEnabled) { saslProps = SaslUtil.initSaslProperties(conf.get("hbase.rpc.protection",
/**
 * Releases a previously acquired delegation token.
 */
public void releaseDelegationToken() {
  if (userProvider.isHadoopSecurityEnabled()) {
    // Only cancel tokens we obtained ourselves; a forwarded token belongs to the caller.
    if (userToken != null && !hasForwardedToken) {
      try {
        userToken.cancel(this.fs.getConf());
      } catch (Exception e) {
        // Best-effort cleanup: SLF4J parameterized logging (with the cause) instead of
        // string concatenation; we deliberately continue after a failed cancel.
        LOG.warn("Failed to cancel HDFS delegation token: {}", userToken, e);
      }
    }
    this.userToken = null;
    this.fs = null;
  }
}
/**
 * Builds a blocking RPC connection for the given remote id, pre-serializing the
 * connection header and optionally starting a dedicated call-sender thread.
 *
 * @param rpcClient owning client providing conf, codec, compressor and cluster id
 * @param remoteId identity (address + user ticket) of the remote endpoint
 * @throws IOException if the remote address is unresolved or header serialization fails
 */
BlockingRpcConnection(BlockingRpcClient rpcClient, ConnectionId remoteId) throws IOException {
  super(rpcClient.conf, AbstractRpcClient.WHEEL_TIMER, remoteId, rpcClient.clusterId,
    rpcClient.userProvider.isHBaseSecurityEnabled(), rpcClient.codec, rpcClient.compressor);
  this.rpcClient = rpcClient;
  // Fail fast on an address that never resolved rather than erroring later on connect.
  if (remoteId.getAddress().isUnresolved()) {
    throw new UnknownHostException("unknown host: " + remoteId.getAddress().getHostName());
  }
  this.connectionHeaderPreamble = getConnectionHeaderPreamble();
  ConnectionHeader header = getConnectionHeader();
  // Pre-serialize the header as [4-byte length][header bytes] so it can be written
  // in a single shot when the connection is established.
  ByteArrayOutputStream baos = new ByteArrayOutputStream(4 + header.getSerializedSize());
  DataOutputStream dos = new DataOutputStream(baos);
  dos.writeInt(header.getSerializedSize());
  header.writeTo(dos);
  assert baos.size() == 4 + header.getSerializedSize();
  this.connectionHeaderWithLength = baos.getBuffer();
  UserGroupInformation ticket = remoteId.ticket.getUGI();
  this.threadName = "IPC Client (" + this.rpcClient.socketFactory.hashCode() + ") connection to "
    + remoteId.getAddress().toString()
    + ((ticket == null) ? " from an unknown user" : (" from " + ticket.getUserName()));
  // With SPECIFIC_WRITE_THREAD enabled, requests are queued to a dedicated sender
  // thread; otherwise the calling thread writes directly (callSender stays null).
  if (this.rpcClient.conf.getBoolean(BlockingRpcClient.SPECIFIC_WRITE_THREAD, false)) {
    callSender = new CallSender(threadName, this.rpcClient.conf);
    callSender.start();
  } else {
    callSender = null;
  }
}
/**
 * Boots a secured mini cluster for the test class: forces the always-on security
 * provider, enables HBase security, tunes bulk-load limits, and switches the RPC codec
 * so tag values come back on normal RPCs. Waits for the ACL table before proceeding.
 */
@BeforeClass
public static void setUpBeforeClass() throws Exception {
  // set the always on security provider
  UserProvider.setUserProviderForTesting(util.getConfiguration(),
    HadoopSecurityEnabledUserProviderForTesting.class);
  // setup configuration
  SecureTestUtil.enableSecurity(util.getConfiguration());
  util.getConfiguration().setInt(LoadIncrementalHFiles.MAX_FILES_PER_REGION_PER_FAMILY,
    MAX_FILES_PER_REGION_PER_FAMILY);
  // change default behavior so that tag values are returned with normal rpcs
  util.getConfiguration().set(HConstants.RPC_CODEC_CONF_KEY,
    KeyValueCodecWithTags.class.getCanonicalName());
  util.startMiniCluster();
  // Wait for the ACL table to become available
  util.waitTableEnabled(AccessControlLists.ACL_TABLE_NAME);
  setupNamespace();
}
/**
 * Performs the region server Kerberos login from its configured keytab and principal,
 * substituting the given host into the principal's instance component.
 *
 * @param user provider that performs the actual keytab login
 * @param host host name used to resolve the _HOST placeholder in the principal
 * @throws IOException if the login fails
 */
protected void login(UserProvider user, String host) throws IOException {
  user.login("hbase.regionserver.keytab.file", "hbase.regionserver.kerberos.principal", host);
}
/**
 * Obtain an authentication token, for the specified cluster, on behalf of the current user
 * and add it to the credentials for the given map reduce job.
 *
 * @param job The job that requires the permission.
 * @param conf The configuration to use in connecting to the peer cluster
 * @throws IOException When the authentication token cannot be obtained.
 */
public static void initCredentialsForCluster(Job job, Configuration conf) throws IOException {
  UserProvider userProvider = UserProvider.instantiate(job.getConfiguration());
  if (userProvider.isHBaseSecurityEnabled()) {
    // try-with-resources replaces the manual try/finally close of the peer connection.
    try (Connection peerConn = ConnectionFactory.createConnection(conf)) {
      TokenUtil.addTokenForJob(peerConn, userProvider.getCurrent(), job);
    } catch (InterruptedException e) {
      LOG.info("Interrupted obtaining user authentication token");
      // Restore the interrupt status. The old Thread.interrupted() call CLEARED the
      // flag, silently swallowing the interruption for callers up the stack.
      Thread.currentThread().interrupt();
    }
  }
}
/**
 * Creates a short-circuit connection that can bypass the RPC layer (serialization,
 * deserialization, networking, etc..) when talking to a local server.
 * @param conf the current configuration
 * @param pool the thread pool to use for batch operations
 * @param user the user the connection is for; when null, the currently logged-in user is used
 * @param serverName the local server name
 * @param admin the admin interface of the local server
 * @param client the client interface of the local server
 * @return an short-circuit connection.
 * @throws IOException if IO failure occurred
 */
public static ClusterConnection createShortCircuitConnection(final Configuration conf,
    ExecutorService pool, User user, final ServerName serverName,
    final AdminService.BlockingInterface admin, final ClientService.BlockingInterface client)
    throws IOException {
  // Use a local instead of reassigning the parameter when defaulting the user.
  final User effectiveUser = user != null ? user : UserProvider.instantiate(conf).getCurrent();
  return new ShortCircuitingClusterConnection(conf, pool, effectiveUser, serverName, admin,
    client);
}
/**
 * Initializes bulk-load staging: sets up the secure random source, user provider,
 * UGI reference counter, filesystem handle, and creates the hidden staging directory
 * under the HBase root if it does not yet exist.
 *
 * @throws IOException if the filesystem cannot be reached or the directory created
 */
public void start() throws IOException {
  random = new SecureRandom();
  userProvider = UserProvider.instantiate(conf);
  ugiReferenceCounter = new ConcurrentHashMap<>();
  fs = FileSystem.get(conf);
  baseStagingDir = new Path(FSUtils.getRootDir(conf), HConstants.BULKLOAD_STAGING_DIR_NAME);
  if (conf.get("hbase.bulkload.staging.dir") != null) {
    // Parameterized logging; the old message concatenated two literals, producing a
    // stray double space ("dir  is deprecated").
    LOG.warn("hbase.bulkload.staging.dir is deprecated. Bulkload staging directory is {}",
      baseStagingDir);
  }
  if (!fs.exists(baseStagingDir)) {
    fs.mkdirs(baseStagingDir, PERM_HIDDEN);
  }
}
/** * Returns the active user to which authorization checks should be applied. * If we are in the context of an RPC call, the remote user is used, * otherwise the currently logged in user is used. */ private User getActiveUser(ObserverContext<?> ctx) throws IOException { // for non-rpc handling, fallback to system user Optional<User> optionalUser = ctx.getCaller(); if (optionalUser.isPresent()) { return optionalUser.get(); } return userProvider.getCurrent(); }