/**
 * Truncates the given stream, performing the file operation while
 * impersonating the user associated with the stream's namespace.
 */
@Override
public void truncate(final StreamId streamId) throws Exception {
  impersonator.doAs(streamId, () -> {
    doTruncate(streamId, getStreamLocation(streamId));
    return null;
  });
}
/**
 * Kills the delegate controller while impersonating the program's user.
 * Any exception raised during impersonation is rethrown unchecked.
 */
@Override
public void kill() {
  try {
    impersonator.doAs(programId, () -> {
      delegate.kill();
      return null;
    });
  } catch (Exception e) {
    throw Throwables.propagate(e);
  }
}
/**
 * Delegates {@code kill()} to the wrapped controller, running it as the
 * impersonated program user; failures surface as unchecked exceptions.
 */
@Override
public void kill() {
  try {
    impersonator.doAs(programId, new Callable<Void>() {
      @Override
      public Void call() throws Exception {
        delegate.kill();
        return null;
      }
    });
  } catch (Exception cause) {
    throw Throwables.propagate(cause);
  }
}
/**
 * Drops the given stream. The stream location is resolved while impersonating
 * the stream's user; the actual drop then happens outside the doAs call.
 */
@Override
public void drop(final StreamId streamId) throws Exception {
  Location streamLocation = impersonator.doAs(streamId, () -> getStreamLocation(streamId));
  doDrop(streamId, streamLocation);
}
/**
 * Executes the given callable while impersonating the user for {@code datasetId},
 * narrowing the impersonation layer's broad 'throws Exception' back down to
 * IOException — the only checked exception the callables in this class throw.
 */
private <T> T execute(final Callable<T> callable) throws IOException {
  try {
    return impersonator.doAs(datasetId, callable);
  } catch (IOException ioe) {
    throw ioe;
  } catch (Exception t) {
    // Rethrows RuntimeException/Error as-is.
    Throwables.propagateIfPossible(t);
    // since the callables we execute only throw IOException (besides unchecked exceptions),
    // this should never happen
    LOG.warn("Unexpected exception while executing dataset admin operation in namespace {}.", datasetId, t);
    // The only checked exception thrown by the Callables in this class is IOException, and we
    // handle that in the previous catch statement. So no checked exception should be wrapped by
    // the following statement. However, we need it because the impersonator's doAs declares
    // 'throws Exception', since it can throw other checked exceptions in the general case.
    throw Throwables.propagate(t);
  }
}
}
// Fragment of an anonymous Callable: runs the wrapped callable while
// impersonating the user configured for this namespace.
@Override
public T call() throws Exception {
  return impersonator.doAs(namespaceMeta.getNamespaceId(), callable);
}
};
/**
 * Terminates the delegate controller as the impersonated program user.
 * Instead of throwing, any failure is surfaced as an already-failed future.
 */
@Override
public Future<? extends ServiceController> terminate() {
  try {
    return impersonator.doAs(programId, () -> delegate.terminate());
  } catch (Exception e) {
    return Futures.immediateFailedFuture(e);
  }
}
/**
 * Requests termination of the wrapped controller under impersonation.
 * Exceptions are not thrown; they come back as a failed future.
 */
@Override
public Future<? extends ServiceController> terminate() {
  try {
    return impersonator.doAs(programId, new Callable<Future<? extends ServiceController>>() {
      @Override
      public Future<? extends ServiceController> call() throws Exception {
        return delegate.terminate();
      }
    });
  } catch (Exception cause) {
    return Futures.immediateFailedFuture(cause);
  }
}
/**
 * Checks whether the given stream exists: first in the stream meta store, then
 * (as the impersonated user) whether its config file exists on the file system.
 *
 * @return true only if both the meta entry and the config location exist;
 *         false if either is missing or an IOException occurs during the check
 */
@Override
public boolean exists(final StreamId streamId) throws Exception {
  try {
    boolean metaExists = streamMetaStore.streamExists(streamId);
    if (!metaExists) {
      return false;
    }
    return impersonator.doAs(streamId, new Callable<Boolean>() {
      @Override
      public Boolean call() throws Exception {
        return getConfigLocation(streamId).exists();
      }
    });
  } catch (IOException e) {
    // Fixed the ungrammatical message and added the stream id for context.
    LOG.error("Exception while checking existence of stream {}.", streamId, e);
    return false;
  }
}
/**
 * Returns the Explore tables in the given namespace as JSON, fetching them while
 * impersonating the namespace user. Errors are reported as HTTP 500 responses.
 */
@GET
@Path("tables")
public void getTables(HttpRequest request, final HttpResponder responder,
                      @PathParam("namespace-id") final String namespaceId) {
  LOG.trace("Received get tables for current user");
  try {
    impersonator.doAs(new NamespaceId(namespaceId), new Callable<Void>() {
      @Override
      public Void call() throws Exception {
        responder.sendJson(HttpResponseStatus.OK, GSON.toJson(exploreService.getTables(namespaceId)));
        return null;
      }
    });
  } catch (Throwable t) {
    LOG.error("Got exception:", t);
    // t.getMessage() can be null (e.g. for a bare NullPointerException); fall back
    // to toString() so we never hand a null body to the responder.
    String message = t.getMessage() != null ? t.getMessage() : t.toString();
    responder.sendString(HttpResponseStatus.INTERNAL_SERVER_ERROR, message);
  }
}
/**
 * Disables Explore on the given dataset, running the operation as the dataset's
 * namespace user, and responds with the resulting query handle as JSON.
 * Errors are reported as HTTP 500 responses.
 */
private void disableDataset(HttpResponder responder, final DatasetId datasetId,
                            final DatasetSpecification spec) {
  try {
    QueryHandle handle = impersonator.doAs(datasetId, new Callable<QueryHandle>() {
      @Override
      public QueryHandle call() throws Exception {
        return exploreTableManager.disableDataset(datasetId, spec);
      }
    });
    JsonObject json = new JsonObject();
    json.addProperty("handle", handle.getHandle());
    responder.sendJson(HttpResponseStatus.OK, json.toString());
  } catch (Throwable e) {
    LOG.error("Got exception while trying to disable explore on dataset {}", datasetId, e);
    // e.getMessage() can be null; avoid sending a null response body.
    String message = e.getMessage() != null ? e.getMessage() : e.toString();
    responder.sendString(HttpResponseStatus.INTERNAL_SERVER_ERROR, message);
  }
}
/**
 * Disables Explore on the given dataset as the impersonated namespace user and
 * sends back the query handle; any failure becomes an HTTP 500 response.
 */
private void disableDataset(HttpResponder responder, final DatasetId datasetId,
                            final DatasetSpecification spec) {
  try {
    QueryHandle handle =
      impersonator.doAs(datasetId, () -> exploreTableManager.disableDataset(datasetId, spec));
    JsonObject result = new JsonObject();
    result.addProperty("handle", handle.getHandle());
    responder.sendJson(HttpResponseStatus.OK, result.toString());
  } catch (Throwable e) {
    LOG.error("Got exception while trying to disable explore on dataset {}", datasetId, e);
    responder.sendString(HttpResponseStatus.INTERNAL_SERVER_ERROR, e.getMessage());
  }
}
/**
 * Deletes the Explore namespace as the impersonated namespace user.
 * ExploreException and SQLException are rethrown unchanged because they are the
 * only checked exceptions the callable throws; anything else is propagated
 * unchecked (doAs declares the broader 'throws Exception').
 */
@Override
public QueryHandle execute(HttpRequest request, HttpResponder responder)
  throws IllegalArgumentException, SQLException, ExploreException, IOException {
  try {
    return impersonator.doAs(new NamespaceId(namespaceId), new Callable<QueryHandle>() {
      @Override
      public QueryHandle call() throws Exception {
        return exploreService.deleteNamespace(new NamespaceId(namespaceId));
      }
    });
  } catch (ExploreException | SQLException e) {
    // we know that the callable only throws the above two declared exceptions:
    throw e;
  } catch (Exception e) {
    throw Throwables.propagate(e);
  }
}
});
@Override protected void copyArtifact(ArtifactId artifactId, final ArtifactDetail artifactDetail, final File targetFile) throws IOException { try { impersonator.doAs(artifactId, () -> { Locations.linkOrCopy(artifactDetail.getDescriptor().getLocation(), targetFile); return null; }); } catch (Exception e) { Throwables.propagateIfPossible(e, IOException.class); // should not happen throw Throwables.propagate(e); } }
@Override protected void copyArtifact(ArtifactId artifactId, final ArtifactDetail artifactDetail, final File targetFile) throws IOException { try { impersonator.doAs(artifactId, () -> { Locations.linkOrCopy(artifactDetail.getDescriptor().getLocation(), targetFile); return null; }); } catch (Exception e) { Throwables.propagateIfPossible(e, IOException.class); // should not happen throw Throwables.propagate(e); } }
@Override public QueryHandle execute(HttpRequest request, HttpResponder responder) throws IllegalArgumentException, SQLException, ExploreException, IOException { try { return impersonator.doAs(new NamespaceId(namespaceId), new Callable<QueryHandle>() { @Override public QueryHandle call() throws Exception { return exploreService.deleteNamespace(new NamespaceId(namespaceId)); } }); } catch (ExploreException | SQLException e) { // we know that the callable only throws the above two declared exceptions: throw e; } catch (Exception e) { throw Throwables.propagate(e); } } });
/**
 * Obtains a DatasetAdmin for the given dataset instance (resolved as the
 * impersonated user), runs the given operation against it, and closes the admin
 * afterwards. The instantiator is scoped to this call via try-with-resources.
 *
 * @throws NotFoundException if no DatasetAdmin can be obtained for the instance
 */
private <T> T performDatasetAdmin(final DatasetId datasetInstanceId, Operation<T> operation) throws Exception {
  try (SystemDatasetInstantiator datasetInstantiator = datasetInstantiatorFactory.create()) {
    DatasetAdmin admin = impersonator.doAs(datasetInstanceId, (Callable<DatasetAdmin>) () -> {
      DatasetAdmin admin1 = datasetInstantiator.getDatasetAdmin(datasetInstanceId);
      if (admin1 == null) {
        throw new NotFoundException("Couldn't obtain DatasetAdmin for dataset instance " + datasetInstanceId);
      }
      // returns a DatasetAdmin that executes operations as a particular user, for a particular namespace
      return new ImpersonatingDatasetAdmin(admin1, impersonator, datasetInstanceId);
    });
    try {
      return operation.perform(admin);
    } finally {
      Closeables.closeQuietly(admin);
    }
  }
}
private <T> T performDatasetAdmin(final DatasetId datasetInstanceId, Operation<T> operation) throws Exception { try (SystemDatasetInstantiator datasetInstantiator = datasetInstantiatorFactory.create()) { DatasetAdmin admin = impersonator.doAs(datasetInstanceId, (Callable<DatasetAdmin>) () -> { DatasetAdmin admin1 = datasetInstantiator.getDatasetAdmin(datasetInstanceId); if (admin1 == null) { throw new NotFoundException("Couldn't obtain DatasetAdmin for dataset instance " + datasetInstanceId); } // returns a DatasetAdmin that executes operations as a particular user, for a particular namespace return new ImpersonatingDatasetAdmin(admin1, impersonator, datasetInstanceId); }); try { return operation.perform(admin); } finally { Closeables.closeQuietly(admin); } } }
@Override public TwillController start(final long timeout, final TimeUnit timeoutUnit) { try { return impersonator.doAs(programId, () -> { // Add secure tokens if (User.isHBaseSecurityEnabled(hConf) || UserGroupInformation.isSecurityEnabled()) { addSecureStore(YarnSecureStore.create(secureStoreRenewer.createCredentials())); } return new ImpersonatedTwillController(delegate.start(timeout, timeoutUnit), impersonator, programId); }); } catch (Exception e) { throw Throwables.propagate(e); } } }
/**
 * Starts the delegate as the impersonated program user and returns a controller
 * whose operations are likewise impersonated. When HBase or Hadoop security is
 * enabled, credentials are added to the secure store before starting.
 */
@Override
public TwillController start(final long timeout, final TimeUnit timeoutUnit) {
  try {
    return impersonator.doAs(programId, () -> {
      // Add secure tokens
      if (User.isHBaseSecurityEnabled(hConf) || UserGroupInformation.isSecurityEnabled()) {
        addSecureStore(YarnSecureStore.create(secureStoreRenewer.createCredentials()));
      }
      return new ImpersonatedTwillController(delegate.start(timeout, timeoutUnit), impersonator, programId);
    });
  } catch (Exception e) {
    throw Throwables.propagate(e);
  }
}
}