private static void addContextAndThrow(Builder userExceptionBuilder, List<String> context) {
  if (context != null && !context.isEmpty()) {
    // Context entries come as key/value pairs, so the list must have an even number of elements.
    Preconditions.checkArgument(context.size() % 2 == 0);
    ListIterator<String> iterator = context.listIterator();
    while (iterator.hasNext()) {
      userExceptionBuilder.addContext(iterator.next(), iterator.next());
    }
  }
  throw userExceptionBuilder.build(logger);
}
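A hedged usage sketch of addContextAndThrow: the caller, the table/plugin names, and the message text are hypothetical; only the even-length key/value layout of the context list and the builder calls come from the snippets themselves.

// Hypothetical caller, assumed to live in the same class as addContextAndThrow.
private static void failOnMissingTable(String tableName, String pluginName) {
  addContextAndThrow(
      UserException.validationError().message("Table %s not found", tableName),
      // Pairs are consumed two at a time as addContext(key, value).
      ImmutableList.of(
          "table name", tableName,
          "plugin name", pluginName));
}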
private LogicalExpression doUnknown(RexNode o) {
  final String message = String.format(UNSUPPORTED_REX_NODE_ERROR + "RexNode Class: %s, RexNode Digest: %s",
      o.getClass().getName(), o.toString());
  if (throwUserException) {
    throw UserException.planError().message(message).build(logger);
  } else {
    throw new IllegalStateException(message);
  }
}
public AggregateRelBase(RelOptCluster cluster, RelTraitSet traits, RelNode child, boolean indicator,
    ImmutableBitSet groupSet, List<ImmutableBitSet> groupSets, List<AggregateCall> aggCalls) {
  super(cluster, traits, child, indicator, groupSet, groupSets, aggCalls);
  aggCalls.forEach(a -> {
    if (a.filterArg >= 0) {
      throw UserException.unsupportedError()
          .message("Inline aggregate filtering is not currently supported")
          .build(logger);
    }
  });
}
@Override
public boolean createOrUpdateDataset(NamespaceService userNamespaceService, NamespaceKey source,
    NamespaceKey datasetPath, DatasetConfig datasetConfig, NamespaceAttribute... attributes) throws NamespaceException {
  ManagedStoragePlugin plugin = pluginRetriever.getPlugin(source.getRoot(), true);
  if (plugin == null) {
    throw UserException.validationError().message("Unknown source %s", datasetPath.getRoot()).build(logger);
  }
  return datasets.createOrUpdateDataset(userNamespaceService, plugin, source, datasetPath, datasetConfig, attributes);
}
@Override
public RecordWriter getRecordWriter(OperatorContext context, EasyWriter writer) throws IOException {
  throw UserException.unsupportedError()
      .message("Writing output in Avro format is not supported")
      .build(logger);
}
@Override
public void checkCancel() {
  if (cancelFlag.isCancelRequested()) {
    UserException.Builder builder = UserException.planError()
        .message("Query was cancelled because planning time exceeded %d seconds", cancelFlag.getTimeoutInSecs());
    if (phase != null) {
      builder = builder.addContext("Planner Phase", phase.description);
    }
    throw builder.build(logger);
  }
  super.checkCancel();
}
@Override
public boolean refreshSource(NamespaceKey source, MetadataPolicy metadataPolicy, UpdateType updateType) throws NamespaceException {
  ManagedStoragePlugin plugin = plugins.get(source.getRoot());
  if (plugin == null) {
    throw UserException.validationError().message("Unknown source %s", source.getRoot()).build(logger);
  }
  return plugin.refresh(updateType, metadataPolicy);
}
@Override
public Void run() throws Exception {
  try {
    connection = ConnectionFactory.createConnection(config);
    logger.info("Connection created: {}.", connection);
  } catch (IOException ex) {
    throw UserException.dataReadError(ex).message("Failure while connecting to HBase.").build(logger);
  }
  return null;
}
},
/**
 * Builds an exception that can be thrown by the caller.
 */
public RuntimeException build() {
  return b.build(logger);
}
}
@Override
public void startPartition(WritePartition partition) {
  if (!partition.isSinglePartition()) {
    throw UserException.dataWriteError().message("Arrow writer doesn't support data partitioning.").build(logger);
  }
}
@Override
public void validate(OptionValue v) {
  super.validate(v);
  if (v.getNumVal() > max || v.getNumVal() < min) {
    throw UserException.validationError()
        .message(String.format("Option %s must be between %d and %d.", getOptionName(), min, max))
        .build(logger);
  }
}
}
private static <T extends CompleteWork> void verify(final List<NodeEndpoint> endpoints, final List<T> units,
    final boolean condition, final String msg, Object... args) {
  if (!condition) {
    throw UserException.resourceError()
        .message(msg, args)
        .addContext("endpoints %s", endpoints)
        .addContext("workunits %s", units)
        .build(logger);
  }
}
}
@Override
public List<String> getDatabases(boolean ignoreAuthzErrors) throws TException {
  try {
    authorizer.authorizeShowDatabases();
  } catch (final HiveAccessControlException e) {
    if (ignoreAuthzErrors) {
      return Collections.emptyList();
    }
    throw UserException.permissionError(e).build(logger);
  }
  return super.getDatabases(ignoreAuthzErrors);
}
public TransformRuleWrapper<ExtractRule> wrapRule(ExtractRule extractRule) {
  switch (extractRule.getType()) {
  case pattern:
    return new ExtractPatternTransformRuleWrapper(extractRule);
  case position:
    return new ExtractPositionTransformRuleWrapper(extractRule);
  default:
    throw UserException.unsupportedError()
        .message("Unexpected extract rule type: %s", extractRule.getType())
        .build(logger);
  }
}
@Override
public void createDataset(NamespaceKey key, com.google.common.base.Function<DatasetConfig, DatasetConfig> datasetMutator) {
  ManagedStoragePlugin plugin = pluginRetriever.getPlugin(key.getRoot(), true);
  if (plugin == null) {
    throw UserException.validationError().message("Unknown source %s", key.getRoot()).build(logger);
  }
  datasets.createDataset(key, plugin, datasetMutator);
}
private ArrowBuf allocateHelper(BufferAllocator alloc, final int requestSize) throws Exception {
  try {
    return alloc.buffer(requestSize);
  } catch (OutOfMemoryException e) {
    // Attach the root allocator's current usage to the error context before throwing.
    UserException.Builder b = UserException.memoryError(e);
    rootAllocator.addUsageToExceptionContext(b);
    throw b.build(logger);
  }
}
private static final RpcException newRPCException(NodeEndpoint endpoint, IOException ioe) {
  UserRemoteException ure = UserRemoteException.create(UserException
      .ioExceptionError(ioe)
      .addIdentity(endpoint)
      .build(logger)
      .getOrCreatePBError(false));
  return new RpcException(ure);
}
public static void throwUnsupportedHiveDataTypeError(String unsupportedType) {
  throw UserException.unsupportedError()
      .message(ERROR_MSG, unsupportedType)
      .build(logger);
}
private void confirmLast() throws IOException {
  parser.nextToken();
  if (!parser.isClosed()) {
    throw getExceptionWithContext(UserException.dataReadError(), currentFieldName, null)
        .message("Dremio attempted to unwrap a toplevel list in your document. However, it appears that there is "
            + "trailing content after this top level list. Dremio only supports querying a set of distinct maps or a "
            + "single json array with multiple inner maps.")
        .build(logger);
  }
}
public SqlNode parse(String sql) {
  try {
    SqlParser parser = SqlParser.create(sql, parserConfig);
    return parser.parseStmt().accept(STRING_LITERAL_CONVERTER);
  } catch (SqlParseException e) {
    UserException.Builder builder = SqlExceptionHelper.parseError(sql, e);
    builder.message(isInnerQuery ? SqlExceptionHelper.INNER_QUERY_PARSING_ERROR : SqlExceptionHelper.QUERY_PARSING_ERROR);
    throw builder.build(logger);
  }
}
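The snippets above all follow the same builder pattern. The following minimal sketch pulls the pieces together under stated assumptions: the class name, field names, message text, and limit check are made up for illustration, and the import path is the usual Dremio location for UserException; only the builder calls themselves (validationError, message with format args, addContext, build(logger)) are taken from the snippets.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.dremio.common.exceptions.UserException;

// Hypothetical class, for illustration only.
public class UserExceptionPatternSketch {
  private static final Logger logger = LoggerFactory.getLogger(UserExceptionPatternSketch.class);

  // Illustrative check: the source name and row limit are made-up values.
  static void checkRowLimit(String sourceName, long rowCount, long maxRows) {
    if (rowCount > maxRows) {
      throw UserException.validationError()                                        // pick the error category
          .message("Source %s returned %d rows, limit is %d", sourceName, rowCount, maxRows)
          .addContext("source name", sourceName)                                   // optional key/value context
          .build(logger);                                                          // logs and returns the throwable UserException
    }
  }
}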