/**
 * A drop failure is ignorable when the index is absent and the caller asked for IF_EXISTS
 * semantics.
 *
 * @param e the failure raised by the drop attempt
 * @return true when the error is INDEX_DOES_NOT_EXIST and the IF_EXISTS option is set
 */
private boolean isIgnorable(HyracksDataException e) {
    final boolean indexMissing = e.getErrorCode() == INDEX_DOES_NOT_EXIST;
    return indexMissing && options.contains(IF_EXISTS);
}
/**
 * Searches the cause chain of {@code e} for a {@link HyracksDataException} carrying the given
 * component and error code.
 *
 * @param e the head of the exception chain to inspect
 * @param component the expected exception component
 * @param code the expected error code
 * @return the deepest matching {@link HyracksDataException} in the chain, or the root cause
 *         when no throwable in the chain matches
 */
public static Throwable getCause(Throwable e, String component, int code) {
    Throwable current = e;
    Throwable matched = isMatch(current, component, code) ? current : null;
    Throwable cause = e.getCause();
    // Stop on a self-referential cause so a length-one cycle cannot loop forever.
    while (cause != null && cause != current) {
        current = cause;
        if (isMatch(current, component, code)) {
            // Keep overwriting: the original contract returns the deepest matching throwable.
            matched = current;
        }
        cause = current.getCause();
    }
    return matched == null ? current : matched;
}

/** Returns true when {@code t} is a HyracksDataException with the given component and code. */
private static boolean isMatch(Throwable t, String component, int code) {
    return t instanceof HyracksDataException && ((HyracksDataException) t).getErrorCode() == code
            && ((HyracksDataException) t).getComponent().equals(component);
}
/**
 * Decides whether a failed drop should be retried. A retry is only attempted for
 * CANNOT_DROP_IN_USE_INDEX when the WAIT_ON_IN_USE option is set and wait budget remains;
 * each retry sleeps for DROP_ATTEMPT_WAIT_TIME_MILLIS and charges it against the budget.
 *
 * @param e the failure raised by the drop attempt
 * @return true when the caller should retry the drop after the wait
 * @throws HyracksDataException when the wait is interrupted
 */
private boolean canRetry(HyracksDataException e) throws HyracksDataException {
    if (e.getErrorCode() == CANNOT_DROP_IN_USE_INDEX && options.contains(WAIT_ON_IN_USE)) {
        if (maxWaitTimeMillis <= 0) {
            // Wait budget exhausted; give up.
            return false;
        }
        try {
            TimeUnit.MILLISECONDS.sleep(DROP_ATTEMPT_WAIT_TIME_MILLIS);
            maxWaitTimeMillis -= DROP_ATTEMPT_WAIT_TIME_MILLIS;
            return true;
        } catch (InterruptedException e1) {
            // Restore the interrupt status so callers up the stack can observe it.
            Thread.currentThread().interrupt();
            throw HyracksDataException.create(e1);
        }
    }
    return false;
}
}
@Override public ITupleReference delete(ITupleReference tuple) throws HyracksDataException { try { buddyBTreeBulkLoader.add(tuple); } catch (HyracksDataException e) { //deleting a key multiple times is OK if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) { cleanupArtifacts(); throw e; } } return tuple; }
/**
 * Fetches the status of this job's result set, or null when it cannot be obtained.
 * A NO_RESULT_SET error is expected (the set may simply not exist yet) and is not logged;
 * every other failure is logged at WARN and swallowed.
 */
@Override
public Status getResultStatus() {
    try {
        return resultDirectory.getResultStatus(jobId, resultSetId);
    } catch (HyracksDataException hde) {
        if (hde.getErrorCode() != ErrorCode.NO_RESULT_SET) {
            LOGGER.log(Level.WARN, "Exception retrieving result set for job " + jobId, hde);
        }
    } catch (Exception ex) {
        LOGGER.log(Level.WARN, "Exception retrieving result set for job " + jobId, ex);
    }
    return null;
}
/**
 * Removes a feed policy from the 'FeedPolicy' metadata dataset.
 * A missing key is reported as an unknown policy; any other failure is wrapped as-is.
 */
@Override
public void dropFeedPolicy(TxnId txnId, String dataverseName, String policyName)
        throws AlgebricksException, RemoteException {
    try {
        ITupleReference key = createTuple(dataverseName, policyName);
        ITupleReference victim = getTupleToBeDeleted(txnId, MetadataPrimaryIndexes.FEED_POLICY_DATASET, key);
        deleteTupleFromIndex(txnId, MetadataPrimaryIndexes.FEED_POLICY_DATASET, victim);
    } catch (HyracksDataException e) {
        boolean missingKey = e.getComponent().equals(ErrorCode.HYRACKS)
                && e.getErrorCode() == ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY;
        if (missingKey) {
            throw new AlgebricksException("Unknown feed policy " + policyName, e);
        }
        throw new AlgebricksException(e);
    }
}
/**
 * Inserts an index entity into the 'Index' metadata dataset.
 * A duplicate key is reported as a name clash; any other failure is wrapped as-is.
 */
@Override
public void addIndex(TxnId txnId, Index index) throws AlgebricksException {
    try {
        IndexTupleTranslator translator = tupleTranslatorProvider.getIndexTupleTranslator(txnId, this, true);
        ITupleReference indexTuple = translator.getTupleFromMetadataEntity(index);
        insertTupleIntoIndex(txnId, MetadataPrimaryIndexes.INDEX_DATASET, indexTuple);
    } catch (HyracksDataException e) {
        boolean duplicate = e.getComponent().equals(ErrorCode.HYRACKS)
                && e.getErrorCode() == ErrorCode.DUPLICATE_KEY;
        if (duplicate) {
            throw new AlgebricksException("An index with name '" + index.getIndexName() + "' already exists.", e);
        }
        throw new AlgebricksException(e);
    }
}
/**
 * Inserts a node entity into the 'Node' metadata dataset.
 * A duplicate key is reported as a name clash; any other failure is wrapped as-is.
 */
@Override
public void addNode(TxnId txnId, Node node) throws AlgebricksException, RemoteException {
    try {
        NodeTupleTranslator translator = tupleTranslatorProvider.getNodeTupleTranslator(true);
        ITupleReference nodeTuple = translator.getTupleFromMetadataEntity(node);
        insertTupleIntoIndex(txnId, MetadataPrimaryIndexes.NODE_DATASET, nodeTuple);
    } catch (HyracksDataException e) {
        boolean duplicate = e.getComponent().equals(ErrorCode.HYRACKS)
                && e.getErrorCode() == ErrorCode.DUPLICATE_KEY;
        if (duplicate) {
            throw new AlgebricksException("A node with name '" + node.getNodeName() + "' already exists.", e);
        }
        throw new AlgebricksException(e);
    }
}
/**
 * Inserts a dataverse entity into the 'Dataverse' metadata dataset.
 * A duplicate key is reported as a name clash; any other failure is wrapped as-is.
 */
@Override
public void addDataverse(TxnId txnId, Dataverse dataverse) throws AlgebricksException, RemoteException {
    try {
        DataverseTupleTranslator translator = tupleTranslatorProvider.getDataverseTupleTranslator(true);
        ITupleReference dataverseTuple = translator.getTupleFromMetadataEntity(dataverse);
        insertTupleIntoIndex(txnId, MetadataPrimaryIndexes.DATAVERSE_DATASET, dataverseTuple);
    } catch (HyracksDataException e) {
        boolean duplicate = e.getComponent().equals(ErrorCode.HYRACKS)
                && e.getErrorCode() == ErrorCode.DUPLICATE_KEY;
        if (duplicate) {
            throw new AlgebricksException(
                    "A dataverse with this name " + dataverse.getDataverseName() + " already exists.", e);
        }
        throw new AlgebricksException(e);
    }
}
private static void undoUpsertOrDelete(ILSMIndexAccessor indexAccessor, ILogRecord logRecord) throws HyracksDataException { if (logRecord.getOldValue() == null) { try { indexAccessor.forcePhysicalDelete(logRecord.getNewValue()); } catch (HyracksDataException hde) { // Since we're undoing according the write-ahead log, the actual upserting tuple // might not have been written to memory yet. if (hde.getErrorCode() != ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) { throw hde; } } } else { indexAccessor.forceUpsert(logRecord.getOldValue()); } }
/**
 * Applies the given modification (insert/delete/upsert) to the 'NodeGroup' metadata dataset.
 * A duplicate key is reported as a name clash; any other failure is wrapped as-is.
 */
@Override
public void modifyNodeGroup(TxnId txnId, NodeGroup nodeGroup, Operation modificationOp)
        throws AlgebricksException, RemoteException {
    try {
        NodeGroupTupleTranslator translator = tupleTranslatorProvider.getNodeGroupTupleTranslator(true);
        ITupleReference groupTuple = translator.getTupleFromMetadataEntity(nodeGroup);
        modifyMetadataIndex(modificationOp, txnId, MetadataPrimaryIndexes.NODEGROUP_DATASET, groupTuple);
    } catch (HyracksDataException e) {
        boolean duplicate = e.getComponent().equals(ErrorCode.HYRACKS)
                && e.getErrorCode() == ErrorCode.DUPLICATE_KEY;
        if (duplicate) {
            throw new AlgebricksException(
                    "A nodegroup with name '" + nodeGroup.getNodeGroupName() + "' already exists.", e);
        }
        throw new AlgebricksException(e);
    }
}
/**
 * Inserts a datatype entity into the 'Datatype' metadata dataset.
 * A duplicate key is reported as a name clash; any other failure is wrapped as-is.
 */
@Override
public void addDatatype(TxnId txnId, Datatype datatype) throws AlgebricksException, RemoteException {
    try {
        DatatypeTupleTranslator translator =
                tupleTranslatorProvider.getDataTypeTupleTranslator(txnId, this, true);
        ITupleReference datatypeTuple = translator.getTupleFromMetadataEntity(datatype);
        insertTupleIntoIndex(txnId, MetadataPrimaryIndexes.DATATYPE_DATASET, datatypeTuple);
    } catch (HyracksDataException e) {
        boolean duplicate = e.getComponent().equals(ErrorCode.HYRACKS)
                && e.getErrorCode() == ErrorCode.DUPLICATE_KEY;
        if (duplicate) {
            throw new AlgebricksException(
                    "A datatype with name '" + datatype.getDatatypeName() + "' already exists.", e);
        }
        throw new AlgebricksException(e);
    }
}
/**
 * Copies an existing HyracksDataException, attaching the given node id while preserving the
 * component, error code, message, cause, stack trace, and message parameters of the original.
 *
 * @param e the exception to copy
 * @param nodeId the node id to record on the copy
 * @return a new HyracksDataException identical to {@code e} except for the node id
 */
public static HyracksDataException create(HyracksDataException e, String nodeId) {
    return new HyracksDataException(e.getComponent(), e.getErrorCode(), e.getMessage(), e.getCause(), nodeId,
            e.getStackTrace(), e.getParams());
}
}
private void dropNonExisting(IHyracksTaskContext ctx, IndexDataflowHelperFactory helperFactory) throws Exception { dropFailed.set(false); // Dropping non-existing index IndexDropOperatorNodePushable dropNonExistingOp = new IndexDropOperatorNodePushable(helperFactory, EnumSet.noneOf(DropOption.class), ctx, 0); try { dropNonExistingOp.initialize(); } catch (HyracksDataException e) { e.printStackTrace(); Assert.assertEquals(ErrorCode.INDEX_DOES_NOT_EXIST, e.getErrorCode()); dropFailed.set(true); } Assert.assertTrue(dropFailed.get()); }
@Override public void addLibrary(TxnId txnId, Library library) throws AlgebricksException, RemoteException { try { // Insert into the 'Library' dataset. LibraryTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getLibraryTupleTranslator(true); ITupleReference libraryTuple = tupleReaderWriter.getTupleFromMetadataEntity(library); insertTupleIntoIndex(txnId, MetadataPrimaryIndexes.LIBRARY_DATASET, libraryTuple); } catch (HyracksDataException e) { if (e.getComponent().equals(ErrorCode.HYRACKS) && e.getErrorCode() == ErrorCode.DUPLICATE_KEY) { throw new AlgebricksException("A library with this name " + library.getDataverseName() + " already exists in dataverse '" + library.getDataverseName() + "'.", e); } else { throw new AlgebricksException(e); } } }
@Override public void addFeedPolicy(TxnId txnId, FeedPolicyEntity feedPolicy) throws AlgebricksException, RemoteException { try { // Insert into the 'FeedPolicy' dataset. FeedPolicyTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getFeedPolicyTupleTranslator(true); ITupleReference feedPolicyTuple = tupleReaderWriter.getTupleFromMetadataEntity(feedPolicy); insertTupleIntoIndex(txnId, MetadataPrimaryIndexes.FEED_POLICY_DATASET, feedPolicyTuple); } catch (HyracksDataException e) { if (e.getComponent().equals(ErrorCode.HYRACKS) && e.getErrorCode() == ErrorCode.DUPLICATE_KEY) { throw new AlgebricksException("A feed policy with this name " + feedPolicy.getPolicyName() + " already exists in dataverse '" + feedPolicy.getPolicyName() + "'.", e); } else { throw new AlgebricksException(e); } } }
@Override public void addFunction(TxnId txnId, Function function) throws AlgebricksException, RemoteException { try { // Insert into the 'function' dataset. FunctionTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getFunctionTupleTranslator(true); ITupleReference functionTuple = tupleReaderWriter.getTupleFromMetadataEntity(function); insertTupleIntoIndex(txnId, MetadataPrimaryIndexes.FUNCTION_DATASET, functionTuple); } catch (HyracksDataException e) { if (e.getComponent().equals(ErrorCode.HYRACKS) && e.getErrorCode() == ErrorCode.DUPLICATE_KEY) { throw new AlgebricksException("A function with this name " + function.getName() + " and arity " + function.getArity() + " already exists in dataverse '" + function.getDataverseName() + "'.", e); } else { throw new AlgebricksException(e); } } }
/**
 * Asserts that the given action failed with the expected error code, rethrowing any
 * assertion failure with a message that names the expected and actual failures.
 *
 * @param action the completed action to inspect
 * @param errorCode the error code the action is expected to have failed with
 * @throws Exception when the action did not fail as expected
 */
private void assertFailure(Action action, int errorCode) throws Exception {
    HyracksDataException exception = action.getFailure();
    try {
        Assert.assertTrue(action.hasFailed());
        Assert.assertNotNull(exception);
        Assert.assertEquals(errorCode, exception.getErrorCode());
    } catch (Exception | AssertionError e) {
        // Bug fix: JUnit assertion failures are AssertionErrors (java.lang.Error), so the
        // original catch (Exception) never intercepted them and this diagnostic message was
        // never produced. Also chain the cause so the underlying failure is not lost.
        throw new Exception("Expected failure: " + errorCode + ". Found failure: " + exception, e);
    }
}
/**
 * Deletes all (token, document) entries produced by tokenizing the given tuple from the
 * in-memory inverted index's BTree.
 *
 * @param tuple the document tuple whose token entries are removed
 * @param btreeAccessor accessor for the backing BTree
 * @param ictx operation context; must be an InMemoryInvertedIndexOpContext
 * @throws HyracksDataException on any BTree failure other than a missing key
 */
public void delete(ITupleReference tuple, BTreeAccessor btreeAccessor, IIndexOperationContext ictx) throws HyracksDataException {
    InMemoryInvertedIndexOpContext ctx = (InMemoryInvertedIndexOpContext) ictx;
    ctx.getTupleIter().reset(tuple);
    while (ctx.getTupleIter().hasNext()) {
        ctx.getTupleIter().next();
        ITupleReference deleteTuple = ctx.getTupleIter().getTuple();
        try {
            btreeAccessor.delete(deleteTuple);
        } catch (HyracksDataException e) {
            // A missing key is ignored because a document may contain duplicate tokens, so
            // the same (token, doc) entry can be deleted more than once. Any other failure
            // is propagated.
            if (e.getErrorCode() != ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
                throw e;
            }
        }
    }
}
/**
 * Builds a tuple whose every field holds {@code value} and upserts it through the test
 * context's accessor. A duplicate-key failure is tolerated; anything else is propagated.
 */
protected void upsertTuple(OrderedIndexTestContext ctx, ISerializerDeserializer[] fieldSerdes, Object value)
        throws HyracksDataException {
    final int fieldCount = ctx.getFieldCount();
    ArrayTupleBuilder builder = new ArrayTupleBuilder(fieldCount);
    for (int f = 0; f < fieldCount; f++) {
        builder.addField(fieldSerdes[f], value);
    }
    ArrayTupleReference tuple = new ArrayTupleReference();
    tuple.reset(builder.getFieldEndOffsets(), builder.getByteArray());
    try {
        ctx.getIndexAccessor().upsert(tuple);
    } catch (HyracksDataException e) {
        // Upserting an existing key is acceptable for this test helper.
        if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
            throw e;
        }
    }
}