/**
 * Runs the given unit of {@code work} under a fresh metadata transaction that is installed
 * on {@code metadataProvider}.
 *
 * NOTE(review): there is no commit here — on success the transaction is left to the caller;
 * presumably {@code work.run()} commits it internally. TODO confirm against the Work
 * implementations before relying on this.
 *
 * @param metadataProvider provider that receives the new transaction context
 * @param work             the operation to execute inside the transaction
 * @throws Exception whatever {@code work.run()} throws; the transaction is aborted first
 */
private static void runMetadataTransaction(MetadataProvider metadataProvider, Work work) throws Exception {
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    try {
        // Performs the actual work.
        work.run();
    } catch (Exception e) {
        // Abort the metadata transaction on any failure, then propagate the original exception.
        abort(e, e, mdTxnCtx);
        throw e;
    }
}
/**
 * Fetches, under a short-lived metadata transaction, all datasets of the given dataverse
 * that are candidates for rebalance.
 *
 * @param dataverseName the dataverse to scan
 * @return the rebalance-eligible datasets of that dataverse
 * @throws Exception if the metadata read fails; the transaction is aborted first
 */
private List<Dataset> getAllDatasetsForRebalance(String dataverseName) throws Exception {
    MetadataTransactionContext txnCtx = MetadataManager.INSTANCE.beginTransaction();
    try {
        List<Dataset> result = getDatasetsInDataverseForRebalance(dataverseName, txnCtx);
        // Read-only scan: commit releases the metadata transaction.
        MetadataManager.INSTANCE.commitTransaction(txnCtx);
        return result;
    } catch (Exception e) {
        MetadataManager.INSTANCE.abortTransaction(txnCtx);
        throw e;
    }
}
/**
 * Fetches, under a short-lived metadata transaction, the rebalance-eligible datasets of
 * every dataverse in the instance.
 *
 * @return all rebalance-eligible datasets across all dataverses
 * @throws Exception if any metadata read fails; the transaction is aborted first
 */
private List<Dataset> getAllDatasetsForRebalance() throws Exception {
    List<Dataset> allDatasets = new ArrayList<>();
    MetadataTransactionContext txnCtx = MetadataManager.INSTANCE.beginTransaction();
    try {
        // Walk every dataverse and accumulate its eligible datasets.
        for (Dataverse dataverse : MetadataManager.INSTANCE.getDataverses(txnCtx)) {
            allDatasets.addAll(getDatasetsInDataverseForRebalance(dataverse.getDataverseName(), txnCtx));
        }
        MetadataManager.INSTANCE.commitTransaction(txnCtx);
    } catch (Exception e) {
        MetadataManager.INSTANCE.abortTransaction(txnCtx);
        throw e;
    }
    return allDatasets;
}
/** * Perform recovery of DDL operations metadata records */ public static void startDDLRecovery() throws AlgebricksException { // #. clean up any record which has pendingAdd/DelOp flag // as traversing all records from DATAVERSE_DATASET to DATASET_DATASET, and then // to INDEX_DATASET. MetadataTransactionContext mdTxnCtx = null; LOGGER.info("Starting DDL recovery ..."); try { mdTxnCtx = MetadataManager.INSTANCE.beginTransaction(); List<Dataverse> dataverses = MetadataManager.INSTANCE.getDataverses(mdTxnCtx); for (Dataverse dataverse : dataverses) { recoverDataverse(mdTxnCtx, dataverse); } MetadataManager.INSTANCE.commitTransaction(mdTxnCtx); LOGGER.info("Completed DDL recovery."); } catch (Exception e) { try { LOGGER.error("Failure during DDL recovery", e); MetadataManager.INSTANCE.abortTransaction(mdTxnCtx); } catch (Exception e2) { e.addSuppressed(e2); } throw MetadataException.create(e); } }
/**
 * Performs global recovery: initializes the metadata manager, runs the recovery work under
 * a metadata transaction, and refreshes cluster state on success.
 *
 * @param appCtx the CC application context
 * @throws HyracksDataException wrapping any failure during recovery
 */
protected void recover(ICcApplicationContext appCtx) throws HyracksDataException {
    try {
        LOGGER.info("Starting Global Recovery");
        MetadataManager.INSTANCE.init();
        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        try {
            // doRecovery may replace the transaction context; commit whatever it returns.
            mdTxnCtx = doRecovery(appCtx, mdTxnCtx);
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        } catch (Exception e) {
            // Fix: previously a failure here leaked the open metadata transaction.
            // Abort the current context so it is not left dangling; if the abort itself
            // fails, keep the original exception primary.
            try {
                MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
            } catch (Exception e2) {
                e.addSuppressed(e2);
            }
            throw e;
        }
        recoveryCompleted = true;
        recovering = false;
        LOGGER.info("Global Recovery Completed. Refreshing cluster state...");
        appCtx.getClusterStateManager().refreshState();
    } catch (Exception e) {
        throw HyracksDataException.create(e);
    }
}
(ICcApplicationContext) integrationUtil.getClusterControllerService().getApplicationContext(); final MetadataProvider metadataProvider = new MetadataProvider(appCtx, null); final MetadataTransactionContext mdTxn = MetadataManager.INSTANCE.beginTransaction(); metadataProvider.setMetadataTxnContext(mdTxn); final String nodeGroupName = "ng"; transactor.join(); final MetadataTransactionContext readMdTxn = MetadataManager.INSTANCE.beginTransaction(); try { final NodeGroup nodegroup = MetadataManager.INSTANCE.getNodegroup(readMdTxn, nodeGroupName);
/**
 * Handles a CREATE DATAVERSE statement: starts a metadata transaction, acquires a read
 * lock on the dataverse name, and delegates the actual creation.
 *
 * NOTE(review): no commit appears here — presumably doCreateDataverseStatement commits
 * the transaction on success; confirm before modifying this flow.
 *
 * @param metadataProvider  provider that receives the new transaction context
 * @param stmt              the statement, expected to be a CreateDataverseStatement
 * @param requestParameters request parameters (unused in this handler)
 * @throws Exception on failure; the metadata transaction is aborted first
 */
protected void handleCreateDataverseStatement(MetadataProvider metadataProvider, Statement stmt,
        IRequestParameters requestParameters) throws Exception {
    CreateDataverseStatement stmtCreateDataverse = (CreateDataverseStatement) stmt;
    String dvName = stmtCreateDataverse.getDataverseName().getValue();
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    // Read lock is sufficient: creation does not conflict with other readers of the name.
    lockManager.acquireDataverseReadLock(metadataProvider.getLocks(), dvName);
    try {
        doCreateDataverseStatement(mdTxnCtx, metadataProvider, stmtCreateDataverse);
    } catch (Exception e) {
        abort(e, e, mdTxnCtx);
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
/**
 * Handles a USE DATAVERSE statement: looks up the named dataverse under a metadata
 * transaction and returns it.
 *
 * @param metadataProvider provider that receives the new transaction context
 * @param stmt             the statement, expected to be a DataverseDecl
 * @return the resolved dataverse
 * @throws Exception a MetadataException (METADATA_ERROR) wrapping any failure, including
 *                   the unknown-dataverse case; the transaction is aborted first
 */
protected Dataverse handleUseDataverseStatement(MetadataProvider metadataProvider, Statement stmt) throws Exception {
    DataverseDecl dataverseDecl = (DataverseDecl) stmt;
    SourceLocation sourceLoc = dataverseDecl.getSourceLocation();
    String dataverseName = dataverseDecl.getDataverseName().getValue();
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    lockManager.acquireDataverseReadLock(metadataProvider.getLocks(), dataverseName);
    try {
        Dataverse dataverse =
                MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(), dataverseName);
        if (dataverse == null) {
            // Thrown inside the try on purpose: the catch below aborts the transaction
            // and re-wraps this as METADATA_ERROR (matching the original behavior).
            throw new MetadataException(ErrorCode.UNKNOWN_DATAVERSE, sourceLoc, dataverseName);
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        return dataverse;
    } catch (Exception e) {
        abort(e, e, mdTxnCtx);
        throw new MetadataException(ErrorCode.METADATA_ERROR, e, sourceLoc, e.toString());
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
@Test public void abortMetadataTxn() throws Exception { ICcApplicationContext appCtx = (ICcApplicationContext) integrationUtil.getClusterControllerService().getApplicationContext(); final MetadataProvider metadataProvider = new MetadataProvider(appCtx, null); final MetadataTransactionContext mdTxn = MetadataManager.INSTANCE.beginTransaction(); metadataProvider.setMetadataTxnContext(mdTxn); final String nodeGroupName = "ng"; try { final List<String> ngNodes = Arrays.asList("asterix_nc1"); MetadataManager.INSTANCE.addNodegroup(mdTxn, new NodeGroup(nodeGroupName, ngNodes)); MetadataManager.INSTANCE.abortTransaction(mdTxn); } finally { metadataProvider.getLocks().unlock(); } // ensure that the node group was not added final MetadataTransactionContext readMdTxn = MetadataManager.INSTANCE.beginTransaction(); try { final NodeGroup nodegroup = MetadataManager.INSTANCE.getNodegroup(readMdTxn, nodeGroupName); if (nodegroup != null) { throw new AssertionError("nodegroup was found after metadata txn was aborted"); } } finally { MetadataManager.INSTANCE.commitTransaction(readMdTxn); } }
/**
 * Gets the file splits of {@code dataset}.
 *
 * @param integrationUtil the running integration cluster
 * @param dataset         the dataset whose splits are requested
 * @return the file splits of the dataset
 * @throws RemoteException
 * @throws AlgebricksException
 */
public static FileSplit[] getDatasetSplits(AsterixHyracksIntegrationUtil integrationUtil, Dataset dataset)
        throws RemoteException, AlgebricksException {
    final ICcApplicationContext ccAppCtx =
            (ICcApplicationContext) integrationUtil.getClusterControllerService().getApplicationContext();
    final MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    try {
        return SplitsAndConstraintsUtil.getIndexSplits(dataset, dataset.getDatasetName(), mdTxnCtx,
                ccAppCtx.getClusterStateManager());
    } finally {
        // Read-only lookup: committing in finally releases the transaction on all paths.
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    }
}
/**
 * Handles a DROP NODEGROUP statement under a metadata transaction and a node-group
 * write lock. Honors IF EXISTS when the node group is absent.
 *
 * @param metadataProvider provider that receives the new transaction context
 * @param stmt             the statement, expected to be a NodeGroupDropStatement
 * @throws Exception on failure; the metadata transaction is aborted first
 */
protected void handleNodegroupDropStatement(MetadataProvider metadataProvider, Statement stmt) throws Exception {
    NodeGroupDropStatement dropStmt = (NodeGroupDropStatement) stmt;
    SourceLocation sourceLoc = dropStmt.getSourceLocation();
    String nodegroupName = dropStmt.getNodeGroupName().getValue();
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    lockManager.acquireNodeGroupWriteLock(metadataProvider.getLocks(), nodegroupName);
    try {
        NodeGroup nodeGroup = MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, nodegroupName);
        if (nodeGroup != null) {
            MetadataManager.INSTANCE.dropNodegroup(mdTxnCtx, nodegroupName, false);
        } else if (!dropStmt.getIfExists()) {
            // Missing node group is only an error without IF EXISTS.
            throw new CompilationException(ErrorCode.UNKNOWN_NODEGROUP, sourceLoc, nodegroupName);
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        abort(e, e, mdTxnCtx);
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
/**
 * Handles a DROP TYPE statement under a metadata transaction and the drop-type lock
 * protocol. Honors IF EXISTS when the datatype is absent.
 *
 * @param metadataProvider provider that receives the new transaction context
 * @param stmt             the statement, expected to be a TypeDropStatement
 * @throws Exception on failure; the metadata transaction is aborted first
 */
protected void handleTypeDropStatement(MetadataProvider metadataProvider, Statement stmt) throws Exception {
    TypeDropStatement dropStmt = (TypeDropStatement) stmt;
    SourceLocation sourceLoc = dropStmt.getSourceLocation();
    String dataverseName = getActiveDataverse(dropStmt.getDataverseName());
    String typeName = dropStmt.getTypeName().getValue();
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    MetadataLockUtil.dropTypeBegin(lockManager, metadataProvider.getLocks(), dataverseName,
            dataverseName + "." + typeName);
    try {
        Datatype datatype = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, dataverseName, typeName);
        if (datatype != null) {
            MetadataManager.INSTANCE.dropDatatype(mdTxnCtx, dataverseName, typeName);
        } else if (!dropStmt.getIfExists()) {
            // Missing type is only an error without IF EXISTS.
            throw new CompilationException(ErrorCode.UNKNOWN_TYPE, sourceLoc, typeName);
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        abort(e, e, mdTxnCtx);
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
/**
 * Gets the reference of dataset {@code datasetName} from metadata.
 *
 * @param integrationUtil the running integration cluster
 * @param datasetName     the dataset name (looked up in the default dataverse)
 * @return the dataset reference if found. Otherwise null.
 * @throws AlgebricksException
 * @throws RemoteException
 */
public static Dataset getDataset(AsterixHyracksIntegrationUtil integrationUtil, String datasetName)
        throws AlgebricksException, RemoteException {
    final ICcApplicationContext appCtx =
            (ICcApplicationContext) integrationUtil.getClusterControllerService().getApplicationContext();
    final MetadataProvider metadataProvider = new MetadataProvider(appCtx, null);
    final MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    try {
        return metadataProvider.findDataset(MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME, datasetName);
    } finally {
        // Read-only lookup: commit and release locks on every path.
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        metadataProvider.getLocks().unlock();
    }
}
/**
 * Handles a DROP INGESTION POLICY statement under a metadata transaction and the
 * drop-feed-policy lock protocol. Honors IF EXISTS when the policy is absent.
 *
 * @param metadataProvider provider that receives the new transaction context
 * @param stmt             the statement, expected to be a FeedPolicyDropStatement
 * @throws Exception on failure; the metadata transaction is aborted first
 */
protected void handleDropFeedPolicyStatement(MetadataProvider metadataProvider, Statement stmt) throws Exception {
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    FeedPolicyDropStatement dropStmt = (FeedPolicyDropStatement) stmt;
    SourceLocation sourceLoc = dropStmt.getSourceLocation();
    String dataverseName = getActiveDataverse(dropStmt.getDataverseName());
    String policyName = dropStmt.getPolicyName().getValue();
    MetadataLockUtil.dropFeedPolicyBegin(lockManager, metadataProvider.getLocks(), dataverseName,
            dataverseName + "." + policyName);
    try {
        FeedPolicyEntity policy = MetadataManager.INSTANCE.getFeedPolicy(mdTxnCtx, dataverseName, policyName);
        if (policy != null) {
            MetadataManager.INSTANCE.dropFeedPolicy(mdTxnCtx, dataverseName, policyName);
        } else if (!dropStmt.getIfExists()) {
            throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
                    "Unknown policy " + policyName + " in dataverse " + dataverseName);
        }
        // Both the drop and the IF EXISTS no-op end with a commit.
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        abort(e, e, mdTxnCtx);
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
/**
 * Handles a DROP FEED statement under a metadata transaction and the drop-feed lock
 * protocol. Honors IF EXISTS when the feed is absent.
 *
 * @param metadataProvider provider that receives the new transaction context
 * @param stmt             the statement, expected to be a FeedDropStatement
 * @param hcc              client connection used to run the drop job
 * @throws Exception on failure; the metadata transaction is aborted first
 */
protected void handleDropFeedStatement(MetadataProvider metadataProvider, Statement stmt,
        IHyracksClientConnection hcc) throws Exception {
    FeedDropStatement dropStmt = (FeedDropStatement) stmt;
    SourceLocation sourceLoc = dropStmt.getSourceLocation();
    String dataverseName = getActiveDataverse(dropStmt.getDataverseName());
    String feedName = dropStmt.getFeedName().getValue();
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    MetadataLockUtil.dropFeedBegin(lockManager, metadataProvider.getLocks(), dataverseName,
            dataverseName + "." + feedName);
    try {
        Feed feed = MetadataManager.INSTANCE.getFeed(mdTxnCtx, dataverseName, feedName);
        if (feed != null) {
            doDropFeed(hcc, metadataProvider, feed, sourceLoc);
        } else if (!dropStmt.getIfExists()) {
            throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
                    "There is no feed with this name " + feedName + ".");
        }
        // Both the drop and the IF EXISTS no-op end with a commit.
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        abort(e, e, mdTxnCtx);
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
private ARecordType getMetadataRecordType(String dataverseName, String datasetName) throws Exception { MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction(); // Retrieves file splits of the dataset. MetadataProvider metadataProvider = new MetadataProvider( (ICcApplicationContext) ExecutionTestUtil.integrationUtil.cc.getApplicationContext(), null); try { metadataProvider.setMetadataTxnContext(mdTxnCtx); Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName); ARecordType recordType = (ARecordType) metadataProvider.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName()); // Metadata transaction commits. MetadataManager.INSTANCE.commitTransaction(mdTxnCtx); return recordType; } finally { metadataProvider.getLocks().unlock(); } } }
protected void handleFunctionDropStatement(MetadataProvider metadataProvider, Statement stmt) throws Exception { FunctionDropStatement stmtDropFunction = (FunctionDropStatement) stmt; SourceLocation sourceLoc = stmtDropFunction.getSourceLocation(); FunctionSignature signature = stmtDropFunction.getFunctionSignature(); signature.setNamespace(getActiveDataverseName(signature.getNamespace())); MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction(); metadataProvider.setMetadataTxnContext(mdTxnCtx); MetadataLockUtil.functionStatementBegin(lockManager, metadataProvider.getLocks(), signature.getNamespace(), signature.getNamespace() + "." + signature.getName()); try { Function function = MetadataManager.INSTANCE.getFunction(mdTxnCtx, signature); // If function == null && stmtDropFunction.getIfExists() == true, commit txn directly. if (function == null && !stmtDropFunction.getIfExists()) { throw new CompilationException(ErrorCode.UNKNOWN_FUNCTION, sourceLoc, signature); } else if (function != null) { if (isFunctionUsed(mdTxnCtx, signature, null)) { throw new MetadataException(ErrorCode.METADATA_DROP_FUCTION_IN_USE, sourceLoc, signature); } else { MetadataManager.INSTANCE.dropFunction(mdTxnCtx, signature); } } MetadataManager.INSTANCE.commitTransaction(mdTxnCtx); } catch (Exception e) { abort(e, e, mdTxnCtx); throw e; } finally { metadataProvider.getLocks().unlock(); } }
/**
 * Creates and physically builds a secondary index for the dataset described by
 * {@code primaryIndexInfo} on the given partition.
 *
 * @param primaryIndexInfo         the already-created primary index of the dataset
 * @param secondaryIndex           the secondary index to build
 * @param storageComponentProvider provides the storage manager used for building
 * @param partition                the partition to build the index on
 * @return info describing the created secondary index
 * @throws AlgebricksException
 * @throws HyracksDataException
 * @throws RemoteException
 * @throws ACIDException
 */
public SecondaryIndexInfo createSecondaryIndex(PrimaryIndexInfo primaryIndexInfo, Index secondaryIndex,
        IStorageComponentProvider storageComponentProvider, int partition)
        throws AlgebricksException, HyracksDataException, RemoteException, ACIDException {
    // Short metadata transaction only to read the dataset's merge policy; committed at once.
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    org.apache.hyracks.algebricks.common.utils.Pair<ILSMMergePolicyFactory, Map<String, String>> mergePolicy =
            DatasetUtil.getMergePolicyFactory(primaryIndexInfo.dataset, mdTxnCtx);
    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    // Synthetic dataverse (PENDING_NO_OP) so the provider resolves the dataset's format.
    Dataverse dataverse = new Dataverse(primaryIndexInfo.dataset.getDataverseName(),
            NonTaggedDataFormat.class.getName(), MetadataUtil.PENDING_NO_OP);
    MetadataProvider mdProvider = new MetadataProvider(
            (ICcApplicationContext) ExecutionTestUtil.integrationUtil.cc.getApplicationContext(), dataverse);
    SecondaryIndexInfo secondaryIndexInfo = new SecondaryIndexInfo(primaryIndexInfo, secondaryIndex);
    try {
        IResourceFactory resourceFactory = primaryIndexInfo.dataset.getResourceFactory(mdProvider, secondaryIndex,
                primaryIndexInfo.recordType, primaryIndexInfo.metaType, mergePolicy.first, mergePolicy.second);
        IndexBuilderFactory indexBuilderFactory =
                new IndexBuilderFactory(storageComponentProvider.getStorageManager(),
                        secondaryIndexInfo.fileSplitProvider, resourceFactory, true);
        IHyracksTaskContext ctx = createTestContext(newJobId(), partition, false);
        IIndexBuilder indexBuilder = indexBuilderFactory.create(ctx, partition);
        // Physically creates the on-disk index for this partition.
        indexBuilder.build();
    } finally {
        // Release any locks the provider acquired, even if the build failed.
        mdProvider.getLocks().unlock();
    }
    return secondaryIndexInfo;
}
/**
 * Creates and physically builds the primary index of {@code dataset} on the given
 * partition.
 *
 * @param dataset                  the dataset whose primary index is created
 * @param primaryKeyTypes          types of the primary key fields
 * @param recordType               the dataset's record type
 * @param metaType                 the dataset's meta record type (may be null — TODO confirm)
 * @param filterFields             filter field indexes, if any
 * @param storageComponentProvider provides the storage manager used for building
 * @param primaryKeyIndexes        positions of the primary key fields
 * @param primaryKeyIndicators     indicators for the primary key sources
 * @param partition                the partition to build the index on
 * @return info describing the created primary index
 * @throws AlgebricksException
 * @throws HyracksDataException
 * @throws RemoteException
 * @throws ACIDException
 */
public PrimaryIndexInfo createPrimaryIndex(Dataset dataset, IAType[] primaryKeyTypes, ARecordType recordType,
        ARecordType metaType, int[] filterFields, IStorageComponentProvider storageComponentProvider,
        int[] primaryKeyIndexes, List<Integer> primaryKeyIndicators, int partition)
        throws AlgebricksException, HyracksDataException, RemoteException, ACIDException {
    // Short metadata transaction only to read the dataset's merge policy; committed at once.
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    org.apache.hyracks.algebricks.common.utils.Pair<ILSMMergePolicyFactory, Map<String, String>> mergePolicy =
            DatasetUtil.getMergePolicyFactory(dataset, mdTxnCtx);
    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    PrimaryIndexInfo primaryIndexInfo = new PrimaryIndexInfo(dataset, primaryKeyTypes, recordType, metaType,
            mergePolicy.first, mergePolicy.second, filterFields, primaryKeyIndexes, primaryKeyIndicators);
    // Synthetic dataverse (PENDING_NO_OP) so the provider resolves the dataset's format.
    Dataverse dataverse =
            new Dataverse(dataset.getDataverseName(), NonTaggedDataFormat.class.getName(), MetadataUtil.PENDING_NO_OP);
    MetadataProvider mdProvider = new MetadataProvider(
            (ICcApplicationContext) ExecutionTestUtil.integrationUtil.cc.getApplicationContext(), dataverse);
    try {
        IResourceFactory resourceFactory = dataset.getResourceFactory(mdProvider, primaryIndexInfo.index, recordType,
                metaType, mergePolicy.first, mergePolicy.second);
        IndexBuilderFactory indexBuilderFactory =
                new IndexBuilderFactory(storageComponentProvider.getStorageManager(),
                        primaryIndexInfo.getFileSplitProvider(), resourceFactory, true);
        IHyracksTaskContext ctx = createTestContext(newJobId(), partition, false);
        IIndexBuilder indexBuilder = indexBuilderFactory.create(ctx, partition);
        // Physically creates the on-disk index for this partition.
        indexBuilder.build();
    } finally {
        // Release any locks the provider acquired, even if the build failed.
        mdProvider.getLocks().unlock();
    }
    return primaryIndexInfo;
}
/**
 * Adds a copy of {@code source} named {@code "ds_" + datasetPostfix} to the metadata,
 * then either commits or aborts the transaction depending on {@code abort}.
 *
 * @param appCtx         the CC application context
 * @param source         the dataset whose attributes are cloned
 * @param datasetPostfix suffix used for the new dataset's name and id
 * @param abort          when true, the transaction is aborted instead of committed
 * @throws Exception on failure
 */
private void addDataset(ICcApplicationContext appCtx, Dataset source, int datasetPostfix, boolean abort)
        throws Exception {
    Dataset newDataset = new Dataset(source.getDataverseName(), "ds_" + datasetPostfix, source.getDataverseName(),
            source.getDatasetType().name(), source.getNodeGroupName(), NoMergePolicyFactory.NAME, null,
            source.getDatasetDetails(), source.getHints(), DatasetConfig.DatasetType.INTERNAL, datasetPostfix, 0);
    MetadataProvider metadataProvider = new MetadataProvider(appCtx, null);
    final MetadataTransactionContext txnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(txnCtx);
    try {
        MetadataManager.INSTANCE.addDataset(txnCtx, newDataset);
        // Either persist the new dataset or deliberately throw it away.
        if (abort) {
            MetadataManager.INSTANCE.abortTransaction(txnCtx);
        } else {
            MetadataManager.INSTANCE.commitTransaction(txnCtx);
        }
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
}