/**
 * Aborts the ongoing metadata transaction, logging the error cause.
 *
 * @param rootE the root cause being handled; logged in debug mode and wrapped if the abort itself fails
 * @param parentE the exception that collects any abort failure as a suppressed exception
 * @param mdTxnCtx the metadata transaction to abort; may be {@code null}, in which case no abort is attempted
 */
public static void abort(Exception rootE, Exception parentE, MetadataTransactionContext mdTxnCtx) {
    // Clear any pending interrupt so the abort call itself is not disturbed; restore the flag on exit.
    final boolean wasInterrupted = Thread.interrupted();
    try {
        if (IS_DEBUG_MODE) {
            LOGGER.log(Level.ERROR, rootE.getMessage(), rootE);
        }
        if (mdTxnCtx != null) {
            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
        }
    } catch (Exception abortFailure) {
        // The abort failed too: attach the failure to the parent and escalate with the root cause.
        parentE.addSuppressed(abortFailure);
        throw new IllegalStateException(rootE);
    } finally {
        if (wasInterrupted) {
            Thread.currentThread().interrupt();
        }
    }
}
/**
 * Collects all datasets of the given dataverse that are candidates for rebalance,
 * reading them under a dedicated metadata transaction.
 *
 * @param dataverseName the dataverse whose datasets are collected
 * @return the datasets found in the dataverse
 * @throws Exception if the metadata read fails; the transaction is aborted before rethrowing
 */
private List<Dataset> getAllDatasetsForRebalance(String dataverseName) throws Exception {
    List<Dataset> datasets;
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    try {
        datasets = getDatasetsInDataverseForRebalance(dataverseName, mdTxnCtx);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        // Suppress any abort failure into the original exception instead of losing the
        // original cause (same pattern as startDDLRecovery's error handling).
        try {
            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
        } catch (Exception e2) {
            e.addSuppressed(e2);
        }
        throw e;
    }
    return datasets;
}
/**
 * Collects the rebalance-candidate datasets of every dataverse in the metadata store,
 * reading them under a dedicated metadata transaction.
 *
 * @return all datasets across all dataverses that are candidates for rebalance
 * @throws Exception if the metadata read fails; the transaction is aborted before rethrowing
 */
private List<Dataset> getAllDatasetsForRebalance() throws Exception {
    List<Dataset> datasets = new ArrayList<>();
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    try {
        List<Dataverse> dataverses = MetadataManager.INSTANCE.getDataverses(mdTxnCtx);
        for (Dataverse dv : dataverses) {
            datasets.addAll(getDatasetsInDataverseForRebalance(dv.getDataverseName(), mdTxnCtx));
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        // Suppress any abort failure into the original exception instead of losing the
        // original cause (same pattern as startDDLRecovery's error handling).
        try {
            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
        } catch (Exception e2) {
            e.addSuppressed(e2);
        }
        throw e;
    }
    return datasets;
}
/** * Perform recovery of DDL operations metadata records */ public static void startDDLRecovery() throws AlgebricksException { // #. clean up any record which has pendingAdd/DelOp flag // as traversing all records from DATAVERSE_DATASET to DATASET_DATASET, and then // to INDEX_DATASET. MetadataTransactionContext mdTxnCtx = null; LOGGER.info("Starting DDL recovery ..."); try { mdTxnCtx = MetadataManager.INSTANCE.beginTransaction(); List<Dataverse> dataverses = MetadataManager.INSTANCE.getDataverses(mdTxnCtx); for (Dataverse dataverse : dataverses) { recoverDataverse(mdTxnCtx, dataverse); } MetadataManager.INSTANCE.commitTransaction(mdTxnCtx); LOGGER.info("Completed DDL recovery."); } catch (Exception e) { try { LOGGER.error("Failure during DDL recovery", e); MetadataManager.INSTANCE.abortTransaction(mdTxnCtx); } catch (Exception e2) { e.addSuppressed(e2); } throw MetadataException.create(e); } }
// NOTE(review): fragment of a larger error handler — 'ctx' and 'e' come from the enclosing
// scope not visible here. Best-effort rollback: a failing abort is suppressed into the
// original exception 'e' rather than masking it.
if (ctx != null) { try { MetadataManager.INSTANCE.abortTransaction(ctx); } catch (ACIDException | RemoteException e2) { e.addSuppressed(e2);
// NOTE(review): fragment of a library-install error handler — log the failure, then roll
// back the metadata transaction so the partial install is not persisted.
LOGGER.log(Level.ERROR, "Exception in installing library " + libraryName, e); MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
// NOTE(review): fragment of a bootstrap error handler — log and attempt rollback; a failing
// abort is suppressed into the original exception 'e' instead of replacing it.
LOGGER.log(Level.ERROR, "Failure during metadata bootstrap", e); MetadataManager.INSTANCE.abortTransaction(mdTxnCtx); } catch (Exception e2) { e.addSuppressed(e2);
// NOTE(review): fragment — commit on success; on any failure abort the transaction and
// rethrow wrapped as AsterixException. If the abort itself throws, the original cause
// is lost here (unlike the addSuppressed pattern used elsewhere) — TODO confirm intent.
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx); } catch (Exception e) { MetadataManager.INSTANCE.abortTransaction(mdTxnCtx); throw new AsterixException(e);
@Test public void abortMetadataTxn() throws Exception { ICcApplicationContext appCtx = (ICcApplicationContext) integrationUtil.getClusterControllerService().getApplicationContext(); final MetadataProvider metadataProvider = new MetadataProvider(appCtx, null); final MetadataTransactionContext mdTxn = MetadataManager.INSTANCE.beginTransaction(); metadataProvider.setMetadataTxnContext(mdTxn); final String nodeGroupName = "ng"; try { final List<String> ngNodes = Arrays.asList("asterix_nc1"); MetadataManager.INSTANCE.addNodegroup(mdTxn, new NodeGroup(nodeGroupName, ngNodes)); MetadataManager.INSTANCE.abortTransaction(mdTxn); } finally { metadataProvider.getLocks().unlock(); } // ensure that the node group was not added final MetadataTransactionContext readMdTxn = MetadataManager.INSTANCE.beginTransaction(); try { final NodeGroup nodegroup = MetadataManager.INSTANCE.getNodegroup(readMdTxn, nodeGroupName); if (nodegroup != null) { throw new AssertionError("nodegroup was found after metadata txn was aborted"); } } finally { MetadataManager.INSTANCE.commitTransaction(readMdTxn); } }
// NOTE(review): fragment — tail of a call built from the source dataset's rebalance state;
// the rebalance metadata transaction is then aborted and metadata locks are released.
sourceDataset.getRebalanceCount() + 1, rebalanceToNodes, metadataProvider); MetadataManager.INSTANCE.abortTransaction(rebalanceTxn); } finally { metadataProvider.getLocks().unlock();
/**
 * Adds a copy of {@code source} as a new internal dataset named {@code "ds_" + datasetPostfix},
 * then either commits or aborts the metadata transaction that created it.
 *
 * @param appCtx the CC application context used to build a metadata provider
 * @param source the dataset whose definition fields are copied
 * @param datasetPostfix suffix for the new dataset's name, also used as its dataset id
 * @param abort when {@code true} the transaction is aborted instead of committed
 * @throws Exception if the metadata operation fails
 */
private void addDataset(ICcApplicationContext appCtx, Dataset source, int datasetPostfix, boolean abort)
        throws Exception {
    final Dataset newDataset = new Dataset(source.getDataverseName(), "ds_" + datasetPostfix,
            source.getDataverseName(), source.getDatasetType().name(), source.getNodeGroupName(),
            NoMergePolicyFactory.NAME, null, source.getDatasetDetails(), source.getHints(),
            DatasetConfig.DatasetType.INTERNAL, datasetPostfix, 0);
    final MetadataProvider provider = new MetadataProvider(appCtx, null);
    final MetadataTransactionContext txn = MetadataManager.INSTANCE.beginTransaction();
    provider.setMetadataTxnContext(txn);
    try {
        MetadataManager.INSTANCE.addDataset(txn, newDataset);
        if (abort) {
            MetadataManager.INSTANCE.abortTransaction(txn);
        } else {
            MetadataManager.INSTANCE.commitTransaction(txn);
        }
    } finally {
        provider.getLocks().unlock();
    }
}
}