/**
 * Creates the pushable that gathers statistics for the tree index owned by
 * the given partition.
 *
 * @param ctx the task context of the running joblet
 * @param partition the partition whose index is inspected
 * @param indexHelperFactory factory used to resolve the index dataflow helper
 * @param storageManager storage manager backing the index
 * @throws HyracksDataException if the index helper cannot be created
 */
public TreeIndexStatsOperatorNodePushable(IHyracksTaskContext ctx, int partition,
        IIndexDataflowHelperFactory indexHelperFactory, IStorageManager storageManager)
        throws HyracksDataException {
    this.ctx = ctx;
    this.storageManager = storageManager;
    // Resolve the per-partition index helper from the joblet's service context.
    this.treeIndexHelper = indexHelperFactory.create(ctx.getJobletContext().getServiceContext(), partition);
}
/**
 * Creates the pushable that triggers compaction on the LSM index of the
 * given partition.
 *
 * @param ctx the task context of the running joblet
 * @param partition the partition whose index is compacted
 * @param indexHelperFactory factory used to resolve the index dataflow helper
 * @throws HyracksDataException if the index helper cannot be created
 */
public LSMIndexCompactOperatorNodePushable(IHyracksTaskContext ctx, int partition,
        IIndexDataflowHelperFactory indexHelperFactory) throws HyracksDataException {
    // The helper is bound to this partition's service context at construction time.
    this.indexHelper = indexHelperFactory.create(ctx.getJobletContext().getServiceContext(), partition);
}
/**
 * Creates the pushable that scans a tree index in disk order for one
 * partition.
 *
 * @param ctx the task context of the running joblet
 * @param partition the partition whose index is scanned
 * @param indexHelperFactory factory used to resolve the index dataflow helper
 * @param searchCallbackFactory factory for the search operation callback
 * @throws HyracksDataException if the index helper cannot be created
 */
public TreeIndexDiskOrderScanOperatorNodePushable(IHyracksTaskContext ctx, int partition,
        IIndexDataflowHelperFactory indexHelperFactory, ISearchOperationCallbackFactory searchCallbackFactory)
        throws HyracksDataException {
    this.ctx = ctx;
    this.searchCallbackFactory = searchCallbackFactory;
    // Resolve the per-partition index helper from the joblet's service context.
    this.treeIndexHelper = indexHelperFactory.create(ctx.getJobletContext().getServiceContext(), partition);
}
/**
 * Creates the pushable that drops the index of the given partition.
 *
 * @param indexHelperFactory factory used to resolve the index dataflow helper
 * @param options drop options controlling how the drop is performed
 * @param ctx the task context of the running joblet
 * @param partition the partition whose index is dropped
 * @throws HyracksDataException if the index helper cannot be created
 */
public IndexDropOperatorNodePushable(IIndexDataflowHelperFactory indexHelperFactory, Set<DropOption> options,
        IHyracksTaskContext ctx, int partition) throws HyracksDataException {
    this.options = options;
    // Resolve the per-partition index helper from the joblet's service context.
    this.indexHelper = indexHelperFactory.create(ctx.getJobletContext().getServiceContext(), partition);
}
/**
 * Creates an active-source pushable identified by the given runtime id.
 *
 * @param ctx the task context of the running joblet
 * @param runtimeId identifier of this active runtime
 */
public ActiveSourceOperatorNodePushable(IHyracksTaskContext ctx, ActiveRuntimeId runtimeId) {
    this.ctx = ctx;
    this.runtimeId = runtimeId;
    // Look up the node-wide ActiveManager through the NC application context.
    activeManager = (ActiveManager) ((INcApplicationContext) ctx.getJobletContext().getServiceContext()
            .getApplicationContext()).getActiveManager();
}
/**
 * Creates a {@code PingReader} for this partition.
 *
 * @param ctx the task context of the running joblet
 * @param partition the partition index (unused by the reader)
 * @return a new record reader bound to the local node id
 * @throws HyracksDataException declared by the interface; not thrown here
 */
@Override
public IRecordReader<char[]> createRecordReader(IHyracksTaskContext ctx, int partition)
        throws HyracksDataException {
    // The ping reader only needs the identity of the local node.
    String nodeId = ctx.getJobletContext().getServiceContext().getNodeId();
    return new PingReader(nodeId);
}
/**
 * Creates the pushable that applies insert/update/delete operations to the
 * index of the given partition.
 *
 * @param ctx the task context of the running joblet
 * @param partition the partition whose index is modified
 * @param indexHelperFactory factory used to resolve the index dataflow helper
 * @param fieldPermutation mapping from input fields to index tuple fields
 * @param inputRecDesc descriptor of the incoming record frames
 * @param op the index operation to perform (insert, update, delete, ...)
 * @param modOpCallbackFactory factory for the modification operation callback
 * @param tupleFilterFactory optional factory for filtering incoming tuples
 * @throws HyracksDataException if the index helper cannot be created
 */
public IndexInsertUpdateDeleteOperatorNodePushable(IHyracksTaskContext ctx, int partition,
        IIndexDataflowHelperFactory indexHelperFactory, int[] fieldPermutation, RecordDescriptor inputRecDesc,
        IndexOperation op, IModificationOperationCallbackFactory modOpCallbackFactory,
        ITupleFilterFactory tupleFilterFactory) throws HyracksDataException {
    this.ctx = ctx;
    this.op = op;
    this.inputRecDesc = inputRecDesc;
    this.modOpCallbackFactory = modOpCallbackFactory;
    this.tupleFilterFactory = tupleFilterFactory;
    // Resolve the per-partition index helper from the joblet's service context.
    this.indexHelper = indexHelperFactory.create(ctx.getJobletContext().getServiceContext(), partition);
    // Configure the reusable tuple reference with the field mapping.
    this.tuple.setFieldPermutation(fieldPermutation);
}
/**
 * Returns the transaction subsystem of the node hosting the given task.
 *
 * @param ctx the task context of the running joblet
 * @return the node's transaction subsystem
 */
@Override
public ITransactionSubsystem getTransactionSubsystem(IHyracksTaskContext ctx) {
    // Navigate task -> joblet -> service context -> NC application context.
    return ((INcApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext())
            .getTransactionSubsystem();
}
}
/**
 * Begins one entity-level transaction per registered sub-transaction id
 * when the joblet starts.
 *
 * @throws IllegalStateException if beginning any transaction fails
 */
@Override
public void jobletStart() {
    try {
        TransactionOptions options = new TransactionOptions(ITransactionManager.AtomicityLevel.ENTITY_LEVEL);
        // Hoisted out of the loop: the transaction manager lookup is loop-invariant.
        ITransactionManager txnManager = ((INcApplicationContext) jobletContext.getServiceContext()
                .getApplicationContext()).getTransactionSubsystem().getTransactionManager();
        for (TxnId subTxnId : txnIdMap.values()) {
            txnManager.beginTransaction(subTxnId, options);
        }
    } catch (ACIDException e) {
        // Previously `throw new Error(e)`: java.lang.Error is reserved for abnormal
        // JVM-level conditions; a failed begin-transaction is an application-level
        // failure, so wrap it in an unchecked exception that preserves the cause.
        throw new IllegalStateException("Failed to begin transaction(s) at joblet start", e);
    }
}
/**
 * Creates the pushable that bulk-loads the index of the given partition.
 *
 * @param indexDataflowHelperFactory factory used to resolve the index dataflow helper
 * @param ctx the task context of the running joblet
 * @param partition the partition whose index is bulk-loaded
 * @param fieldPermutation mapping from input fields to index tuple fields
 * @param fillFactor target page fill factor for the bulk load
 * @param verifyInput whether to verify the incoming tuples
 * @param numElementsHint hint for the expected number of elements
 * @param checkIfEmptyIndex whether to require the index to be empty first
 * @param recDesc descriptor of the incoming record frames
 * @throws HyracksDataException if the index helper cannot be created
 */
public IndexBulkLoadOperatorNodePushable(IIndexDataflowHelperFactory indexDataflowHelperFactory,
        IHyracksTaskContext ctx, int partition, int[] fieldPermutation, float fillFactor, boolean verifyInput,
        long numElementsHint, boolean checkIfEmptyIndex, RecordDescriptor recDesc) throws HyracksDataException {
    this.ctx = ctx;
    this.recDesc = recDesc;
    this.fillFactor = fillFactor;
    this.verifyInput = verifyInput;
    this.numElementsHint = numElementsHint;
    this.checkIfEmptyIndex = checkIfEmptyIndex;
    // Resolve the per-partition index helper from the joblet's service context.
    this.indexHelper = indexDataflowHelperFactory.create(ctx.getJobletContext().getServiceContext(), partition);
    // Configure the reusable tuple reference with the field mapping.
    tuple.setFieldPermutation(fieldPermutation);
}
/**
 * Creates the external scalar function evaluator for this task.
 *
 * @param ctx the task context of the running joblet
 * @return the evaluator produced by {@code ExternalFunctionProvider}
 * @throws HyracksDataException if the evaluator cannot be created
 */
@Override
public IScalarEvaluator createScalarEvaluator(IHyracksTaskContext ctx) throws HyracksDataException {
    // Prefer the injected application context; fall back to the one reachable
    // from the task context when none was provided.
    IApplicationContext effectiveAppCtx;
    if (appCtx != null) {
        effectiveAppCtx = appCtx;
    } else {
        effectiveAppCtx = (IApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext();
    }
    return (ExternalScalarFunction) ExternalFunctionProvider.getExternalFunctionEvaluator(finfo, args, ctx,
            effectiveAppCtx);
}
/**
 * Creates a {@code StorageComponentsReader} over the lifecycle state of the
 * configured dataset.
 *
 * @param ctx the task context of the running joblet
 * @param partition the partition index (unused by the reader)
 * @return a reader bound to the local node id and the dataset's resources
 * @throws HyracksDataException declared by the interface; not thrown here
 */
@Override
public IRecordReader<char[]> createRecordReader(IHyracksTaskContext ctx, int partition)
        throws HyracksDataException {
    INCServiceContext serviceCtx = ctx.getJobletContext().getServiceContext();
    INcApplicationContext appCtx = (INcApplicationContext) serviceCtx.getApplicationContext();
    DatasetLifecycleManager dsLifecycleMgr = (DatasetLifecycleManager) appCtx.getDatasetLifecycleManager();
    DatasetResource dsr = dsLifecycleMgr.getDatasetLifecycle(datasetId);
    // Fixed: reuse the serviceCtx local instead of re-deriving the service
    // context via ctx.getJobletContext().getServiceContext() a second time.
    return new StorageComponentsReader(serviceCtx.getNodeId(), dsr);
}
/**
 * Creates an {@code IndexBuilder} for the file split assigned to the given
 * partition.
 *
 * @param ctx the task context of the running joblet
 * @param partition index into the file split provider's splits
 * @return a new index builder for that split
 * @throws HyracksDataException if the builder cannot be created
 */
@Override
public IIndexBuilder create(IHyracksTaskContext ctx, int partition) throws HyracksDataException {
    // Resolve the on-disk location of this partition's split.
    FileReference resourceRef = fileSplitProvider.getFileSplits()[partition].getFileReference(ctx.getIoManager());
    // NOTE(review): the service context is looked up twice below; hoisting it
    // into a local would need its interface type, which is not visible here.
    return new IndexBuilder(ctx.getJobletContext().getServiceContext(), storageManager,
            storageManager.getResourceIdFactory(ctx.getJobletContext().getServiceContext()), resourceRef,
            localResourceFactory, durable);
}
}
/**
 * Lazily restores the Hadoop configuration, input splits, and local node
 * name; no-op once already configured.
 *
 * @param ctx the task context used to resolve the local node id
 * @throws HyracksDataException if restoring the configuration or splits fails
 */
private void restoreConfig(IHyracksTaskContext ctx) throws HyracksDataException {
    // Guard clause: configuration is restored at most once.
    if (configured) {
        return;
    }
    conf = confFactory.getConf();
    inputSplits = inputSplitsFactory.getSplits();
    nodeName = ctx.getJobletContext().getServiceContext().getNodeId();
    configured = true;
}
/**
 * Begins this joblet's entity-level transaction when the joblet starts.
 *
 * @throws IllegalStateException if beginning the transaction fails
 */
@Override
public void jobletStart() {
    try {
        TransactionOptions options = new TransactionOptions(AtomicityLevel.ENTITY_LEVEL);
        ((INcApplicationContext) jobletContext.getServiceContext().getApplicationContext())
                .getTransactionSubsystem().getTransactionManager().beginTransaction(txnId, options);
    } catch (ACIDException e) {
        // Previously `throw new Error(e)`: java.lang.Error is reserved for abnormal
        // JVM-level conditions; a failed begin-transaction is an application-level
        // failure, so wrap it in an unchecked exception that preserves the cause.
        throw new IllegalStateException("Failed to begin transaction at joblet start", e);
    }
}
/**
 * Creates the pushable that collects records for a feed connection on one
 * partition.
 *
 * @param ctx the task context of the running joblet
 * @param feedConnectionId identifier of the feed connection being served
 * @param feedPolicy raw feed policy parameters, wrapped in an accessor
 * @param partition the partition this collector serves
 */
public FeedCollectOperatorNodePushable(IHyracksTaskContext ctx, FeedConnectionId feedConnectionId,
        Map<String, String> feedPolicy, int partition) {
    this.ctx = ctx;
    this.partition = partition;
    this.connectionId = feedConnectionId;
    this.policyAccessor = new FeedPolicyAccessor(feedPolicy);
    // Look up the node-wide ActiveManager through the NC application context.
    this.activeManager = (ActiveManager) ((INcApplicationContext) ctx.getJobletContext().getServiceContext()
            .getApplicationContext()).getActiveManager();
}
/**
 * Creates a {@code DatasetResourcesReader} over the lifecycle state of the
 * configured dataset.
 *
 * @param ctx the task context of the running joblet
 * @param partition the partition index (unused by the reader)
 * @return a reader over the dataset's resources
 */
@Override
public IRecordReader<char[]> createRecordReader(IHyracksTaskContext ctx, int partition) {
    // Navigate task -> service context -> NC application context in one step.
    INcApplicationContext appCtx =
            (INcApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext();
    DatasetLifecycleManager dsLifecycleMgr = (DatasetLifecycleManager) appCtx.getDatasetLifecycleManager();
    return new DatasetResourcesReader(dsLifecycleMgr.getDatasetLifecycle(datasetId));
}
/**
 * Initializes the test fixture: sets up the storage manager holder, obtains
 * a buffer cache, and creates a BTree over the given type traits and
 * comparators.
 *
 * @param pageSize buffer cache page size
 * @param numPages number of buffer cache pages
 * @param typeTraits type traits of the index fields
 * @param cmpFactories comparator factories for the key fields
 * @throws HyracksDataException if the BTree cannot be created
 */
@Override
protected void init(int pageSize, int numPages, ITypeTraits[] typeTraits, IBinaryComparatorFactory[] cmpFactories)
        throws HyracksDataException {
    IHyracksTaskContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
    // The holder must be initialized before the buffer cache is requested.
    TestStorageManagerComponentHolder.init(pageSize, numPages, MAX_OPEN_FILES);
    bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx.getJobletContext().getServiceContext());
    ITreeIndexMetadataFrameFactory metadataFrameFactory = new LIFOMetaDataFrameFactory();
    LinkedMetaDataPageManager pageManager = new LinkedMetaDataPageManager(bufferCache, metadataFrameFactory);
    btree = BTreeUtils.createBTree(bufferCache, typeTraits, cmpFactories, BTreeLeafFrameType.REGULAR_NSM, file,
            pageManager, false);
}
}
/**
 * Creates the secondary index for the test and captures its underlying
 * {@code TestLsmBtree} instance, leaving the dataflow helper closed.
 *
 * @throws HyracksDataException if index creation or open/close fails
 * @throws RemoteException if the NC call fails remotely
 * @throws ACIDException on transaction-related failures
 * @throws AlgebricksException on compiler-level failures
 */
private void createSecondaryIndex()
        throws HyracksDataException, RemoteException, ACIDException, AlgebricksException {
    SecondaryIndexInfo secondaryIndexInfo =
            nc.createSecondaryIndex(primaryIndexInfo, secondaryIndex, storageManager, 0);
    IndexDataflowHelperFactory helperFactory =
            new IndexDataflowHelperFactory(nc.getStorageManager(), secondaryIndexInfo.getFileSplitProvider());
    secondaryIndexDataflowHelper = helperFactory.create(taskCtx.getJobletContext().getServiceContext(), 0);
    // Open just long enough to grab the index instance, then release the helper.
    secondaryIndexDataflowHelper.open();
    secondaryLsmBtree = (TestLsmBtree) secondaryIndexDataflowHelper.getIndexInstance();
    secondaryIndexDataflowHelper.close();
}
/**
 * Test setup: creates the primary index for {@code PARTITION}, builds a test
 * task context, and captures the underlying {@code TestLsmBtree} instance,
 * leaving the dataflow helper closed.
 *
 * @throws Exception if any step of index creation fails
 */
@Before
public void createIndex() throws Exception {
    PrimaryIndexInfo primaryIndexInfo = StorageTestUtils.createPrimaryIndex(nc, PARTITION);
    IndexDataflowHelperFactory helperFactory =
            new IndexDataflowHelperFactory(nc.getStorageManager(), primaryIndexInfo.getFileSplitProvider());
    JobId jobId = nc.newJobId();
    ctx = nc.createTestContext(jobId, PARTITION, false);
    indexDataflowHelper = helperFactory.create(ctx.getJobletContext().getServiceContext(), PARTITION);
    // Open just long enough to grab the index instance, then release the helper.
    indexDataflowHelper.open();
    lsmBtree = (TestLsmBtree) indexDataflowHelper.getIndexInstance();
    indexDataflowHelper.close();
}