/**
 * Creates the per-partition intake pushable for this operator.
 * The adapter factory is materialized lazily on first use — if it was not supplied
 * up front it is built from the external adapter definition via
 * {@code createExternalAdapterFactory(ctx)} and cached on the descriptor.
 *
 * @param ctx                task context for the partition being started
 * @param recordDescProvider provider of this operator's output record descriptor
 * @param partition          index of the partition this pushable serves
 * @param nPartitions        total number of partitions (unused here)
 * @throws HyracksDataException if the external adapter factory cannot be created
 */
@Override
public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
        IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions)
        throws HyracksDataException {
    if (adaptorFactory == null) {
        // NOTE(review): presumably only externally-defined adapters arrive without a
        // factory instance (it is not serializable with the descriptor) — confirm.
        adaptorFactory = createExternalAdapterFactory(ctx);
    }
    return new FeedIntakeOperatorNodePushable(ctx, feedId, adaptorFactory, partition, recordDescProvider, this);
}
// NOTE(review): fragment — this chunk begins mid-statement (the cast below is the tail of an
// assignment whose left-hand side lies outside this view) and ends before its enclosing braces
// close. Names such as firstOp, replicateOp, jobsList, subJob, iter1, feedConnections and
// intakeLocations are declared elsewhere in the original method. Code kept byte-identical;
// comments only.
(FeedIntakeOperatorDescriptor) intakeJob.getOperatorMap().get(new OperatorDescriptorId(0));
FeedIntakeOperatorDescriptor ingestionOp;
if (firstOp.getAdaptorFactory() == null) {
    // No live factory instance on the first operator: rebuild the intake operator from the
    // external adapter's recorded library name and factory class name.
    ingestionOp = new FeedIntakeOperatorDescriptor(jobSpec, feed, firstOp.getAdaptorLibraryName(),
            firstOp.getAdaptorFactoryClassName(), firstOp.getAdapterOutputType(), firstOp.getPolicyAccessor(),
            firstOp.getOutputRecordDescriptors()[0]);
} else {
    // A factory instance is available; reuse it directly.
    ingestionOp = new FeedIntakeOperatorDescriptor(jobSpec, feed, firstOp.getAdaptorFactory(),
            firstOp.getAdapterOutputType(), firstOp.getPolicyAccessor(), firstOp.getOutputRecordDescriptors()[0]);
    // NOTE(review): the result of this constructor is discarded here, yet `replicateOp` is used
    // in the connect(...) call below — the original assignment to replicateOp was likely lost
    // when this chunk was extracted; verify against the upstream source.
    new ReplicateOperatorDescriptor(jobSpec, ingestionOp.getOutputRecordDescriptors()[0], jobsList.size());
    // Wire intake output 0 into the replicate operator's input 0, one-to-one per partition.
    jobSpec.connect(new OneToOneConnectorDescriptor(jobSpec), ingestionOp, 0, replicateOp, 0);
    // Pin the intake operator to the pre-computed intake node locations.
    PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, ingestionOp, intakeLocations);
    Map<OperatorDescriptorId, IOperatorDescriptor> operatorsMap = subJob.getOperatorMap();
    String datasetName = feedConnections.get(iter1).getDatasetName();
    FeedConnectionId feedConnectionId = new FeedConnectionId(ingestionOp.getEntityId(), datasetName);
/**
 * Builds the intake (ingestion) runtime for a feed: resolves the feed's adapter factory and
 * output record type from metadata, then creates the matching
 * {@link FeedIntakeOperatorDescriptor} for either an internal or an external adapter.
 *
 * @param jobSpec        the job specification the intake operator belongs to
 * @param feed           the feed whose adapter configuration drives the runtime
 * @param policyAccessor accessor for the feed's ingestion policy
 * @return a triple of (intake operator descriptor, its partition constraint, the adapter factory)
 * @throws IllegalStateException if the resolved adapter type is neither INTERNAL nor EXTERNAL
 * @throws Exception             if the adapter factory or output type cannot be resolved
 */
public Triple<IOperatorDescriptor, AlgebricksPartitionConstraint, IAdapterFactory> buildFeedIntakeRuntime(
        JobSpecification jobSpec, Feed feed, FeedPolicyAccessor policyAccessor) throws Exception {
    Triple<IAdapterFactory, RecordDescriptor, IDataSourceAdapter.AdapterType> factoryOutput;
    factoryOutput =
            FeedMetadataUtil.getFeedFactoryAndOutput(feed, policyAccessor, mdTxnCtx, getApplicationContext());
    ARecordType recordType =
            FeedMetadataUtil.getOutputType(feed, feed.getConfiguration().get(ExternalDataConstants.KEY_TYPE_NAME));
    IAdapterFactory adapterFactory = factoryOutput.first;
    FeedIntakeOperatorDescriptor feedIngestor;
    switch (factoryOutput.third) {
        case INTERNAL:
            feedIngestor = new FeedIntakeOperatorDescriptor(jobSpec, feed, adapterFactory, recordType,
                    policyAccessor, factoryOutput.second);
            break;
        case EXTERNAL:
            // External adapters are named "<library><sep><adapter>"; the library prefix
            // identifies the UDF library the factory class is loaded from.
            String libraryName = feed.getConfiguration().get(ExternalDataConstants.KEY_ADAPTER_NAME).trim()
                    .split(FeedConstants.NamingConstants.LIBRARY_NAME_SEPARATOR)[0];
            feedIngestor = new FeedIntakeOperatorDescriptor(jobSpec, feed, libraryName,
                    adapterFactory.getClass().getName(), recordType, policyAccessor, factoryOutput.second);
            break;
        default:
            // Previously this arm fell through and the method returned a Triple containing a
            // null operator, deferring the failure to a distant NPE. Fail fast instead.
            throw new IllegalStateException("Unknown feed adapter type: " + factoryOutput.third);
    }
    AlgebricksPartitionConstraint partitionConstraint = adapterFactory.getPartitionConstraint();
    return new Triple<>(feedIngestor, partitionConstraint, adapterFactory);
}
/**
 * Constructs the intake-side active runtime for one partition of a feed.
 * Registers itself under an {@link ActiveRuntimeId} derived from the feed id and partition,
 * resolves the output record descriptor from the owning operator's activity, and instantiates
 * the concrete feed adapter for this partition via the supplied factory.
 *
 * @param ctx                          task context of the hosting partition
 * @param feedId                       entity id of the feed being ingested
 * @param adapterFactory               factory used to create the partition's adapter
 * @param partition                    index of this partition
 * @param recordDescProvider           provider of the operator's output record descriptor
 * @param feedIntakeOperatorDescriptor the operator descriptor that created this pushable
 * @throws HyracksDataException if the adapter cannot be created
 */
public FeedIntakeOperatorNodePushable(IHyracksTaskContext ctx, EntityId feedId, IAdapterFactory adapterFactory,
        int partition, IRecordDescriptorProvider recordDescProvider,
        FeedIntakeOperatorDescriptor feedIntakeOperatorDescriptor) throws HyracksDataException {
    super(ctx, new ActiveRuntimeId(feedId, FeedIntakeOperatorNodePushable.class.getSimpleName(), partition));
    this.opDesc = feedIntakeOperatorDescriptor;
    this.recordDesc =
            recordDescProvider.getOutputRecordDescriptor(feedIntakeOperatorDescriptor.getActivityId(), 0);
    // Adapter creation may itself fail with HyracksDataException; done last so the
    // descriptor fields above are already populated.
    this.adapter = (FeedAdapter) adapterFactory.createAdapter(ctx, runtimeId.getPartition());
}