/**
 * Builds the intake (ingestion) side of a feed job: resolves the adapter factory
 * and output record type from feed metadata, constructs the matching intake
 * operator descriptor, and returns it with the adapter's partition constraint.
 *
 * @param jobSpec        job specification the intake operator is created for
 * @param feed           the feed whose adapter/type configuration is consulted
 * @param policyAccessor feed ingestion policy
 * @return triple of (intake operator, partition constraint, adapter factory);
 *         the operator is null when the adapter type is neither INTERNAL nor
 *         EXTERNAL (mirrors the original switch default)
 * @throws Exception if the adapter factory or output type cannot be resolved
 */
public Triple<IOperatorDescriptor, AlgebricksPartitionConstraint, IAdapterFactory> buildFeedIntakeRuntime(
        JobSpecification jobSpec, Feed feed, FeedPolicyAccessor policyAccessor) throws Exception {
    Triple<IAdapterFactory, RecordDescriptor, IDataSourceAdapter.AdapterType> factoryOutput =
            FeedMetadataUtil.getFeedFactoryAndOutput(feed, policyAccessor, mdTxnCtx, getApplicationContext());
    ARecordType outputType =
            FeedMetadataUtil.getOutputType(feed, feed.getConfiguration().get(ExternalDataConstants.KEY_TYPE_NAME));
    IAdapterFactory factory = factoryOutput.first;
    FeedIntakeOperatorDescriptor intakeOp = null;
    if (factoryOutput.third == IDataSourceAdapter.AdapterType.INTERNAL) {
        intakeOp = new FeedIntakeOperatorDescriptor(jobSpec, feed, factory, outputType, policyAccessor,
                factoryOutput.second);
    } else if (factoryOutput.third == IDataSourceAdapter.AdapterType.EXTERNAL) {
        // External adapter names are "<library><sep><adapter>"; the library is the first component.
        String libraryName = feed.getConfiguration().get(ExternalDataConstants.KEY_ADAPTER_NAME).trim()
                .split(FeedConstants.NamingConstants.LIBRARY_NAME_SEPARATOR)[0];
        intakeOp = new FeedIntakeOperatorDescriptor(jobSpec, feed, libraryName, factory.getClass().getName(),
                outputType, policyAccessor, factoryOutput.second);
    }
    AlgebricksPartitionConstraint partitionConstraint = factory.getPartitionConstraint();
    return new Triple<>(intakeOp, partitionConstraint, factory);
}
/**
 * Removes the given feed from the cache, if it is present.
 *
 * @param feed feed whose (dataverse, name) pair identifies the cache entry
 * @return the removed feed, or {@code null} if no such feed was cached
 */
public Feed dropFeedIfExists(Feed feed) {
    synchronized (feeds) {
        Map<String, Feed> dataverseFeeds = feeds.get(feed.getDataverseName());
        return dataverseFeeds == null ? null : dataverseFeeds.remove(feed.getFeedName());
    }
}
/**
 * Two feeds are equal iff their {@code EntityId}s are equal; all other state
 * (configuration, etc.) is ignored for identity purposes.
 */
@Override
public boolean equals(Object other) {
    if (other == this) {
        return true;
    }
    if (other instanceof Feed) {
        // Same call direction as before: the other feed's id drives the comparison.
        return ((Feed) other).getFeedId().equals(feedId);
    }
    return false;
}
/**
 * Validates a feed's configuration against metadata before it is created.
 *
 * NOTE(review): this chunk is truncated — the try block is never closed, and
 * {@code adapterEntity}, {@code adapterName}, {@code anameComponents},
 * {@code adapterFactory} and the enclosing switch (implied by the trailing
 * {@code break;}) are declared outside the visible span. Documented as-is.
 */
public static void validateFeed(Feed feed, MetadataTransactionContext mdTxnCtx, ICcApplicationContext appCtx)
        throws AlgebricksException {
    try {
        Map<String, String> configuration = feed.getConfiguration();
        // Resolve the declared output and (optional) meta record types by name.
        ARecordType adapterOutputType = getOutputType(feed, configuration.get(ExternalDataConstants.KEY_TYPE_NAME));
        ARecordType metaType = getOutputType(feed, configuration.get(ExternalDataConstants.KEY_META_TYPE_NAME));
        ExternalDataUtils.prepareFeed(configuration, feed.getDataverseName(), feed.getFeedName());
        // Look up the adapter entity registered under this dataverse.
        adapterEntity = MetadataManager.INSTANCE.getAdapter(mdTxnCtx, feed.getDataverseName(), adapterName);
        // For external adapters the first name component is the UDF library name.
        String libraryName = anameComponents[0];
        ClassLoader cl = appCtx.getLibraryManager().getLibraryClassLoader(feed.getDataverseName(), libraryName);
        // Instantiate the factory reflectively from the library's class loader.
        adapterFactory = (IAdapterFactory) cl.loadClass(adapterFactoryClassname).newInstance();
        break;
/**
 * Drops a feed: refuses while the feed is still active, unregisters its
 * (stopped) listener if any, runs a storage-cleanup job for the feed's intake
 * log artifacts, and finally deletes the feed's metadata entry.
 *
 * @throws CompilationException if the feed is active and connected to datasets
 */
protected void doDropFeed(IHyracksClientConnection hcc, MetadataProvider metadataProvider, Feed feed,
        SourceLocation sourceLoc) throws Exception {
    MetadataTransactionContext mdTxnCtx = metadataProvider.getMetadataTxnContext();
    EntityId feedId = feed.getFeedId();
    ActiveNotificationHandler notificationHandler =
            (ActiveNotificationHandler) appCtx.getActiveNotificationHandler();
    ActiveEntityEventsListener listener =
            (ActiveEntityEventsListener) notificationHandler.getListener(feedId);
    if (listener != null) {
        if (listener.getState() != ActivityState.STOPPED) {
            // An active feed must be stopped before it can be dropped.
            throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
                    "Feed " + feedId + " is currently active and connected to the following dataset(s) \n"
                            + listener.toString());
        }
        listener.unregister();
    }
    // Remove the feed's on-disk log/storage artifacts before deleting metadata.
    JobSpecification spec = FeedOperations.buildRemoveFeedStorageJob(metadataProvider,
            MetadataManager.INSTANCE.getFeed(mdTxnCtx, feedId.getDataverse(), feedId.getEntityName()));
    runJob(hcc, spec);
    MetadataManager.INSTANCE.dropFeed(mdTxnCtx, feed.getDataverseName(), feed.getFeedName());
    if (LOGGER.isInfoEnabled()) {
        LOGGER.info("Removed feed " + feedId);
    }
}
// NOTE(review): truncated chunk — the throw's if-block is never closed and the
// trailing argument list belongs to a constructor/call whose head is outside
// this span. Documented as-is.
// Meta type is mandatory when connecting a feed to a dataset with metadata.
String metaTypeName = FeedUtils.getFeedMetaTypeName(sourceFeed.getConfiguration());
if (metaTypeName == null) {
    throw new AlgebricksException("Feed to a dataset with metadata doesn't have meta type specified");
// Change feeds additionally need primary-key access expressions for upserts/deletes.
if (ExternalDataUtils.isChangeFeed(sourceFeed.getConfiguration())) {
    List<Mutable<ILogicalExpression>> keyAccessExpression = new ArrayList<>();
    keyAccessScalarFunctionCallExpression = new ArrayList<>();
// Tail of a collect-operator construction: key accessors, feed id, runtime
// type, intake locations (comma-separated), node domain, and the connection.
pkTypes, keyAccessScalarFunctionCallExpression, sourceFeed.getFeedId(),
FeedRuntimeType.valueOf(subscriptionLocation), locations.split(","),
context.getComputationNodeDomain(), feedConnection);
/**
 * Reports whether any feed connection in a dataverse OTHER than
 * {@code currentDataverse} applies the given function.
 *
 * @return true on the first connection found that contains {@code signature}
 */
protected boolean isFunctionUsed(MetadataTransactionContext ctx, FunctionSignature signature,
        String currentDataverse) throws AlgebricksException {
    for (Dataverse dataverse : MetadataManager.INSTANCE.getDataverses(ctx)) {
        String dataverseName = dataverse.getDataverseName();
        // The caller's own dataverse is deliberately excluded from the scan.
        if (dataverseName.equals(currentDataverse)) {
            continue;
        }
        for (Feed feed : MetadataManager.INSTANCE.getFeeds(ctx, dataverseName)) {
            for (FeedConnection connection : MetadataManager.INSTANCE.getFeedConections(ctx, dataverseName,
                    feed.getFeedName())) {
                if (connection.containsFunction(signature)) {
                    return true;
                }
            }
        }
    }
    return false;
}
private Feed createFeedFromARecord(ARecord feedRecord) { Feed feed; String dataverseName = ((AString) feedRecord.getValueByPos(MetadataRecordTypes.FEED_ARECORD_DATAVERSE_NAME_FIELD_INDEX)) .getStringValue(); String feedName = ((AString) feedRecord.getValueByPos(MetadataRecordTypes.FEED_ARECORD_FEED_NAME_FIELD_INDEX)) .getStringValue(); AUnorderedList feedConfig = (AUnorderedList) feedRecord.getValueByPos(MetadataRecordTypes.FEED_ARECORD_ADAPTOR_CONFIG_INDEX); IACursor cursor = feedConfig.getCursor(); // restore configurations String key; String value; Map<String, String> adaptorConfiguration = new HashMap<>(); while (cursor.next()) { ARecord field = (ARecord) cursor.get(); key = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_NAME_FIELD_INDEX)).getStringValue(); value = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_VALUE_FIELD_INDEX)).getStringValue(); adaptorConfiguration.put(key, value); } feed = new Feed(dataverseName, feedName, adaptorConfiguration); return feed; }
// NOTE(review): truncated chunk (near-duplicate of the validateFeed fragment) —
// the try is never closed and adapterName/adapterOutputType/metaType/
// adapterEntity/anameComponents/adapterFactoryClassname/adapterFactory are
// declared outside the visible span; the trailing break; implies an enclosing
// switch. Documented as-is.
IDataSourceAdapter.AdapterType adapterType = null;
try {
    Map<String, String> configuration = feed.getConfiguration();
    adapterName = configuration.get(ExternalDataConstants.KEY_ADAPTER_NAME);
    // Policy parameters are merged into the adapter configuration map.
    configuration.putAll(policyAccessor.getFeedPolicy());
    adapterOutputType = getOutputType(feed, configuration.get(ExternalDataConstants.KEY_TYPE_NAME));
    metaType = getOutputType(feed, configuration.get(ExternalDataConstants.KEY_META_TYPE_NAME));
    ExternalDataUtils.prepareFeed(configuration, feed.getDataverseName(), feed.getFeedName());
    adapterEntity = MetadataManager.INSTANCE.getAdapter(mdTxnCtx, feed.getDataverseName(), adapterName);
    // External adapter: load the factory from the named UDF library's class loader.
    String libraryName = anameComponents[0];
    ClassLoader cl = appCtx.getLibraryManager().getLibraryClassLoader(feed.getDataverseName(), libraryName);
    adapterFactory = (IAdapterFactory) cl.loadClass(adapterFactoryClassname).newInstance();
    break;
// NOTE(review): truncated chunk — the throw's if-block and the for loop body
// are cut off at the span edges. Documented as-is.
Feed feed =
        FeedMetadataUtil.validateIfFeedExists(dataverseName, feedName, metadataProvider.getMetadataTxnContext());
FeedEventsListener listener = (FeedEventsListener) activeEventHandler.getListener(feed.getFeedId());
// Connectivity changes are rejected while the feed is live.
if (listener != null && listener.isActive()) {
    // NOTE(review): the error argument is the feed's TYPE name
    // (KEY_TYPE_NAME), not the feed name — looks suspicious; confirm the
    // error-message template actually expects the type name here.
    throw new CompilationException(ErrorCode.FEED_CHANGE_FEED_CONNECTIVITY_ON_ALIVE_FEED, sourceLoc,
            feed.getConfiguration().get(ExternalDataConstants.KEY_TYPE_NAME));
List<FunctionSignature> appliedFunctions = cfs.getAppliedFunctions();
for (FunctionSignature func : appliedFunctions) {
// NOTE(review): truncated chunk — feedConnections is declared outside the
// span and the loop/statement nesting is cut off. Documented as-is.
// Cascade: drop every connection of the feed before dropping the feed itself.
feedConnections = getFeedConnections(txnId, dataverseName, feed.getFeedName());
for (FeedConnection feedConnection : feedConnections) {
    dropFeedConnection(txnId, dataverseName, feed.getFeedName(), feedConnection.getDatasetName());
dropFeed(txnId, dataverseName, feed.getFeedName());
// NOTE(review): truncated chunk — feed, dataverseName, feedName, cfs,
// mdTxnCtx, appCtx and metadataProvider are declared outside the span.
// Create the feed entity, validate its adapter/type configuration, then
// persist it in the metadata catalog.
feed = new Feed(dataverseName, feedName, cfs.getConfiguration());
FeedMetadataUtil.validateFeed(feed, mdTxnCtx, appCtx);
MetadataManager.INSTANCE.addFeed(metadataProvider.getMetadataTxnContext(), feed);
/**
 * Caches {@code feed} under its dataverse, creating the per-dataverse map on
 * first use.
 *
 * NOTE(review): despite the "IfNotExists" name, this uses {@link Map#put}, so
 * an already-cached feed with the same name is REPLACED and the previous entry
 * is returned — original behavior kept; confirm before switching to
 * putIfAbsent semantics.
 *
 * @param feed feed to cache
 * @return the previously cached feed with the same name, or {@code null}
 */
public Feed addFeedIfNotExists(Feed feed) {
    synchronized (feeds) {
        // computeIfAbsent replaces the original put-then-re-get pair:
        // one lookup, same observable result.
        Map<String, Feed> feedsInDataverse =
                feeds.computeIfAbsent(feed.getDataverseName(), k -> new HashMap<>());
        return feedsInDataverse.put(feed.getFeedName(), feed);
    }
}
/**
 * Builds the combined job that starts a feed: one intake job plus one
 * collect/connection job per feed connection, merged into a single spec.
 *
 * @return the combined job spec paired with the intake adapter's partition
 *         constraint (the ingestion locations)
 */
public static Pair<JobSpecification, AlgebricksAbsolutePartitionConstraint> buildStartFeedJob(
        MetadataProvider metadataProvider, Feed feed, List<FeedConnection> feedConnections,
        IStatementExecutor statementExecutor, IHyracksClientConnection hcc) throws Exception {
    // An empty policy map: the accessor falls back to its defaults.
    FeedPolicyAccessor fpa = new FeedPolicyAccessor(new HashMap<>());
    Pair<JobSpecification, IAdapterFactory> intakeInfo = buildFeedIntakeJobSpec(feed, metadataProvider, fpa);
    List<JobSpecification> jobsList = new ArrayList<>();
    // TODO: Figure out a better way to handle insert/upsert per conn instead of per feed
    Boolean insertFeed = ExternalDataUtils.isInsertFeed(feed.getConfiguration());
    // Construct the ingestion Job
    JobSpecification intakeJob = intakeInfo.getLeft();
    IAdapterFactory ingestionAdaptorFactory = intakeInfo.getRight();
    String[] ingestionLocations = ingestionAdaptorFactory.getPartitionConstraint().getLocations();
    // Add metadata configs
    metadataProvider.getConfig().put(FunctionUtil.IMPORT_PRIVATE_FUNCTIONS, Boolean.TRUE.toString());
    // Collect locations are published so connection jobs know where intake runs.
    metadataProvider.getConfig().put(FeedActivityDetails.COLLECT_LOCATIONS,
            StringUtils.join(ingestionLocations, ','));
    // TODO: Once we deprecated AQL, this extra queryTranslator can be removed.
    IStatementExecutor translator =
            getSQLPPTranslator(metadataProvider, ((QueryTranslator) statementExecutor).getSessionOutput());
    // Add connection job
    for (FeedConnection feedConnection : feedConnections) {
        JobSpecification connectionJob =
                getConnectionJob(metadataProvider, feedConnection, translator, hcc, insertFeed);
        jobsList.add(connectionJob);
    }
    return Pair.of(
            combineIntakeCollectJobs(metadataProvider, feed, intakeJob, jobsList, feedConnections,
                    ingestionLocations),
            intakeInfo.getRight().getPartitionConstraint());
} }
@Override public void addFeed(TxnId txnId, Feed feed) throws AlgebricksException, RemoteException { try { // Insert into the 'Feed' dataset. FeedTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getFeedTupleTranslator(true); ITupleReference feedTuple = tupleReaderWriter.getTupleFromMetadataEntity(feed); insertTupleIntoIndex(txnId, MetadataPrimaryIndexes.FEED_DATASET, feedTuple); } catch (HyracksDataException e) { if (e.getComponent().equals(ErrorCode.HYRACKS) && e.getErrorCode() == ErrorCode.DUPLICATE_KEY) { throw new AlgebricksException("A feed with this name " + feed.getFeedName() + " already exists in dataverse '" + feed.getDataverseName() + "'.", e); } else { throw new AlgebricksException(e); } } }
/**
 * Serializes the feed's adaptor configuration map as an unordered list of
 * (name, value) property records into the given field buffer, then registers
 * it as the adaptor-config field on the record builder.
 */
private void writeFeedAdaptorField(IARecordBuilder recordBuilder, Feed feed,
        ArrayBackedValueStorage fieldValueBuffer) throws HyracksDataException {
    UnorderedListBuilder configListBuilder = new UnorderedListBuilder();
    ArrayBackedValueStorage itemBuffer = new ArrayBackedValueStorage();
    // The list's item type comes from the FEED record type's adaptor-config field.
    configListBuilder.reset((AUnorderedListType) MetadataRecordTypes.FEED_RECORDTYPE
            .getFieldTypes()[MetadataRecordTypes.FEED_ARECORD_ADAPTOR_CONFIG_INDEX]);
    for (Map.Entry<String, String> property : feed.getConfiguration().entrySet()) {
        itemBuffer.reset();
        writePropertyTypeRecord(property.getKey(), property.getValue(), itemBuffer.getDataOutput());
        configListBuilder.addItem(itemBuffer);
    }
    configListBuilder.write(fieldValueBuffer.getDataOutput(), true);
    recordBuilder.addField(MetadataRecordTypes.FEED_ARECORD_ADAPTOR_CONFIG_INDEX, fieldValueBuffer);
}
/**
 * Builds a job that deletes the feed's log/storage files on every cluster
 * node: computes the feed's adapter file splits across all locations and
 * roots a FileRemoveOperatorDescriptor over them.
 *
 * @return the storage-cleanup job specification
 * @throws AsterixException if the splits cannot be computed
 */
public static JobSpecification buildRemoveFeedStorageJob(MetadataProvider metadataProvider, Feed feed)
        throws AsterixException {
    ICcApplicationContext appCtx = metadataProvider.getApplicationContext();
    JobSpecification spec = RuntimeUtils.createJobSpecification(appCtx);
    IClusterStateManager csm = appCtx.getClusterStateManager();
    AlgebricksAbsolutePartitionConstraint allCluster = csm.getClusterLocations();
    // TreeSet de-duplicates node names and yields a deterministic (sorted) order.
    Set<String> nodes = new TreeSet<>();
    for (String node : allCluster.getLocations()) {
        nodes.add(node);
    }
    // toArray(new String[0]) is the preferred JDK idiom over pre-sizing the array.
    AlgebricksAbsolutePartitionConstraint locations =
            new AlgebricksAbsolutePartitionConstraint(nodes.toArray(new String[0]));
    FileSplit[] feedLogFileSplits =
            FeedUtils.splitsForAdapter(appCtx, feed.getDataverseName(), feed.getFeedName(), locations);
    org.apache.hyracks.algebricks.common.utils.Pair<IFileSplitProvider, AlgebricksPartitionConstraint> spC =
            StoragePathUtil.splitProviderAndPartitionConstraints(feedLogFileSplits);
    // true => tolerate already-missing files during removal.
    FileRemoveOperatorDescriptor frod = new FileRemoveOperatorDescriptor(spec, spC.first, true);
    AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, frod, spC.second);
    spec.addRoot(frod);
    return spec;
}
// NOTE(review): truncated chunk — aString, stringSerde, tupleBuilder,
// fieldValue and recordBuilder are declared outside the visible span; this is
// the serialization tail of a feed tuple-translator method.
// Key fields of the tuple: (dataverseName, feedName).
aString.setValue(feed.getDataverseName());
stringSerde.serialize(aString, tupleBuilder.getDataOutput());
tupleBuilder.addFieldEndOffset();
aString.setValue(feed.getFeedName());
stringSerde.serialize(aString, tupleBuilder.getDataOutput());
tupleBuilder.addFieldEndOffset();
// Payload record fields: the same names written at their record positions.
aString.setValue(feed.getDataverseName());
stringSerde.serialize(aString, fieldValue.getDataOutput());
recordBuilder.addField(MetadataRecordTypes.FEED_ARECORD_DATAVERSE_NAME_FIELD_INDEX, fieldValue);
aString.setValue(feed.getFeedName());
stringSerde.serialize(aString, fieldValue.getDataOutput());
recordBuilder.addField(MetadataRecordTypes.FEED_ARECORD_FEED_NAME_FIELD_INDEX, fieldValue);