StreamValidationException.<init>

How to use the org.apache.samza.system.StreamValidationException constructor

Best Java code snippets using org.apache.samza.system.StreamValidationException.<init> (Showing top 7 results out of 315)
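
All of the results below use the single-argument StreamValidationException(String message) constructor, and, as the toKafkaSpec snippet further down shows, the exception can be thrown without a throws declaration. As a minimal sketch of constructing and throwing it (the helper class and method names here are hypothetical, not part of Samza):

import org.apache.samza.system.StreamValidationException;

// Hypothetical helper illustrating the message-only constructor used in the snippets below.
public class PartitionCountCheck {
  public static void requirePartitionCount(String streamName, int expected, int actual) {
    if (actual != expected) {
      // Build a descriptive message and throw; no throws clause is needed.
      throw new StreamValidationException(
          String.format("Mismatch of partitions for stream %s. Expected %d, got %d.",
              streamName, expected, actual));
    }
  }
}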

origin: apache/samza (also published as org.apache.samza/samza-kafka and org.apache.samza/samza-kafka_2.11)

@Override
public void validateStream(StreamSpec streamSpec) throws StreamValidationException {
 LOG.info("About to validate stream = " + streamSpec);
 String streamName = streamSpec.getPhysicalName();
 SystemStreamMetadata systemStreamMetadata =
   getSystemStreamMetadata(Collections.singleton(streamName)).get(streamName);
 if (systemStreamMetadata == null) {
  throw new StreamValidationException(
    "Failed to obtain metadata for stream " + streamName + ". Validation failed.");
 }
 int actualPartitionCounter = systemStreamMetadata.getSystemStreamPartitionMetadata().size();
 int expectedPartitionCounter = streamSpec.getPartitionCount();
 LOG.info("actualCount=" + actualPartitionCounter + "; expectedCount=" + expectedPartitionCounter);
 if (actualPartitionCounter != expectedPartitionCounter) {
  throw new StreamValidationException(
    String.format("Mismatch of partitions for stream %s. Expected %d, got %d. Validation failed.", streamName,
      expectedPartitionCounter, actualPartitionCounter));
 }
}
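
On the calling side, a job can validate a stream up front and react to the failure. A minimal sketch, assuming an already-constructed SystemAdmin and StreamSpec (the wrapper class and method names are hypothetical):

import org.apache.samza.system.StreamSpec;
import org.apache.samza.system.StreamValidationException;
import org.apache.samza.system.SystemAdmin;

// Hypothetical caller-side guard: fail fast if the physical stream does not match the spec.
public class ValidateBeforeStart {
  public static void validateOrFail(SystemAdmin admin, StreamSpec spec) {
    try {
      admin.validateStream(spec);
    } catch (StreamValidationException e) {
      // Wrap and rethrow so start-up stops with a clear cause.
      throw new IllegalStateException("Validation failed for stream " + spec.getId(), e);
    }
  }
}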
origin: apache/samza

@Test(expected = StreamValidationException.class)
public void testStartFailsOnTopicValidationErrors() {
 KafkaStreamSpec checkpointSpec = new KafkaStreamSpec(CHECKPOINT_TOPIC, CHECKPOINT_TOPIC,
   CHECKPOINT_SYSTEM, 1);
 // create an admin that throws an exception during validateStream
 SystemAdmin mockAdmin = newAdmin("0", "10");
 doThrow(new StreamValidationException("invalid stream")).when(mockAdmin).validateStream(checkpointSpec);
 SystemFactory factory = newFactory(mock(SystemProducer.class), mock(SystemConsumer.class), mockAdmin);
 KafkaCheckpointManager checkpointManager = new KafkaCheckpointManager(checkpointSpec, factory,
   true, mock(Config.class), mock(MetricsRegistry.class), null, new KafkaCheckpointLogKeySerde());
 // expect an exception during startup
 checkpointManager.createResources();
 checkpointManager.start();
}
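
Because validateStream returns void, the test stubs it with Mockito's doThrow(...).when(mockAdmin).validateStream(checkpointSpec) form rather than when(...).thenThrow(...); the @Test(expected = StreamValidationException.class) annotation then passes once the stubbed admin's validation runs during checkpoint-manager start-up.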
origin: apache/samza (also published as org.apache.samza/samza-kafka and org.apache.samza/samza-kafka_2.11)

/**
 * Converts a StreamSpec into a KafkaStreamSpec, with special handling for coordinator and changelog streams.
 * @param spec a StreamSpec object
 * @return KafkaStreamSpec object
 */
public KafkaStreamSpec toKafkaSpec(StreamSpec spec) {
 KafkaStreamSpec kafkaSpec;
 if (spec.isChangeLogStream()) {
  String topicName = spec.getPhysicalName();
  ChangelogInfo topicMeta = changelogTopicMetaInformation.get(topicName);
  if (topicMeta == null) {
   throw new StreamValidationException("Unable to find topic information for topic " + topicName);
  }
  kafkaSpec = new KafkaStreamSpec(spec.getId(), topicName, systemName, spec.getPartitionCount(),
    topicMeta.replicationFactor(), topicMeta.kafkaProps());
 } else if (spec.isCoordinatorStream()) {
  kafkaSpec =
    new KafkaStreamSpec(spec.getId(), spec.getPhysicalName(), systemName, 1, coordinatorStreamReplicationFactor,
      coordinatorStreamProperties);
 } else if (intermediateStreamProperties.containsKey(spec.getId())) {
  kafkaSpec = KafkaStreamSpec.fromSpec(spec).copyWithProperties(intermediateStreamProperties.get(spec.getId()));
 } else {
  kafkaSpec = KafkaStreamSpec.fromSpec(spec);
 }
 return kafkaSpec;
}
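
Since toKafkaSpec throws StreamValidationException as an unchecked exception when no changelog metadata is registered for a topic, a caller can guard the conversion explicitly. A minimal sketch, assuming the enclosing class above is the samza-kafka KafkaSystemAdmin (the wrapper class name is hypothetical):

import org.apache.samza.system.StreamSpec;
import org.apache.samza.system.StreamValidationException;
import org.apache.samza.system.kafka.KafkaStreamSpec;
import org.apache.samza.system.kafka.KafkaSystemAdmin;

// Hypothetical wrapper: convert a StreamSpec and surface missing changelog metadata explicitly.
public class KafkaSpecConverter {
  public static KafkaStreamSpec convert(KafkaSystemAdmin admin, StreamSpec spec) {
    try {
      return admin.toKafkaSpec(spec);
    } catch (StreamValidationException e) {
      // Thrown above when no ChangelogInfo is found for a changelog stream's topic.
      throw new IllegalArgumentException("Cannot convert stream " + spec.getId(), e);
    }
  }
}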