congrats Icon
New! Announcing Tabnine Chat Beta
Learn More
Tabnine Logo
StreamValidationException.<init>
Code IndexAdd Tabnine to your IDE (free)

How to use
org.apache.samza.system.StreamValidationException
constructor

Best Java code snippets using org.apache.samza.system.StreamValidationException.<init> (Showing top 7 results out of 315)

origin: apache/samza

/**
 * Validates that the stream described by the given spec exists in this system and
 * has the expected number of partitions.
 *
 * @param streamSpec spec of the stream to validate; its physical name is used for the metadata lookup
 * @throws StreamValidationException if metadata for the stream cannot be obtained, or if the
 *         actual partition count does not match {@code streamSpec.getPartitionCount()}
 */
@Override
public void validateStream(StreamSpec streamSpec) throws StreamValidationException {
 // Parameterized logging avoids eagerly calling streamSpec.toString() when INFO is disabled.
 LOG.info("About to validate stream = {}", streamSpec);
 String streamName = streamSpec.getPhysicalName();
 // Look up metadata for just this one stream.
 SystemStreamMetadata systemStreamMetadata =
   getSystemStreamMetadata(Collections.singleton(streamName)).get(streamName);
 if (systemStreamMetadata == null) {
  throw new StreamValidationException(
    "Failed to obtain metadata for stream " + streamName + ". Validation failed.");
 }
 int actualPartitionCounter = systemStreamMetadata.getSystemStreamPartitionMetadata().size();
 int expectedPartitionCounter = streamSpec.getPartitionCount();
 LOG.info("actualCount={}; expectedCount={}", actualPartitionCounter, expectedPartitionCounter);
 if (actualPartitionCounter != expectedPartitionCounter) {
  throw new StreamValidationException(
    String.format("Mismatch of partitions for stream %s. Expected %d, got %d. Validation failed.", streamName,
      expectedPartitionCounter, actualPartitionCounter));
 }
}
origin: org.apache.samza/samza-kafka_2.11

/**
 * Validates that the stream described by the given spec exists in this system and
 * has the expected number of partitions.
 *
 * @param streamSpec spec of the stream to validate; its physical name is used for the metadata lookup
 * @throws StreamValidationException if metadata for the stream cannot be obtained, or if the
 *         actual partition count does not match {@code streamSpec.getPartitionCount()}
 */
@Override
public void validateStream(StreamSpec streamSpec) throws StreamValidationException {
 // Parameterized logging avoids eagerly calling streamSpec.toString() when INFO is disabled.
 LOG.info("About to validate stream = {}", streamSpec);
 String streamName = streamSpec.getPhysicalName();
 // Look up metadata for just this one stream.
 SystemStreamMetadata systemStreamMetadata =
   getSystemStreamMetadata(Collections.singleton(streamName)).get(streamName);
 if (systemStreamMetadata == null) {
  throw new StreamValidationException(
    "Failed to obtain metadata for stream " + streamName + ". Validation failed.");
 }
 int actualPartitionCounter = systemStreamMetadata.getSystemStreamPartitionMetadata().size();
 int expectedPartitionCounter = streamSpec.getPartitionCount();
 LOG.info("actualCount={}; expectedCount={}", actualPartitionCounter, expectedPartitionCounter);
 if (actualPartitionCounter != expectedPartitionCounter) {
  throw new StreamValidationException(
    String.format("Mismatch of partitions for stream %s. Expected %d, got %d. Validation failed.", streamName,
      expectedPartitionCounter, actualPartitionCounter));
 }
}
origin: org.apache.samza/samza-kafka

/**
 * Validates that the stream described by the given spec exists in this system and
 * has the expected number of partitions.
 *
 * @param streamSpec spec of the stream to validate; its physical name is used for the metadata lookup
 * @throws StreamValidationException if metadata for the stream cannot be obtained, or if the
 *         actual partition count does not match {@code streamSpec.getPartitionCount()}
 */
@Override
public void validateStream(StreamSpec streamSpec) throws StreamValidationException {
 // Parameterized logging avoids eagerly calling streamSpec.toString() when INFO is disabled.
 LOG.info("About to validate stream = {}", streamSpec);
 String streamName = streamSpec.getPhysicalName();
 // Look up metadata for just this one stream.
 SystemStreamMetadata systemStreamMetadata =
   getSystemStreamMetadata(Collections.singleton(streamName)).get(streamName);
 if (systemStreamMetadata == null) {
  throw new StreamValidationException(
    "Failed to obtain metadata for stream " + streamName + ". Validation failed.");
 }
 int actualPartitionCounter = systemStreamMetadata.getSystemStreamPartitionMetadata().size();
 int expectedPartitionCounter = streamSpec.getPartitionCount();
 LOG.info("actualCount={}; expectedCount={}", actualPartitionCounter, expectedPartitionCounter);
 if (actualPartitionCounter != expectedPartitionCounter) {
  throw new StreamValidationException(
    String.format("Mismatch of partitions for stream %s. Expected %d, got %d. Validation failed.", streamName,
      expectedPartitionCounter, actualPartitionCounter));
 }
}
origin: apache/samza

/**
 * Verifies that a topic validation failure raised by the system admin during
 * createResources/start propagates out of the checkpoint manager as a
 * StreamValidationException.
 */
@Test(expected = StreamValidationException.class)
public void testStartFailsOnTopicValidationErrors() {
 KafkaStreamSpec spec = new KafkaStreamSpec(CHECKPOINT_TOPIC, CHECKPOINT_TOPIC,
   CHECKPOINT_SYSTEM, 1);
 // Stub the admin so that validating the checkpoint topic always fails.
 SystemAdmin admin = newAdmin("0", "10");
 doThrow(new StreamValidationException("invalid stream")).when(admin).validateStream(spec);
 SystemFactory systemFactory = newFactory(mock(SystemProducer.class), mock(SystemConsumer.class), admin);
 KafkaCheckpointManager manager = new KafkaCheckpointManager(spec, systemFactory,
   true, mock(Config.class), mock(MetricsRegistry.class), null, new KafkaCheckpointLogKeySerde());
 // The stubbed validation error is expected to surface during startup.
 manager.createResources();
 manager.start();
}
origin: apache/samza

/**
 * Converts a StreamSpec into a KafkaStreamSpec, with special handling for
 * changelog, coordinator, and intermediate streams.
 *
 * @param spec a StreamSpec object
 * @return the equivalent KafkaStreamSpec
 */
public KafkaStreamSpec toKafkaSpec(StreamSpec spec) {
 if (spec.isChangeLogStream()) {
  // Changelog topics must have registered metadata (replication factor, Kafka props).
  String topic = spec.getPhysicalName();
  ChangelogInfo info = changelogTopicMetaInformation.get(topic);
  if (info == null) {
   throw new StreamValidationException("Unable to find topic information for topic " + topic);
  }
  return new KafkaStreamSpec(spec.getId(), topic, systemName, spec.getPartitionCount(),
    info.replicationFactor(), info.kafkaProps());
 }
 if (spec.isCoordinatorStream()) {
  // The coordinator stream is always a single partition with its own replication/properties.
  return new KafkaStreamSpec(spec.getId(), spec.getPhysicalName(), systemName, 1,
    coordinatorStreamReplicationFactor, coordinatorStreamProperties);
 }
 KafkaStreamSpec converted = KafkaStreamSpec.fromSpec(spec);
 if (intermediateStreamProperties.containsKey(spec.getId())) {
  // Intermediate streams pick up any configured per-stream properties.
  converted = converted.copyWithProperties(intermediateStreamProperties.get(spec.getId()));
 }
 return converted;
}
origin: org.apache.samza/samza-kafka

/**
 * Converts a StreamSpec into a KafkaStreamSpec, with special handling for
 * changelog, coordinator, and intermediate streams.
 *
 * @param spec a StreamSpec object
 * @return the equivalent KafkaStreamSpec
 */
public KafkaStreamSpec toKafkaSpec(StreamSpec spec) {
 if (spec.isChangeLogStream()) {
  // Changelog topics must have registered metadata (replication factor, Kafka props).
  String topic = spec.getPhysicalName();
  ChangelogInfo info = changelogTopicMetaInformation.get(topic);
  if (info == null) {
   throw new StreamValidationException("Unable to find topic information for topic " + topic);
  }
  return new KafkaStreamSpec(spec.getId(), topic, systemName, spec.getPartitionCount(),
    info.replicationFactor(), info.kafkaProps());
 }
 if (spec.isCoordinatorStream()) {
  // The coordinator stream is always a single partition with its own replication/properties.
  return new KafkaStreamSpec(spec.getId(), spec.getPhysicalName(), systemName, 1,
    coordinatorStreamReplicationFactor, coordinatorStreamProperties);
 }
 KafkaStreamSpec converted = KafkaStreamSpec.fromSpec(spec);
 if (intermediateStreamProperties.containsKey(spec.getId())) {
  // Intermediate streams pick up any configured per-stream properties.
  converted = converted.copyWithProperties(intermediateStreamProperties.get(spec.getId()));
 }
 return converted;
}
origin: org.apache.samza/samza-kafka_2.11

/**
 * Converts a StreamSpec into a KafkaStreamSpec, with special handling for
 * changelog, coordinator, and intermediate streams.
 *
 * @param spec a StreamSpec object
 * @return the equivalent KafkaStreamSpec
 */
public KafkaStreamSpec toKafkaSpec(StreamSpec spec) {
 if (spec.isChangeLogStream()) {
  // Changelog topics must have registered metadata (replication factor, Kafka props).
  String topic = spec.getPhysicalName();
  ChangelogInfo info = changelogTopicMetaInformation.get(topic);
  if (info == null) {
   throw new StreamValidationException("Unable to find topic information for topic " + topic);
  }
  return new KafkaStreamSpec(spec.getId(), topic, systemName, spec.getPartitionCount(),
    info.replicationFactor(), info.kafkaProps());
 }
 if (spec.isCoordinatorStream()) {
  // The coordinator stream is always a single partition with its own replication/properties.
  return new KafkaStreamSpec(spec.getId(), spec.getPhysicalName(), systemName, 1,
    coordinatorStreamReplicationFactor, coordinatorStreamProperties);
 }
 KafkaStreamSpec converted = KafkaStreamSpec.fromSpec(spec);
 if (intermediateStreamProperties.containsKey(spec.getId())) {
  // Intermediate streams pick up any configured per-stream properties.
  converted = converted.copyWithProperties(intermediateStreamProperties.get(spec.getId()));
 }
 return converted;
}
org.apache.samza.system.StreamValidationException.<init>

Popular methods of StreamValidationException

    Popular in Java

    • Creating JSON documents from java classes using gson
    • getOriginalFilename (MultipartFile)
      Return the original filename in the client's filesystem.This may contain path information depending
    • setRequestProperty (URLConnection)
    • setContentView (Activity)
    • GridLayout (java.awt)
      The GridLayout class is a layout manager that lays out a container's components in a rectangular gri
    • FileReader (java.io)
      A specialized Reader that reads from a file in the file system. All read requests made by calling me
    • LinkedList (java.util)
      Doubly-linked list implementation of the List and Dequeinterfaces. Implements all optional list oper
    • CountDownLatch (java.util.concurrent)
      A synchronization aid that allows one or more threads to wait until a set of operations being perfor
    • HttpServlet (javax.servlet.http)
      Provides an abstract class to be subclassed to create an HTTP servlet suitable for a Web site. A sub
    • JButton (javax.swing)
    • Top PhpStorm plugins
    Tabnine Logo
    • Products

      Search for Java codeSearch for JavaScript code
    • IDE Plugins

      IntelliJ IDEAWebStormVisual StudioAndroid StudioEclipseVisual Studio CodePyCharmSublime TextPhpStormVimGoLandRubyMineEmacsJupyter NotebookJupyter LabRiderDataGripAppCode
    • Company

      About UsContact UsCareers
    • Resources

      FAQBlogTabnine AcademyTerms of usePrivacy policyJava Code IndexJavascript Code Index
    Get Tabnine for your IDE now