Tabnine Logo
StreamValidationException
Code Index — Add Tabnine to your IDE (free)

How to use
StreamValidationException
in
org.apache.samza.system

Best Java code snippets using org.apache.samza.system.StreamValidationException (Showing top 7 results out of 315)

origin: apache/samza

@Override
public void validateStream(StreamSpec streamSpec) throws StreamValidationException {
  // Validates that the physical stream exists and that its live partition count
  // matches the count declared in the spec.
  // Use SLF4J parameterized logging instead of string concatenation so the
  // message is only built when the INFO level is enabled.
  LOG.info("About to validate stream = {}", streamSpec);
  String streamName = streamSpec.getPhysicalName();
  SystemStreamMetadata systemStreamMetadata =
      getSystemStreamMetadata(Collections.singleton(streamName)).get(streamName);
  if (systemStreamMetadata == null) {
    // No metadata means the stream could not be found (or the metadata fetch failed).
    throw new StreamValidationException(
        "Failed to obtain metadata for stream " + streamName + ". Validation failed.");
  }
  // These are counts, not counters — name them accordingly.
  int actualPartitionCount = systemStreamMetadata.getSystemStreamPartitionMetadata().size();
  int expectedPartitionCount = streamSpec.getPartitionCount();
  LOG.info("actualCount={}; expectedCount={}", actualPartitionCount, expectedPartitionCount);
  if (actualPartitionCount != expectedPartitionCount) {
    throw new StreamValidationException(
        String.format("Mismatch of partitions for stream %s. Expected %d, got %d. Validation failed.", streamName,
            expectedPartitionCount, actualPartitionCount));
  }
}
origin: org.apache.samza/samza-kafka_2.11

@Override
public void validateStream(StreamSpec streamSpec) throws StreamValidationException {
  // Validates that the physical stream exists and that its live partition count
  // matches the count declared in the spec.
  // Parameterized SLF4J logging: avoids eager string concatenation when INFO is disabled.
  LOG.info("About to validate stream = {}", streamSpec);
  String streamName = streamSpec.getPhysicalName();
  SystemStreamMetadata systemStreamMetadata =
      getSystemStreamMetadata(Collections.singleton(streamName)).get(streamName);
  if (systemStreamMetadata == null) {
    // Metadata lookup returned nothing for this stream — treat as validation failure.
    throw new StreamValidationException(
        "Failed to obtain metadata for stream " + streamName + ". Validation failed.");
  }
  // Renamed from *Counter: these variables hold counts, not counters.
  int actualPartitionCount = systemStreamMetadata.getSystemStreamPartitionMetadata().size();
  int expectedPartitionCount = streamSpec.getPartitionCount();
  LOG.info("actualCount={}; expectedCount={}", actualPartitionCount, expectedPartitionCount);
  if (actualPartitionCount != expectedPartitionCount) {
    throw new StreamValidationException(
        String.format("Mismatch of partitions for stream %s. Expected %d, got %d. Validation failed.", streamName,
            expectedPartitionCount, actualPartitionCount));
  }
}
origin: org.apache.samza/samza-kafka

@Override
public void validateStream(StreamSpec streamSpec) throws StreamValidationException {
  // Validates that the physical stream exists and that its live partition count
  // matches the count declared in the spec.
  // Parameterized SLF4J logging: the formatted message is built lazily.
  LOG.info("About to validate stream = {}", streamSpec);
  String streamName = streamSpec.getPhysicalName();
  SystemStreamMetadata systemStreamMetadata =
      getSystemStreamMetadata(Collections.singleton(streamName)).get(streamName);
  if (systemStreamMetadata == null) {
    // No metadata for the stream — validation cannot proceed.
    throw new StreamValidationException(
        "Failed to obtain metadata for stream " + streamName + ". Validation failed.");
  }
  // Renamed from *Counter: these are counts.
  int actualPartitionCount = systemStreamMetadata.getSystemStreamPartitionMetadata().size();
  int expectedPartitionCount = streamSpec.getPartitionCount();
  LOG.info("actualCount={}; expectedCount={}", actualPartitionCount, expectedPartitionCount);
  if (actualPartitionCount != expectedPartitionCount) {
    throw new StreamValidationException(
        String.format("Mismatch of partitions for stream %s. Expected %d, got %d. Validation failed.", streamName,
            expectedPartitionCount, actualPartitionCount));
  }
}
origin: apache/samza

@Test(expected = StreamValidationException.class)
public void testStartFailsOnTopicValidationErrors() {
  // Checkpoint topic spec with a single partition.
  KafkaStreamSpec spec = new KafkaStreamSpec(CHECKPOINT_TOPIC, CHECKPOINT_TOPIC,
      CHECKPOINT_SYSTEM, 1);
  // Wire up an admin whose validateStream throws for this spec.
  SystemAdmin failingAdmin = newAdmin("0", "10");
  doThrow(new StreamValidationException("invalid stream")).when(failingAdmin).validateStream(spec);
  SystemFactory factory = newFactory(mock(SystemProducer.class), mock(SystemConsumer.class), failingAdmin);
  KafkaCheckpointManager manager = new KafkaCheckpointManager(spec, factory,
      true, mock(Config.class), mock(MetricsRegistry.class), null, new KafkaCheckpointLogKeySerde());
  // Startup must surface the validation failure as StreamValidationException.
  manager.createResources();
  manager.start();
}
origin: apache/samza

/**
 * Converts a StreamSpec into a KafkaStreamSpec. Special handling for coordinator and changelog streams.
 * @param spec a StreamSpec object
 * @return KafkaStreamSpec object
 * @throws StreamValidationException if the spec is a changelog stream with no registered topic metadata
 */
public KafkaStreamSpec toKafkaSpec(StreamSpec spec) {
  KafkaStreamSpec kafkaSpec;
  if (spec.isChangeLogStream()) {
    // Changelog streams must have replication factor / props registered up front.
    String topicName = spec.getPhysicalName();
    ChangelogInfo topicMeta = changelogTopicMetaInformation.get(topicName);
    if (topicMeta == null) {
      throw new StreamValidationException("Unable to find topic information for topic " + topicName);
    }
    kafkaSpec = new KafkaStreamSpec(spec.getId(), topicName, systemName, spec.getPartitionCount(),
        topicMeta.replicationFactor(), topicMeta.kafkaProps());
  } else if (spec.isCoordinatorStream()) {
    // Coordinator stream is always single-partition with its own replication settings.
    kafkaSpec =
        new KafkaStreamSpec(spec.getId(), spec.getPhysicalName(), systemName, 1, coordinatorStreamReplicationFactor,
            coordinatorStreamProperties);
  } else if (intermediateStreamProperties.containsKey(spec.getId())) {
    // Intermediate streams carry extra per-stream Kafka properties.
    kafkaSpec = KafkaStreamSpec.fromSpec(spec).copyWithProperties(intermediateStreamProperties.get(spec.getId()));
  } else {
    kafkaSpec = KafkaStreamSpec.fromSpec(spec);
  }
  return kafkaSpec;
}
origin: org.apache.samza/samza-kafka

/**
 * Converts a StreamSpec into a KafkaStreamSpec. Special handling for coordinator and changelog streams.
 * @param spec a StreamSpec object
 * @return KafkaStreamSpec object
 * @throws StreamValidationException if the spec is a changelog stream with no registered topic metadata
 */
public KafkaStreamSpec toKafkaSpec(StreamSpec spec) {
  KafkaStreamSpec kafkaSpec;
  if (spec.isChangeLogStream()) {
    // Changelog streams require pre-registered metadata (replication factor, props).
    String topicName = spec.getPhysicalName();
    ChangelogInfo topicMeta = changelogTopicMetaInformation.get(topicName);
    if (topicMeta == null) {
      throw new StreamValidationException("Unable to find topic information for topic " + topicName);
    }
    kafkaSpec = new KafkaStreamSpec(spec.getId(), topicName, systemName, spec.getPartitionCount(),
        topicMeta.replicationFactor(), topicMeta.kafkaProps());
  } else if (spec.isCoordinatorStream()) {
    // Coordinator stream: fixed single partition, dedicated replication config.
    kafkaSpec =
        new KafkaStreamSpec(spec.getId(), spec.getPhysicalName(), systemName, 1, coordinatorStreamReplicationFactor,
            coordinatorStreamProperties);
  } else if (intermediateStreamProperties.containsKey(spec.getId())) {
    // Intermediate streams get their configured extra Kafka properties merged in.
    kafkaSpec = KafkaStreamSpec.fromSpec(spec).copyWithProperties(intermediateStreamProperties.get(spec.getId()));
  } else {
    kafkaSpec = KafkaStreamSpec.fromSpec(spec);
  }
  return kafkaSpec;
}
origin: org.apache.samza/samza-kafka_2.11

/**
 * Converts a StreamSpec into a KafkaStreamSpec. Special handling for coordinator and changelog streams.
 * @param spec a StreamSpec object
 * @return KafkaStreamSpec object
 * @throws StreamValidationException if the spec is a changelog stream with no registered topic metadata
 */
public KafkaStreamSpec toKafkaSpec(StreamSpec spec) {
  KafkaStreamSpec kafkaSpec;
  if (spec.isChangeLogStream()) {
    // Changelog streams must have their topic metadata registered beforehand.
    String topicName = spec.getPhysicalName();
    ChangelogInfo topicMeta = changelogTopicMetaInformation.get(topicName);
    if (topicMeta == null) {
      throw new StreamValidationException("Unable to find topic information for topic " + topicName);
    }
    kafkaSpec = new KafkaStreamSpec(spec.getId(), topicName, systemName, spec.getPartitionCount(),
        topicMeta.replicationFactor(), topicMeta.kafkaProps());
  } else if (spec.isCoordinatorStream()) {
    // Coordinator stream is single-partition by design.
    kafkaSpec =
        new KafkaStreamSpec(spec.getId(), spec.getPhysicalName(), systemName, 1, coordinatorStreamReplicationFactor,
            coordinatorStreamProperties);
  } else if (intermediateStreamProperties.containsKey(spec.getId())) {
    // Apply any configured intermediate-stream Kafka properties.
    kafkaSpec = KafkaStreamSpec.fromSpec(spec).copyWithProperties(intermediateStreamProperties.get(spec.getId()));
  } else {
    kafkaSpec = KafkaStreamSpec.fromSpec(spec);
  }
  return kafkaSpec;
}
org.apache.samza.system.StreamValidationException

Most used methods

  • <init>

Popular in Java

  • Parsing JSON documents to java classes using gson
  • onRequestPermissionsResult (Fragment)
  • getResourceAsStream (ClassLoader)
  • startActivity (Activity)
  • Color (java.awt)
    The Color class is used to encapsulate colors in the default sRGB color space or colors in arbitrary
  • FileReader (java.io)
    A specialized Reader that reads from a file in the file system. All read requests made by calling me
  • Date (java.sql)
    A class which can consume and produce dates in SQL Date format. Dates are represented in SQL as yyyy
  • Hashtable (java.util)
    A plug-in replacement for JDK1.5 java.util.Hashtable. This version is based on org.cliffc.high_scale
  • Runner (org.openjdk.jmh.runner)
  • Option (scala)
  • Github Copilot alternatives
Tabnine Logo
  • Products

    Search for Java codeSearch for JavaScript code
  • IDE Plugins

    IntelliJ IDEAWebStormVisual StudioAndroid StudioEclipseVisual Studio CodePyCharmSublime TextPhpStormVimGoLandRubyMineEmacsJupyter NotebookJupyter LabRiderDataGripAppCode
  • Company

    About UsContact UsCareers
  • Resources

    FAQBlogTabnine AcademyTerms of usePrivacy policyJava Code IndexJavascript Code Index
Get Tabnine for your IDE now