Tabnine Logo
StreamValidationException
Code Index | Add Tabnine to your IDE (free)

How to use
StreamValidationException
in
org.apache.samza.system

Best Java code snippets using org.apache.samza.system.StreamValidationException (Showing top 7 results out of 315)

origin: apache/samza

/**
 * Validates that the physical stream described by {@code streamSpec} exists and that its
 * actual partition count equals the partition count requested in the spec.
 *
 * @param streamSpec the stream to validate (its physical name is used for the metadata lookup)
 * @throws StreamValidationException if no metadata can be fetched for the stream, or if the
 *         live partition count differs from {@link StreamSpec#getPartitionCount()}
 */
@Override
public void validateStream(StreamSpec streamSpec) throws StreamValidationException {
  LOG.info("About to validate stream = " + streamSpec);
  final String topic = streamSpec.getPhysicalName();

  // A null entry means the system could not resolve the topic at all.
  final SystemStreamMetadata metadata =
      getSystemStreamMetadata(Collections.singleton(topic)).get(topic);
  if (metadata == null) {
    throw new StreamValidationException(
      "Failed to obtain metadata for stream " + topic + ". Validation failed.");
  }

  final int actualPartitionCounter = metadata.getSystemStreamPartitionMetadata().size();
  final int expectedPartitionCounter = streamSpec.getPartitionCount();
  LOG.info("actualCount=" + actualPartitionCounter + "; expectedCount=" + expectedPartitionCounter);

  // Partition count is immutable for an existing topic, so a mismatch is a hard failure.
  if (actualPartitionCounter != expectedPartitionCounter) {
    throw new StreamValidationException(
      String.format("Mismatch of partitions for stream %s. Expected %d, got %d. Validation failed.", topic,
        expectedPartitionCounter, actualPartitionCounter));
  }
}
origin: org.apache.samza/samza-kafka_2.11

/**
 * Validates an existing stream against its spec: fetches the system metadata for the
 * stream's physical name and checks the live partition count against the one requested.
 *
 * @param streamSpec spec describing the stream to validate
 * @throws StreamValidationException if metadata cannot be obtained for the stream, or if
 *         the actual partition count does not match {@code streamSpec.getPartitionCount()}
 */
@Override
public void validateStream(StreamSpec streamSpec) throws StreamValidationException {
 LOG.info("About to validate stream = " + streamSpec);
 String streamName = streamSpec.getPhysicalName();
 // Metadata lookup is keyed by physical name; a missing entry means the stream is unknown.
 SystemStreamMetadata systemStreamMetadata =
   getSystemStreamMetadata(Collections.singleton(streamName)).get(streamName);
 if (systemStreamMetadata == null) {
  throw new StreamValidationException(
    "Failed to obtain metadata for stream " + streamName + ". Validation failed.");
 }
 int actualPartitionCounter = systemStreamMetadata.getSystemStreamPartitionMetadata().size();
 int expectedPartitionCounter = streamSpec.getPartitionCount();
 LOG.info("actualCount=" + actualPartitionCounter + "; expectedCount=" + expectedPartitionCounter);
 // Any difference in partition count fails validation outright.
 if (actualPartitionCounter != expectedPartitionCounter) {
  throw new StreamValidationException(
    String.format("Mismatch of partitions for stream %s. Expected %d, got %d. Validation failed.", streamName,
      expectedPartitionCounter, actualPartitionCounter));
 }
}
origin: org.apache.samza/samza-kafka

/**
 * Validates an existing stream against its spec: fetches the system metadata for the
 * stream's physical name and checks the live partition count against the one requested.
 *
 * @param streamSpec spec describing the stream to validate
 * @throws StreamValidationException if metadata cannot be obtained for the stream, or if
 *         the actual partition count does not match {@code streamSpec.getPartitionCount()}
 */
@Override
public void validateStream(StreamSpec streamSpec) throws StreamValidationException {
 LOG.info("About to validate stream = " + streamSpec);
 String streamName = streamSpec.getPhysicalName();
 // Metadata lookup is keyed by physical name; a missing entry means the stream is unknown.
 SystemStreamMetadata systemStreamMetadata =
   getSystemStreamMetadata(Collections.singleton(streamName)).get(streamName);
 if (systemStreamMetadata == null) {
  throw new StreamValidationException(
    "Failed to obtain metadata for stream " + streamName + ". Validation failed.");
 }
 int actualPartitionCounter = systemStreamMetadata.getSystemStreamPartitionMetadata().size();
 int expectedPartitionCounter = streamSpec.getPartitionCount();
 LOG.info("actualCount=" + actualPartitionCounter + "; expectedCount=" + expectedPartitionCounter);
 // Any difference in partition count fails validation outright.
 if (actualPartitionCounter != expectedPartitionCounter) {
  throw new StreamValidationException(
    String.format("Mismatch of partitions for stream %s. Expected %d, got %d. Validation failed.", streamName,
      expectedPartitionCounter, actualPartitionCounter));
 }
}
origin: apache/samza

/**
 * Verifies that {@code KafkaCheckpointManager.start()} surfaces a
 * {@link StreamValidationException} thrown by the underlying admin's
 * {@code validateStream} call for the checkpoint topic.
 */
@Test(expected = StreamValidationException.class)
public void testStartFailsOnTopicValidationErrors() {
 KafkaStreamSpec checkpointSpec = new KafkaStreamSpec(CHECKPOINT_TOPIC, CHECKPOINT_TOPIC,
   CHECKPOINT_SYSTEM, 1);
 // create an admin that throws an exception during validateStream
 SystemAdmin mockAdmin = newAdmin("0", "10");
 doThrow(new StreamValidationException("invalid stream")).when(mockAdmin).validateStream(checkpointSpec);
 SystemFactory factory = newFactory(mock(SystemProducer.class), mock(SystemConsumer.class), mockAdmin);
 // validateCheckpoint flag is true, so start() must call validateStream on the admin
 KafkaCheckpointManager checkpointManager = new KafkaCheckpointManager(checkpointSpec, factory,
   true, mock(Config.class), mock(MetricsRegistry.class), null, new KafkaCheckpointLogKeySerde());
 // expect an exception during startup
 checkpointManager.createResources();
 checkpointManager.start();
}
origin: apache/samza

/**
 * Converts a StreamSpec into a KafakStreamSpec. Special handling for coordinator and changelog stream.
 * @param spec a StreamSpec object
 * @return KafkaStreamSpec object
 */
/**
 * Converts a StreamSpec into a KafkaStreamSpec. Special handling for coordinator and
 * changelog streams, and for intermediate streams with configured properties.
 *
 * @param spec a StreamSpec object
 * @return KafkaStreamSpec object
 * @throws StreamValidationException if the spec is a changelog stream whose topic has no
 *         registered changelog metadata
 */
public KafkaStreamSpec toKafkaSpec(StreamSpec spec) {
  // Changelog streams carry per-topic replication factor and Kafka properties.
  if (spec.isChangeLogStream()) {
    String topicName = spec.getPhysicalName();
    ChangelogInfo topicMeta = changelogTopicMetaInformation.get(topicName);
    if (topicMeta == null) {
      throw new StreamValidationException("Unable to find topic information for topic " + topicName);
    }
    return new KafkaStreamSpec(spec.getId(), topicName, systemName, spec.getPartitionCount(),
      topicMeta.replicationFactor(), topicMeta.kafkaProps());
  }

  // The coordinator stream is always single-partition and uses its own replication config.
  if (spec.isCoordinatorStream()) {
    return new KafkaStreamSpec(spec.getId(), spec.getPhysicalName(), systemName, 1,
      coordinatorStreamReplicationFactor, coordinatorStreamProperties);
  }

  // Intermediate streams get their configured properties layered on top of the base spec.
  if (intermediateStreamProperties.containsKey(spec.getId())) {
    return KafkaStreamSpec.fromSpec(spec).copyWithProperties(intermediateStreamProperties.get(spec.getId()));
  }

  // Plain stream: direct conversion, no extra handling.
  return KafkaStreamSpec.fromSpec(spec);
}
origin: org.apache.samza/samza-kafka

/**
 * Converts a StreamSpec into a KafakStreamSpec. Special handling for coordinator and changelog stream.
 * @param spec a StreamSpec object
 * @return KafkaStreamSpec object
 */
public KafkaStreamSpec toKafkaSpec(StreamSpec spec) {
 KafkaStreamSpec kafkaSpec;
 if (spec.isChangeLogStream()) {
  String topicName = spec.getPhysicalName();
  ChangelogInfo topicMeta = changelogTopicMetaInformation.get(topicName);
  if (topicMeta == null) {
   throw new StreamValidationException("Unable to find topic information for topic " + topicName);
  }
  kafkaSpec = new KafkaStreamSpec(spec.getId(), topicName, systemName, spec.getPartitionCount(),
    topicMeta.replicationFactor(), topicMeta.kafkaProps());
 } else if (spec.isCoordinatorStream()) {
  kafkaSpec =
    new KafkaStreamSpec(spec.getId(), spec.getPhysicalName(), systemName, 1, coordinatorStreamReplicationFactor,
      coordinatorStreamProperties);
 } else if (intermediateStreamProperties.containsKey(spec.getId())) {
  kafkaSpec = KafkaStreamSpec.fromSpec(spec).copyWithProperties(intermediateStreamProperties.get(spec.getId()));
 } else {
  kafkaSpec = KafkaStreamSpec.fromSpec(spec);
 }
 return kafkaSpec;
}
origin: org.apache.samza/samza-kafka_2.11

/**
 * Converts a StreamSpec into a KafakStreamSpec. Special handling for coordinator and changelog stream.
 * @param spec a StreamSpec object
 * @return KafkaStreamSpec object
 */
public KafkaStreamSpec toKafkaSpec(StreamSpec spec) {
 KafkaStreamSpec kafkaSpec;
 if (spec.isChangeLogStream()) {
  String topicName = spec.getPhysicalName();
  ChangelogInfo topicMeta = changelogTopicMetaInformation.get(topicName);
  if (topicMeta == null) {
   throw new StreamValidationException("Unable to find topic information for topic " + topicName);
  }
  kafkaSpec = new KafkaStreamSpec(spec.getId(), topicName, systemName, spec.getPartitionCount(),
    topicMeta.replicationFactor(), topicMeta.kafkaProps());
 } else if (spec.isCoordinatorStream()) {
  kafkaSpec =
    new KafkaStreamSpec(spec.getId(), spec.getPhysicalName(), systemName, 1, coordinatorStreamReplicationFactor,
      coordinatorStreamProperties);
 } else if (intermediateStreamProperties.containsKey(spec.getId())) {
  kafkaSpec = KafkaStreamSpec.fromSpec(spec).copyWithProperties(intermediateStreamProperties.get(spec.getId()));
 } else {
  kafkaSpec = KafkaStreamSpec.fromSpec(spec);
 }
 return kafkaSpec;
}
org.apache.samza.system.StreamValidationException

Most used methods

  • <init>

Popular in Java

  • Creating JSON documents from java classes using gson
  • getSupportFragmentManager (FragmentActivity)
  • compareTo (BigDecimal)
  • setRequestProperty (URLConnection)
  • Path (java.nio.file)
  • Format (java.text)
    The base class for all formats. This is an abstract base class which specifies the protocol for clas
  • Collections (java.util)
    This class consists exclusively of static methods that operate on or return collections. It contains
  • TimeZone (java.util)
    TimeZone represents a time zone offset, and also figures out daylight savings. Typically, you get a
  • ThreadPoolExecutor (java.util.concurrent)
    An ExecutorService that executes each submitted task using one of possibly several pooled threads, n
  • IsNull (org.hamcrest.core)
    Is the value null?
  • Top 17 PhpStorm Plugins
Tabnine Logo
  • Products

    Search for Java codeSearch for JavaScript code
  • IDE Plugins

    IntelliJ IDEAWebStormVisual StudioAndroid StudioEclipseVisual Studio CodePyCharmSublime TextPhpStormVimAtomGoLandRubyMineEmacsJupyter NotebookJupyter LabRiderDataGripAppCode
  • Company

    About UsContact UsCareers
  • Resources

    FAQBlogTabnine AcademyStudentsTerms of usePrivacy policyJava Code IndexJavascript Code Index
Get Tabnine for your IDE now