
How to use org.apache.samza.system.kafka

Best Java code snippets using org.apache.samza.system.kafka (selected from 315 indexed results)

origin: apache/samza

@Override
public StreamSpec copyWithPartitionCount(int partitionCount) {
 return new KafkaStreamSpec(getId(), getPhysicalName(), getSystemName(), partitionCount, getReplicationFactor(),
   getProperties());
}
origin: apache/samza

/**
 * Make a copy of the spec with new properties
 * @param properties properties of the Kafka stream
 * @return new instance of {@link KafkaStreamSpec}
 */
public KafkaStreamSpec copyWithProperties(Properties properties) {
 return new KafkaStreamSpec(getId(), getPhysicalName(), getSystemName(), getPartitionCount(), getReplicationFactor(),
   properties);
}
origin: apache/samza

 public Properties getProperties() {
  return mapToProperties(getConfig());
 }
}
origin: apache/samza

public KafkaStreamSpec copyWithReplicationFactor(int replicationFactor) {
 return new KafkaStreamSpec(getId(), getPhysicalName(), getSystemName(), getPartitionCount(), replicationFactor,
   getProperties());
}
origin: org.apache.samza/samza-kafka_2.11

@Override
public boolean createStream(StreamSpec streamSpec) {
 LOG.info("Creating Kafka topic: {} on system: {}", streamSpec.getPhysicalName(), streamSpec.getSystemName());
 return KafkaSystemAdminUtilsScala.createStream(toKafkaSpec(streamSpec), getZkConnection());
}
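
From the caller's side, topic creation goes through the SystemAdmin interface. A minimal sketch, assuming a KafkaSystemAdmin obtained elsewhere (for example from KafkaSystemFactory.getAdmin) and illustrative stream names:

import org.apache.samza.system.StreamSpec;
import org.apache.samza.system.SystemAdmin;

public class CreateStreamSketch {
 public static void ensureTopic(SystemAdmin kafkaAdmin) {
  // (id, physical/topic name, system name, partition count) -- all illustrative
  StreamSpec spec = new StreamSpec("pageViews", "page-views", "kafka", 8);

  if (!kafkaAdmin.createStream(spec)) {
   // createStream typically returns false when the topic already exists;
   // validateStream throws StreamValidationException if the existing topic
   // does not match the requested spec.
   kafkaAdmin.validateStream(spec);
  }
 }
}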
origin: apache/samza

/**
 * Delete records up to (and including) the provided SSP offsets for
 * all system stream partitions specified in the map.
 * This only works with Kafka clusters running 0.11 or later; otherwise it is a no-op.
 * @param offsets specifies the offsets up to which messages should be deleted
 */
@Override
public void deleteMessages(Map<SystemStreamPartition, String> offsets) {
 if (deleteCommittedMessages) {
  if (adminClientForDelete == null) {
   adminClientForDelete = kafka.admin.AdminClient.create(createAdminClientProperties());
  }
  KafkaSystemAdminUtilsScala.deleteMessages(adminClientForDelete, offsets);
  deleteMessageCalled = true;
 }
}
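
The offsets map passed to deleteMessages keys each SystemStreamPartition to the last offset that should be removed. A minimal sketch of building one; the system, stream, and offset values are illustrative:

import java.util.HashMap;
import java.util.Map;

import org.apache.samza.Partition;
import org.apache.samza.system.SystemStreamPartition;

public class DeleteMessagesSketch {
 public static Map<SystemStreamPartition, String> buildOffsets() {
  Map<SystemStreamPartition, String> offsets = new HashMap<>();
  // Delete everything up to and including offset 41 in partition 0
  // and offset 99 in partition 1 of the "page-views" topic.
  offsets.put(new SystemStreamPartition("kafka", "page-views", new Partition(0)), "41");
  offsets.put(new SystemStreamPartition("kafka", "page-views", new Partition(1)), "99");
  return offsets;
 }
}

As the snippet above shows, the call is a no-op unless the admin was configured to delete committed messages and the brokers run Kafka 0.11 or later.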
origin: apache/samza

@Override
public void close(long timeout, TimeUnit timeUnit) {
 closed = true;
 // The real producer will flush messages as part of closing. We'll invoke flush here to approximate that behavior.
 new FlushRunnable(0).run();
}
origin: apache/samza

@Test
public void testConfigValidations() {
 final KafkaSystemConsumer consumer = createConsumer(FETCH_THRESHOLD_MSGS, FETCH_THRESHOLD_BYTES);
 consumer.start();
 // should be no failures
}
origin: apache/samza

@Test(expected = StreamValidationException.class)
public void testValidateStreamDoesNotExist() {
 StreamSpec spec = new StreamSpec("testId", "testStreamNameExist", "testSystem", 8);
 systemAdmin().validateStream(spec);
}
origin: apache/samza

@Override
public Future<RecordMetadata> send(ProducerRecord record) {
 return send(record, null);
}
origin: apache/samza

@Override
public void close() {
 close(Long.MAX_VALUE, TimeUnit.MILLISECONDS);
}
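
The send and close snippets above come from MockKafkaProducer, the test double that mimics the real producer. At the Samza API level, messages go through a SystemProducer such as KafkaSystemProducer; a minimal lifecycle sketch, assuming the producer was created elsewhere (for example via KafkaSystemFactory.getProducer) and that payloads are already serialized to byte arrays, with illustrative names:

import java.nio.charset.StandardCharsets;

import org.apache.samza.system.OutgoingMessageEnvelope;
import org.apache.samza.system.SystemProducer;
import org.apache.samza.system.SystemStream;

public class ProducerLifecycleSketch {
 public static void sendOne(SystemProducer producer) {
  String source = "example-task";                                // illustrative source name
  SystemStream stream = new SystemStream("kafka", "page-views"); // illustrative stream

  producer.register(source);   // register each source before start()
  producer.start();
  producer.send(source, new OutgoingMessageEnvelope(stream,
    "key-1".getBytes(StandardCharsets.UTF_8),
    "hello".getBytes(StandardCharsets.UTF_8)));
  producer.flush(source);      // blocks until in-flight sends for this source complete
  producer.stop();             // stopping also flushes pending messages
 }
}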
origin: apache/samza

public synchronized void flush() {
 new FlushRunnable(0).run();
}
org.apache.samza.system.kafka

Most used classes

  • KafkaStreamSpec
    Extends StreamSpec with the ability to easily get the topic replication factor.
  • KafkaSystemAdmin
  • KafkaSystemConsumer$KafkaConsumerMessageSink
  • KafkaSystemConsumer
  • KafkaSystemConsumerMetrics
  • KafkaSystemDescriptor (see the descriptor sketch after this list)
  • ChangelogInfo
  • KafkaConsumerProxy
  • KafkaSystemAdmin$OffsetsMaps
  • KafkaSystemAdminUtilsScala
  • KafkaOutputDescriptor
  • KafkaSystemFactory
  • KafkaSystemProducer
  • KafkaSystemProducerMetrics
  • MockKafkaProducer$FlushRunnable
  • MockKafkaProducer$FutureFailure
  • MockKafkaProducer$FutureSuccess
  • MockKafkaProducer
  • TestKafkaSystemAdminJava
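
For the descriptor classes in the list (KafkaSystemDescriptor, KafkaOutputDescriptor), a minimal sketch of how they are typically wired together in the high-level API; the broker addresses, stream id, and serde choice are illustrative, and the exact package of the descriptor classes varies across Samza versions:

import java.util.Arrays;

import org.apache.samza.serializers.StringSerde;
import org.apache.samza.system.kafka.KafkaOutputDescriptor;
import org.apache.samza.system.kafka.KafkaSystemDescriptor;

public class DescriptorSketch {
 public static KafkaOutputDescriptor<String> describeOutput() {
  KafkaSystemDescriptor kafka = new KafkaSystemDescriptor("kafka")
    .withConsumerZkConnect(Arrays.asList("localhost:2181"))
    .withProducerBootstrapServers(Arrays.asList("localhost:9092"));

  // The output descriptor pairs a stream id with the serde used to serialize messages.
  return kafka.getOutputDescriptor("page-view-counts", new StringSerde());
 }
}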