Merge pull request #14021 from garyrussell:kafkaStreams

* pr/14021:
  Polish "Add Kafka Streams auto-configuration"
  Add Kafka Streams auto-configuration
pull/14254/head
Stephane Nicoll 6 years ago
commit f8ce714c88

@ -132,6 +132,11 @@
<artifactId>jest</artifactId>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-streams</artifactId>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.flywaydb</groupId>
<artifactId>flyway-core</artifactId>

@ -52,7 +52,8 @@ import org.springframework.kafka.transaction.KafkaTransactionManager;
@Configuration
@ConditionalOnClass(KafkaTemplate.class)
@EnableConfigurationProperties(KafkaProperties.class)
@Import({ KafkaAnnotationDrivenConfiguration.class,
KafkaStreamsAnnotationDrivenConfiguration.class })
public class KafkaAutoConfiguration {
private final KafkaProperties properties;

@ -57,7 +57,7 @@ public class KafkaProperties {
/**
* Comma-delimited list of host:port pairs to use for establishing the initial
* connection to the Kafka cluster. Applies to all components unless overridden.
*/
private List<String> bootstrapServers = new ArrayList<>(
Collections.singletonList("localhost:9092"));
@ -79,6 +79,8 @@ public class KafkaProperties {
private final Admin admin = new Admin();
private final Streams streams = new Streams();
private final Listener listener = new Listener();
private final Ssl ssl = new Ssl();
@ -123,6 +125,10 @@ public class KafkaProperties {
return this.admin;
}
public Streams getStreams() {
return this.streams;
}
public Ssl getSsl() {
return this.ssl;
}
@ -193,6 +199,19 @@ public class KafkaProperties {
return properties;
}
/**
* Create an initial map of streams properties from the state of this instance.
* <p>
* This allows you to add additional properties, if necessary.
* @return the streams properties initialized with the customizations defined on this
* instance
*/
public Map<String, Object> buildStreamsProperties() {
Map<String, Object> properties = buildCommonProperties();
properties.putAll(this.streams.buildProperties());
return properties;
}
public static class Consumer {
private final Ssl ssl = new Ssl();
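A note on the `buildStreamsProperties()` addition above: it starts from `buildCommonProperties()` and then applies the streams-specific values with `putAll()`, so per-streams settings win over the global ones. A minimal sketch of that precedence, assuming the common properties map `spring.kafka.client-id` to Kafka's `client.id` (the sketch class itself is illustrative and only calls API visible in this diff):
import java.util.Map;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
public class StreamsPropertyPrecedenceSketch {
public static void main(String[] args) {
KafkaProperties properties = new KafkaProperties();
// Global client id, bound from spring.kafka.client-id.
properties.setClientId("global-client");
// Streams-specific override, bound from spring.kafka.streams.client-id.
properties.getStreams().setClientId("streams-client");
Map<String, Object> streamsProperties = properties.buildStreamsProperties();
// putAll() applies the streams values last, so the streams override wins.
System.out.println(streamsProperties.get("client.id")); // prints streams-client
}
}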
@ -211,7 +230,7 @@ public class KafkaProperties {
/**
* Comma-delimited list of host:port pairs to use for establishing the initial
* connection to the Kafka cluster. Overrides the global property, for consumers.
*/
private List<String> bootstrapServers;
@ -421,7 +440,7 @@ public class KafkaProperties {
/**
* Comma-delimited list of host:port pairs to use for establishing the initial
* connection to the Kafka cluster. Overrides the global property, for producers.
*/
private List<String> bootstrapServers;
@ -631,6 +650,136 @@ public class KafkaProperties {
}
/**
* High (and some medium) priority Streams properties and a general properties bucket.
*/
public static class Streams {
private final Ssl ssl = new Ssl();
/**
* Kafka streams application.id property; default spring.application.name.
*/
private String applicationId;
/**
* Whether or not to auto-start the streams factory bean.
*/
private boolean autoStartup = true;
/**
* Comma-delimited list of host:port pairs to use for establishing the initial
* connection to the Kafka cluster. Overrides the global property, for streams.
*/
private List<String> bootstrapServers;
/**
* Maximum number of memory bytes to be used for buffering across all threads.
*/
private Integer cacheMaxBytesBuffering;
/**
* ID to pass to the server when making requests. Used for server-side logging.
*/
private String clientId;
/**
* The replication factor for change log topics and repartition topics created by
* the stream processing application.
*/
private Integer replicationFactor;
/**
* Directory location for the state store.
*/
private String stateDir;
/**
* Additional Kafka properties used to configure the streams.
*/
private final Map<String, String> properties = new HashMap<>();
public Ssl getSsl() {
return this.ssl;
}
public String getApplicationId() {
return this.applicationId;
}
public void setApplicationId(String applicationId) {
this.applicationId = applicationId;
}
public boolean isAutoStartup() {
return this.autoStartup;
}
public void setAutoStartup(boolean autoStartup) {
this.autoStartup = autoStartup;
}
public List<String> getBootstrapServers() {
return this.bootstrapServers;
}
public void setBootstrapServers(List<String> bootstrapServers) {
this.bootstrapServers = bootstrapServers;
}
public Integer getCacheMaxBytesBuffering() {
return this.cacheMaxBytesBuffering;
}
public void setCacheMaxBytesBuffering(Integer cacheMaxBytesBuffering) {
this.cacheMaxBytesBuffering = cacheMaxBytesBuffering;
}
public String getClientId() {
return this.clientId;
}
public void setClientId(String clientId) {
this.clientId = clientId;
}
public Integer getReplicationFactor() {
return this.replicationFactor;
}
public void setReplicationFactor(Integer replicationFactor) {
this.replicationFactor = replicationFactor;
}
public String getStateDir() {
return this.stateDir;
}
public void setStateDir(String stateDir) {
this.stateDir = stateDir;
}
public Map<String, String> getProperties() {
return this.properties;
}
public Map<String, Object> buildProperties() {
Properties properties = new Properties();
PropertyMapper map = PropertyMapper.get().alwaysApplyingWhenNonNull();
map.from(this::getApplicationId).to(properties.in("application.id"));
map.from(this::getBootstrapServers)
.to(properties.in(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG));
map.from(this::getCacheMaxBytesBuffering)
.to(properties.in("cache.max.bytes.buffering"));
map.from(this::getClientId)
.to(properties.in(CommonClientConfigs.CLIENT_ID_CONFIG));
map.from(this::getReplicationFactor).to(properties.in("replication.factor"));
map.from(this::getStateDir).to(properties.in("state.dir"));
return properties.with(this.ssl, this.properties);
}
}
public static class Template {
/**
@ -1011,6 +1160,7 @@ public class KafkaProperties {
}
@SuppressWarnings("serial")
private static class Properties extends HashMap<String, Object> {
public <V> java.util.function.Consumer<V> in(String key) {

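For reviewers unfamiliar with `PropertyMapper`: the `buildProperties()` method above relies on `alwaysApplyingWhenNonNull()` to silently skip values that were never set. A small self-contained sketch of the same pattern (class and field names here are invented for illustration):
import java.util.HashMap;
import java.util.Map;
import org.springframework.boot.context.properties.PropertyMapper;
class StreamsSettingsSketch {
private String applicationId = "demo-app";
private Integer replicationFactor; // intentionally left unset
Map<String, Object> buildProperties() {
Map<String, Object> target = new HashMap<>();
PropertyMapper map = PropertyMapper.get().alwaysApplyingWhenNonNull();
// Each mapping copies its source value only when that value is non-null.
map.from(this.applicationId).to((value) -> target.put("application.id", value));
map.from(this.replicationFactor).to((value) -> target.put("replication.factor", value));
// replicationFactor is null, so only application.id ends up in the map.
return target;
}
}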
@ -0,0 +1,98 @@
/*
* Copyright 2012-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.autoconfigure.kafka;
import java.util.Map;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.context.properties.source.InvalidConfigurationPropertyValueException;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.env.Environment;
import org.springframework.kafka.annotation.KafkaStreamsDefaultConfiguration;
import org.springframework.kafka.config.KafkaStreamsConfiguration;
import org.springframework.kafka.core.StreamsBuilderFactoryBean;
/**
* Configuration for Kafka Streams annotation-driven support.
*
* @author Gary Russell
* @author Stephane Nicoll
*/
@Configuration
@ConditionalOnClass(StreamsBuilder.class)
@ConditionalOnBean(name = KafkaStreamsDefaultConfiguration.DEFAULT_STREAMS_BUILDER_BEAN_NAME)
class KafkaStreamsAnnotationDrivenConfiguration {
private final KafkaProperties properties;
KafkaStreamsAnnotationDrivenConfiguration(KafkaProperties properties) {
this.properties = properties;
}
@ConditionalOnMissingBean
@Bean(KafkaStreamsDefaultConfiguration.DEFAULT_STREAMS_CONFIG_BEAN_NAME)
public KafkaStreamsConfiguration defaultKafkaStreamsConfig(Environment environment) {
Map<String, Object> streamsProperties = this.properties.buildStreamsProperties();
if (this.properties.getStreams().getApplicationId() == null) {
String applicationName = environment.getProperty("spring.application.name");
if (applicationName != null) {
streamsProperties.put(StreamsConfig.APPLICATION_ID_CONFIG,
applicationName);
}
else {
throw new InvalidConfigurationPropertyValueException(
"spring.kafka.streams.application-id", null,
"This property is mandatory and fallback 'spring.application.name' is not set either.");
}
}
return new KafkaStreamsConfiguration(streamsProperties);
}
@Bean
public KafkaStreamsFactoryBeanConfigurer kafkaStreamsFactoryBeanConfigurer(
StreamsBuilderFactoryBean factoryBean) {
return new KafkaStreamsFactoryBeanConfigurer(this.properties, factoryBean);
}
// Separate class required to avoid BeanCurrentlyInCreationException
static class KafkaStreamsFactoryBeanConfigurer implements InitializingBean {
private final KafkaProperties properties;
private final StreamsBuilderFactoryBean factoryBean;
KafkaStreamsFactoryBeanConfigurer(KafkaProperties properties,
StreamsBuilderFactoryBean factoryBean) {
this.properties = properties;
this.factoryBean = factoryBean;
}
@Override
public void afterPropertiesSet() {
this.factoryBean.setAutoStartup(this.properties.getStreams().isAutoStartup());
}
}
}
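Since `defaultKafkaStreamsConfig` is guarded by `@ConditionalOnMissingBean`, an application can take full control of the streams configuration by declaring its own bean under the well-known name, as the `streamsWithCustomKafkaConfiguration` test below verifies. A minimal sketch (the id and server values are placeholders):
import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.streams.StreamsConfig;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafkaStreams;
import org.springframework.kafka.annotation.KafkaStreamsDefaultConfiguration;
import org.springframework.kafka.config.KafkaStreamsConfiguration;
@Configuration
@EnableKafkaStreams
class CustomKafkaStreamsConfig {
@Bean(name = KafkaStreamsDefaultConfiguration.DEFAULT_STREAMS_CONFIG_BEAN_NAME)
public KafkaStreamsConfiguration kafkaStreamsConfiguration() {
Map<String, Object> properties = new HashMap<>();
// These values replace the auto-configured ones entirely.
properties.put(StreamsConfig.APPLICATION_ID_CONFIG, "my-custom-id");
properties.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
return new KafkaStreamsConfiguration(properties);
}
}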

@ -28,9 +28,12 @@ import org.junit.Test;
import org.springframework.boot.test.util.TestPropertyValues;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafkaStreams;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.StreamsBuilderFactoryBean;
import org.springframework.kafka.support.KafkaHeaders;
import org.springframework.kafka.test.rule.EmbeddedKafkaRule;
import org.springframework.messaging.handler.annotation.Header;
@ -41,6 +44,7 @@ import static org.assertj.core.api.Assertions.assertThat;
* Integration tests for {@link KafkaAutoConfiguration}.
*
* @author Gary Russell
* @author Stephane Nicoll
*/
public class KafkaAutoConfigurationIntegrationTests {
@ -83,6 +87,14 @@ public class KafkaAutoConfigurationIntegrationTests {
producer.close();
}
@Test
public void testStreams() {
load(KafkaStreamsConfig.class, "spring.application.name:my-app",
"spring.kafka.bootstrap-servers:" + getEmbeddedKafkaBrokersAsString());
assertThat(this.context.getBean(StreamsBuilderFactoryBean.class).isAutoStartup())
.isTrue();
}
private void load(Class<?> config, String... environment) {
this.context = doLoad(new Class<?>[] { config }, environment);
}
@ -101,7 +113,8 @@ public class KafkaAutoConfigurationIntegrationTests {
return embeddedKafka.getEmbeddedKafka().getBrokersAsString();
}
@Configuration
static class KafkaConfig {
@Bean
public Listener listener() {
@ -115,6 +128,12 @@ public class KafkaAutoConfigurationIntegrationTests {
}
@Configuration
@EnableKafkaStreams
static class KafkaStreamsConfig {
}
public static class Listener {
private final CountDownLatch latch = new CountDownLatch(1);

@ -18,7 +18,9 @@ package org.springframework.boot.autoconfigure.kafka;
import java.io.File;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import javax.security.auth.login.AppConfigurationEntry;
@ -30,6 +32,8 @@ import org.apache.kafka.common.serialization.IntegerDeserializer;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.LongDeserializer;
import org.apache.kafka.common.serialization.LongSerializer;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.junit.Test;
import org.springframework.beans.DirectFieldAccessor;
@ -37,8 +41,11 @@ import org.springframework.boot.autoconfigure.AutoConfigurations;
import org.springframework.boot.test.context.runner.ApplicationContextRunner;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafkaStreams;
import org.springframework.kafka.annotation.KafkaStreamsDefaultConfiguration;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaStreamsConfiguration;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaAdmin;
@ -273,6 +280,128 @@ public class KafkaAutoConfigurationTests {
});
}
@Test
public void streamsProperties() {
this.contextRunner.withUserConfiguration(EnableKafkaStreamsConfiguration.class)
.withPropertyValues("spring.kafka.client-id=cid",
"spring.kafka.bootstrap-servers=localhost:9092,localhost:9093",
"spring.application.name=appName",
"spring.kafka.properties.foo.bar.baz=qux.fiz.buz",
"spring.kafka.streams.auto-startup=false",
"spring.kafka.streams.cache-max-bytes-buffering=42",
"spring.kafka.streams.client-id=override",
"spring.kafka.streams.properties.fiz.buz=fix.fox",
"spring.kafka.streams.replication-factor=2",
"spring.kafka.streams.state-dir=/tmp/state",
"spring.kafka.streams.ssl.key-password=p7",
"spring.kafka.streams.ssl.key-store-location=classpath:ksLocP",
"spring.kafka.streams.ssl.key-store-password=p8",
"spring.kafka.streams.ssl.key-store-type=PKCS12",
"spring.kafka.streams.ssl.trust-store-location=classpath:tsLocP",
"spring.kafka.streams.ssl.trust-store-password=p9",
"spring.kafka.streams.ssl.trust-store-type=PKCS12",
"spring.kafka.streams.ssl.protocol=TLSv1.2")
.run((context) -> {
Properties configs = context.getBean(
KafkaStreamsDefaultConfiguration.DEFAULT_STREAMS_CONFIG_BEAN_NAME,
KafkaStreamsConfiguration.class).asProperties();
assertThat(configs.get(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG))
.isEqualTo("localhost:9092, localhost:9093");
assertThat(
configs.get(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG))
.isEqualTo("42");
assertThat(configs.get(StreamsConfig.CLIENT_ID_CONFIG))
.isEqualTo("override");
assertThat(configs.get(StreamsConfig.REPLICATION_FACTOR_CONFIG))
.isEqualTo("2");
assertThat(configs.get(StreamsConfig.STATE_DIR_CONFIG))
.isEqualTo("/tmp/state");
assertThat(configs.get(SslConfigs.SSL_KEY_PASSWORD_CONFIG))
.isEqualTo("p7");
assertThat(
(String) configs.get(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG))
.endsWith(File.separator + "ksLocP");
assertThat(configs.get(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG))
.isEqualTo("p8");
assertThat(configs.get(SslConfigs.SSL_KEYSTORE_TYPE_CONFIG))
.isEqualTo("PKCS12");
assertThat((String) configs
.get(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG))
.endsWith(File.separator + "tsLocP");
assertThat(configs.get(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG))
.isEqualTo("p9");
assertThat(configs.get(SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG))
.isEqualTo("PKCS12");
assertThat(configs.get(SslConfigs.SSL_PROTOCOL_CONFIG))
.isEqualTo("TLSv1.2");
assertThat(
context.getBeansOfType(KafkaJaasLoginModuleInitializer.class))
.isEmpty();
assertThat(configs.get("foo.bar.baz")).isEqualTo("qux.fiz.buz");
assertThat(configs.get("fiz.buz")).isEqualTo("fix.fox");
assertThat(context.getBean(
KafkaStreamsDefaultConfiguration.DEFAULT_STREAMS_BUILDER_BEAN_NAME))
.isNotNull();
});
}
@Test
public void streamsApplicationIdUsesMainApplicationNameByDefault() {
this.contextRunner.withUserConfiguration(EnableKafkaStreamsConfiguration.class)
.withPropertyValues("spring.application.name=my-test-app",
"spring.kafka.bootstrap-servers=localhost:9092,localhost:9093",
"spring.kafka.streams.auto-startup=false")
.run((context) -> {
Properties configs = context.getBean(
KafkaStreamsDefaultConfiguration.DEFAULT_STREAMS_CONFIG_BEAN_NAME,
KafkaStreamsConfiguration.class).asProperties();
assertThat(configs.get(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG))
.isEqualTo("localhost:9092, localhost:9093");
assertThat(configs.get(StreamsConfig.APPLICATION_ID_CONFIG))
.isEqualTo("my-test-app");
});
}
@Test
public void streamsWithCustomKafkaConfiguration() {
this.contextRunner
.withUserConfiguration(EnableKafkaStreamsConfiguration.class,
TestKafkaStreamsConfiguration.class)
.withPropertyValues("spring.application.name=my-test-app",
"spring.kafka.bootstrap-servers=localhost:9092,localhost:9093",
"spring.kafka.streams.auto-startup=false")
.run((context) -> {
Properties configs = context.getBean(
KafkaStreamsDefaultConfiguration.DEFAULT_STREAMS_CONFIG_BEAN_NAME,
KafkaStreamsConfiguration.class).asProperties();
assertThat(configs.get(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG))
.isEqualTo("localhost:9094, localhost:9095");
assertThat(configs.get(StreamsConfig.APPLICATION_ID_CONFIG))
.isEqualTo("test-id");
});
}
@Test
public void streamsApplicationIdIsMandatory() {
this.contextRunner.withUserConfiguration(EnableKafkaStreamsConfiguration.class)
.run((context) -> {
assertThat(context).hasFailed();
assertThat(context).getFailure()
.hasMessageContaining("spring.kafka.streams.application-id")
.hasMessageContaining(
"This property is mandatory and fallback 'spring.application.name' is not set either.");
});
}
@Test
public void streamsApplicationIdIsNotMandatoryIfEnableKafkaStreamsIsNotSet() {
this.contextRunner.run((context) -> {
assertThat(context).hasNotFailed();
assertThat(context).doesNotHaveBean(StreamsBuilder.class);
});
}
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
@Test @Test
public void listenerProperties() { public void listenerProperties() {
@ -404,4 +533,25 @@ public class KafkaAutoConfigurationTests {
}
@Configuration
@EnableKafkaStreams
protected static class EnableKafkaStreamsConfiguration {
}
@Configuration
protected static class TestKafkaStreamsConfiguration {
@Bean(name = KafkaStreamsDefaultConfiguration.DEFAULT_STREAMS_CONFIG_BEAN_NAME)
public KafkaStreamsConfiguration kafkaStreamsConfiguration() {
Map<String, Object> streamsProperties = new HashMap<>();
streamsProperties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,
"localhost:9094, localhost:9095");
streamsProperties.put(StreamsConfig.APPLICATION_ID_CONFIG, "test-id");
return new KafkaStreamsConfiguration(streamsProperties);
}
}
}

@ -397,6 +397,11 @@
<artifactId>commons-dbcp2</artifactId>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-streams</artifactId>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>

@ -1039,11 +1039,11 @@ content into your application. Rather, pick only the properties that you need.
spring.kafka.admin.ssl.trust-store-location= # Location of the trust store file.
spring.kafka.admin.ssl.trust-store-password= # Store password for the trust store file.
spring.kafka.admin.ssl.trust-store-type= # Type of the trust store.
spring.kafka.bootstrap-servers= # Comma-delimited list of host:port pairs to use for establishing the initial connection to the Kafka cluster. Applies to all components unless overridden.
spring.kafka.client-id= # ID to pass to the server when making requests. Used for server-side logging.
spring.kafka.consumer.auto-commit-interval= # Frequency with which the consumer offsets are auto-committed to Kafka if 'enable.auto.commit' is set to true.
spring.kafka.consumer.auto-offset-reset= # What to do when there is no initial offset in Kafka or if the current offset no longer exists on the server.
spring.kafka.consumer.bootstrap-servers= # Comma-delimited list of host:port pairs to use for establishing the initial connection to the Kafka cluster. Overrides the global property, for consumers.
spring.kafka.consumer.client-id= # ID to pass to the server when making requests. Used for server-side logging.
spring.kafka.consumer.enable-auto-commit= # Whether the consumer's offset is periodically committed in the background.
spring.kafka.consumer.fetch-max-wait= # Maximum amount of time the server blocks before answering the fetch request if there isn't sufficient data to immediately satisfy the requirement given by "fetch.min.bytes".
@ -1079,7 +1079,7 @@ content into your application. Rather, pick only the properties that you need.
spring.kafka.listener.type=single # Listener type.
spring.kafka.producer.acks= # Number of acknowledgments the producer requires the leader to have received before considering a request complete.
spring.kafka.producer.batch-size= # Default batch size in bytes.
spring.kafka.producer.bootstrap-servers= # Comma-delimited list of host:port pairs to use for establishing the initial connection to the Kafka cluster. Overrides the global property, for producers.
spring.kafka.producer.buffer-memory= # Total bytes of memory the producer can use to buffer records waiting to be sent to the server.
spring.kafka.producer.client-id= # ID to pass to the server when making requests. Used for server-side logging.
spring.kafka.producer.compression-type= # Compression type for all data generated by the producer.
@ -1105,6 +1105,22 @@ content into your application. Rather, pick only the properties that you need.
spring.kafka.ssl.trust-store-location= # Location of the trust store file.
spring.kafka.ssl.trust-store-password= # Store password for the trust store file.
spring.kafka.ssl.trust-store-type= # Type of the trust store.
spring.kafka.streams.application-id= # Kafka streams application.id property; default spring.application.name.
spring.kafka.streams.auto-startup=true # Whether or not to auto-start the streams factory bean.
spring.kafka.streams.bootstrap-servers= # Comma-delimited list of host:port pairs to use for establishing the initial connection to the Kafka cluster. Overrides the global property, for streams.
spring.kafka.streams.cache-max-bytes-buffering= # Maximum number of memory bytes to be used for buffering across all threads.
spring.kafka.streams.client-id= # ID to pass to the server when making requests. Used for server-side logging.
spring.kafka.streams.properties.*= # Additional Kafka properties used to configure the streams.
spring.kafka.streams.replication-factor= # The replication factor for change log topics and repartition topics created by the stream processing application.
spring.kafka.streams.ssl.key-password= # Password of the private key in the key store file.
spring.kafka.streams.ssl.key-store-location= # Location of the key store file.
spring.kafka.streams.ssl.key-store-password= # Store password for the key store file.
spring.kafka.streams.ssl.key-store-type= # Type of the key store.
spring.kafka.streams.ssl.protocol= # SSL protocol to use.
spring.kafka.streams.ssl.trust-store-location= # Location of the trust store file.
spring.kafka.streams.ssl.trust-store-password= # Store password for the trust store file.
spring.kafka.streams.ssl.trust-store-type= # Type of the trust store.
spring.kafka.streams.state-dir= # Directory location for the state store.
spring.kafka.template.default-topic= # Default topic to which messages are sent.
# RABBIT ({sc-spring-boot-autoconfigure}/amqp/RabbitProperties.{sc-ext}[RabbitProperties])

@ -5634,6 +5634,34 @@ The following component creates a listener endpoint on the `someTopic` topic:
}
----
[[boot-features-kafka-streams]]
==== Kafka Streams
Spring for Apache Kafka provides a factory bean to create a `StreamsBuilder` object and
manage the lifecycle of its streams. Spring Boot auto-configures the required
`KafkaStreamsConfiguration` bean as long as `kafka-streams` is on the classpath and Kafka
Streams is enabled via the `@EnableKafkaStreams` annotation.
Enabling Kafka Streams means that the application id and bootstrap servers must be set.
The former can be configured using `spring.kafka.streams.application-id`, defaulting to
`spring.application.name` if not set. The latter can be set globally or
specifically overridden just for streams.
Several additional properties are exposed as dedicated configuration properties; other
arbitrary Kafka properties can be set using the `spring.kafka.streams.properties`
namespace. See also <<boot-features-kafka-extra-props>> for more information.
To use the factory bean, simply wire `StreamsBuilder` into your `@Bean` as shown in the
following example:
[source,java,indent=0]
----
include::{code-examples}/kafka/KafkaStreamsBeanExample.java[tag=configuration]
----
By default, the streams managed by the `StreamsBuilder` object it creates are started
automatically. You can customize this behaviour using the
`spring.kafka.streams.auto-startup` property.
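If you disable auto-startup, one possible way to start the streams yourself is to drive
the factory bean from an application event once the context is ready. The following
sketch assumes that `StreamsBuilderFactoryBean` exposes the `SmartLifecycle` `start()`
method; the listener class itself is illustrative:
[source,java,indent=0]
----
import org.springframework.boot.context.event.ApplicationReadyEvent;
import org.springframework.context.event.EventListener;
import org.springframework.kafka.core.StreamsBuilderFactoryBean;
import org.springframework.stereotype.Component;
@Component
class ManualKafkaStreamsStarter {
private final StreamsBuilderFactoryBean factoryBean;
ManualKafkaStreamsStarter(StreamsBuilderFactoryBean factoryBean) {
this.factoryBean = factoryBean;
}
// Start the streams once the application is fully ready.
@EventListener(ApplicationReadyEvent.class)
public void startStreams() {
this.factoryBean.start();
}
}
----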
[[boot-features-kafka-extra-props]]
@ -5643,13 +5671,14 @@ The properties supported by auto configuration are shown in
(hyphenated or camelCase) map directly to the Apache Kafka dotted properties. Refer to the
Apache Kafka documentation for details.
The first few of these properties apply to all components (producers, consumers, admins,
and streams) but can be specified at the component level if you wish to use different
values.
Apache Kafka designates properties with an importance of HIGH, MEDIUM, or LOW. Spring Boot
auto-configuration supports all HIGH importance properties, some selected MEDIUM and LOW
properties, and any properties that do not have a default value.
Only a subset of the properties supported by Kafka are available directly through the
`KafkaProperties` class. If you wish to configure the producer or consumer with additional
properties that are not directly supported, use the following properties:
@ -5659,11 +5688,13 @@ properties that are not directly supported, use the following properties:
spring.kafka.admin.properties.prop.two=second
spring.kafka.consumer.properties.prop.three=third
spring.kafka.producer.properties.prop.four=fourth
spring.kafka.streams.properties.prop.five=fifth
----
This sets the common `prop.one` Kafka property to `first` (applies to producers,
consumers and admins), the `prop.two` admin property to `second`, the `prop.three`
consumer property to `third`, the `prop.four` producer property to `fourth` and the
`prop.five` streams property to `fifth`.
You can also configure the Spring Kafka `JsonDeserializer` as follows:

@ -0,0 +1,53 @@
/*
* Copyright 2012-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.docs.kafka;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.Produced;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafkaStreams;
import org.springframework.kafka.support.serializer.JsonSerde;
/**
* Example to show usage of {@link StreamsBuilder}.
*
* @author Stephane Nicoll
*/
public class KafkaStreamsBeanExample {
// tag::configuration[]
@Configuration
@EnableKafkaStreams
static class KafkaStreamsExampleConfiguration {
@Bean
public KStream<Integer, String> kStream(StreamsBuilder streamsBuilder) {
KStream<Integer, String> stream = streamsBuilder.stream("ks1In");
stream.map((k, v) -> new KeyValue<>(k, v.toUpperCase())).to("ks1Out",
Produced.with(Serdes.Integer(), new JsonSerde<>()));
return stream;
}
}
// end::configuration[]
}