Merge pull request #7672 from garyrussell/GH-7646

* pr/7672:
  Polish Kafka properties
  Support arbitrary Kafka properties
pull/7718/merge
Phillip Webb 8 years ago
commit 85ed90282d

@ -33,6 +33,7 @@ import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.core.io.Resource; import org.springframework.core.io.Resource;
import org.springframework.kafka.listener.AbstractMessageListenerContainer.AckMode; import org.springframework.kafka.listener.AbstractMessageListenerContainer.AckMode;
import org.springframework.util.CollectionUtils;
/** /**
* Configuration properties for Spring for Apache Kafka. * Configuration properties for Spring for Apache Kafka.
@ -47,18 +48,6 @@ import org.springframework.kafka.listener.AbstractMessageListenerContainer.AckMo
@ConfigurationProperties(prefix = "spring.kafka") @ConfigurationProperties(prefix = "spring.kafka")
public class KafkaProperties { public class KafkaProperties {
private final Consumer consumer = new Consumer();
private final Producer producer = new Producer();
private final Listener listener = new Listener();
private final Template template = new Template();
private final Ssl ssl = new Ssl();
// Apache Kafka Common Properties
/** /**
* Comma-delimited list of host:port pairs to use for establishing the initial * Comma-delimited list of host:port pairs to use for establishing the initial
* connection to the Kafka cluster. * connection to the Kafka cluster.
@ -71,25 +60,20 @@ public class KafkaProperties {
*/ */
private String clientId; private String clientId;
public Consumer getConsumer() { /**
return this.consumer; * Additional properties used to configure the client.
} */
private Map<String, String> properties = new HashMap<String, String>();
public Producer getProducer() { private final Consumer consumer = new Consumer();
return this.producer;
}
public Listener getListener() { private final Producer producer = new Producer();
return this.listener;
}
public Ssl getSsl() { private final Listener listener = new Listener();
return this.ssl;
}
public Template getTemplate() { private final Ssl ssl = new Ssl();
return this.template;
} private final Template template = new Template();
public List<String> getBootstrapServers() { public List<String> getBootstrapServers() {
return this.bootstrapServers; return this.bootstrapServers;
@ -107,6 +91,34 @@ public class KafkaProperties {
this.clientId = clientId; this.clientId = clientId;
} }
public Map<String, String> getProperties() {
return this.properties;
}
public void setProperties(Map<String, String> properties) {
this.properties = properties;
}
public Consumer getConsumer() {
return this.consumer;
}
public Producer getProducer() {
return this.producer;
}
public Listener getListener() {
return this.listener;
}
public Ssl getSsl() {
return this.ssl;
}
public Template getTemplate() {
return this.template;
}
private Map<String, Object> buildCommonProperties() { private Map<String, Object> buildCommonProperties() {
Map<String, Object> properties = new HashMap<String, Object>(); Map<String, Object> properties = new HashMap<String, Object>();
if (this.bootstrapServers != null) { if (this.bootstrapServers != null) {
@ -135,6 +147,9 @@ public class KafkaProperties {
properties.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, properties.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG,
this.ssl.getTruststorePassword()); this.ssl.getTruststorePassword());
} }
if (!CollectionUtils.isEmpty(this.properties)) {
properties.putAll(this.properties);
}
return properties; return properties;
} }
@ -147,9 +162,9 @@ public class KafkaProperties {
* instance * instance
*/ */
public Map<String, Object> buildConsumerProperties() { public Map<String, Object> buildConsumerProperties() {
Map<String, Object> props = buildCommonProperties(); Map<String, Object> properties = buildCommonProperties();
props.putAll(this.consumer.buildProperties()); properties.putAll(this.consumer.buildProperties());
return props; return properties;
} }
/** /**
@ -161,9 +176,9 @@ public class KafkaProperties {
* instance * instance
*/ */
public Map<String, Object> buildProducerProperties() { public Map<String, Object> buildProducerProperties() {
Map<String, Object> props = buildCommonProperties(); Map<String, Object> properties = buildCommonProperties();
props.putAll(this.producer.buildProperties()); properties.putAll(this.producer.buildProperties());
return props; return properties;
} }
private static String resourceToPath(Resource resource) { private static String resourceToPath(Resource resource) {
@ -240,6 +255,11 @@ public class KafkaProperties {
*/ */
private Class<?> valueDeserializer = StringDeserializer.class; private Class<?> valueDeserializer = StringDeserializer.class;
/**
* Maximum number of records returned in a single call to poll().
*/
private Integer maxPollRecords;
public Ssl getSsl() { public Ssl getSsl() {
return this.ssl; return this.ssl;
} }
@ -332,6 +352,14 @@ public class KafkaProperties {
this.valueDeserializer = valueDeserializer; this.valueDeserializer = valueDeserializer;
} }
public Integer getMaxPollRecords() {
return this.maxPollRecords;
}
public void setMaxPollRecords(Integer maxPollRecords) {
this.maxPollRecords = maxPollRecords;
}
public Map<String, Object> buildProperties() { public Map<String, Object> buildProperties() {
Map<String, Object> properties = new HashMap<String, Object>(); Map<String, Object> properties = new HashMap<String, Object>();
if (this.autoCommitInterval != null) { if (this.autoCommitInterval != null) {
@ -395,6 +423,10 @@ public class KafkaProperties {
properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
this.valueDeserializer); this.valueDeserializer);
} }
if (this.maxPollRecords != null) {
properties.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG,
this.maxPollRecords);
}
return properties; return properties;
} }

@ -60,13 +60,16 @@ public class KafkaAutoConfigurationTests {
@Test @Test
public void consumerProperties() { public void consumerProperties() {
load("spring.kafka.bootstrap-servers=foo:1234", load("spring.kafka.bootstrap-servers=foo:1234", "spring.kafka.properties.foo=bar",
"spring.kafka.properties.baz=qux",
"spring.kafka.properties.foo.bar.baz=qux.fiz.buz",
"spring.kafka.ssl.key-password=p1", "spring.kafka.ssl.key-password=p1",
"spring.kafka.ssl.keystore-location=classpath:ksLoc", "spring.kafka.ssl.keystore-location=classpath:ksLoc",
"spring.kafka.ssl.keystore-password=p2", "spring.kafka.ssl.keystore-password=p2",
"spring.kafka.ssl.truststore-location=classpath:tsLoc", "spring.kafka.ssl.truststore-location=classpath:tsLoc",
"spring.kafka.ssl.truststore-password=p3", "spring.kafka.ssl.truststore-password=p3",
"spring.kafka.consumer.auto-commit-interval=123", "spring.kafka.consumer.auto-commit-interval=123",
"spring.kafka.consumer.max-poll-records=42",
"spring.kafka.consumer.auto-offset-reset=earliest", "spring.kafka.consumer.auto-offset-reset=earliest",
"spring.kafka.consumer.client-id=ccid", // test override common "spring.kafka.consumer.client-id=ccid", // test override common
"spring.kafka.consumer.enable-auto-commit=false", "spring.kafka.consumer.enable-auto-commit=false",
@ -109,6 +112,10 @@ public class KafkaAutoConfigurationTests {
.isEqualTo(LongDeserializer.class); .isEqualTo(LongDeserializer.class);
assertThat(configs.get(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG)) assertThat(configs.get(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG))
.isEqualTo(IntegerDeserializer.class); .isEqualTo(IntegerDeserializer.class);
assertThat(configs.get(ConsumerConfig.MAX_POLL_RECORDS_CONFIG)).isEqualTo(42);
assertThat(configs.get("foo")).isEqualTo("bar");
assertThat(configs.get("baz")).isEqualTo("qux");
assertThat(configs.get("foo.bar.baz")).isEqualTo("qux.fiz.buz");
} }
@Test @Test

@ -878,6 +878,7 @@ content into your application; rather pick only the properties that you need.
spring.kafka.consumer.group-id= # Unique string that identifies the consumer group this consumer belongs to. spring.kafka.consumer.group-id= # Unique string that identifies the consumer group this consumer belongs to.
spring.kafka.consumer.heartbeat-interval= # Expected time in milliseconds between heartbeats to the consumer coordinator. spring.kafka.consumer.heartbeat-interval= # Expected time in milliseconds between heartbeats to the consumer coordinator.
spring.kafka.consumer.key-deserializer= # Deserializer class for keys. spring.kafka.consumer.key-deserializer= # Deserializer class for keys.
spring.kafka.consumer.max-poll-records= # Maximum number of records returned in a single call to poll().
spring.kafka.consumer.value-deserializer= # Deserializer class for values. spring.kafka.consumer.value-deserializer= # Deserializer class for values.
spring.kafka.listener.ack-count= # Number of records between offset commits when ackMode is "COUNT" or "COUNT_TIME". spring.kafka.listener.ack-count= # Number of records between offset commits when ackMode is "COUNT" or "COUNT_TIME".
spring.kafka.listener.ack-mode= # Listener AckMode; see the spring-kafka documentation. spring.kafka.listener.ack-mode= # Listener AckMode; see the spring-kafka documentation.
@ -893,6 +894,7 @@ content into your application; rather pick only the properties that you need.
spring.kafka.producer.key-serializer= # Serializer class for keys. spring.kafka.producer.key-serializer= # Serializer class for keys.
spring.kafka.producer.retries= # When greater than zero, enables retrying of failed sends. spring.kafka.producer.retries= # When greater than zero, enables retrying of failed sends.
spring.kafka.producer.value-serializer= # Serializer class for values. spring.kafka.producer.value-serializer= # Serializer class for values.
spring.kafka.properties.*= # Additional properties used to configure the client.
spring.kafka.ssl.key-password= # Password of the private key in the key store file. spring.kafka.ssl.key-password= # Password of the private key in the key store file.
spring.kafka.ssl.keystore-location= # Location of the key store file. spring.kafka.ssl.keystore-location= # Location of the key store file.
spring.kafka.ssl.keystore-password= # Store password for the key store file. spring.kafka.ssl.keystore-password= # Store password for the key store file.

@ -4643,18 +4643,23 @@ auto configuration supports all HIGH importance properties, some selected MEDIUM
and any that do not have a default value. and any that do not have a default value.
Only a subset of the properties supported by Kafka are available via the `KafkaProperties` Only a subset of the properties supported by Kafka are available via the `KafkaProperties`
class. If you wish to configure the producer or consumer with additional properties, you class. If you wish to configure the producer or consumer with additional properties that
can override the producer factory and/or consumer factory bean, adding additional are not directly supported, use the following:
properties, for example:
[source,properties,indent=0]
----
spring.kafka.properties.foo.bar=baz
----
This sets the common `foo.bar` Kafka property to `baz`.
These properties will be shared by both the consumer and producer factory beans.
If you wish to customize these components with different properties, such as to use a
different metrics reader for each, you can override the bean definitions, as follows:
[source,java,indent=0] [source,java,indent=0]
---- ----
@Bean include::{code-examples}/kafka/KafkaSpecialProducerConsumerConfigExample.java[tag=configuration]
public ProducerFactory<?, ?> kafkaProducerFactory(KafkaProperties properties) {
Map<String, Object> producerProperties = properties.buildProducerProperties();
producerProperties.put("some.property", "some.value");
return new DefaultKafkaProducerFactory<Object, Object>(producerProperties);
}
---- ----

@ -0,0 +1,125 @@
/*
 * Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.kafka;
import java.util.List;
import java.util.Map;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.common.metrics.KafkaMetric;
import org.apache.kafka.common.metrics.MetricsReporter;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.ProducerFactory;
/**
* Example custom kafka configuration beans used when the user wants to apply different
* common properties to the producer and consumer.
*
* @author Gary Russell
* @since 1.5
*/
public class KafkaSpecialProducerConsumerConfigExample {

	// tag::configuration[]
	@Configuration
	public static class CustomKafkaBeans {

		/**
		 * Customized ProducerFactory bean.
		 * @param properties the kafka properties.
		 * @return the bean.
		 */
		@Bean
		public ProducerFactory<?, ?> kafkaProducerFactory(KafkaProperties properties) {
			Map<String, Object> producerProperties = properties.buildProducerProperties();
			// Register a metrics reporter that applies only to the producer side.
			producerProperties.put(CommonClientConfigs.METRIC_REPORTER_CLASSES_CONFIG,
					MyProducerMetricsReporter.class);
			return new DefaultKafkaProducerFactory<Object, Object>(producerProperties);
		}

		/**
		 * Customized ConsumerFactory bean.
		 * @param properties the kafka properties.
		 * @return the bean.
		 */
		@Bean
		public ConsumerFactory<?, ?> kafkaConsumerFactory(KafkaProperties properties) {
			Map<String, Object> consumerProperties = properties
					.buildConsumerProperties();
			// Register a metrics reporter that applies only to the consumer side.
			consumerProperties.put(CommonClientConfigs.METRIC_REPORTER_CLASSES_CONFIG,
					MyConsumerMetricsReporter.class);
			return new DefaultKafkaConsumerFactory<Object, Object>(consumerProperties);
		}

	}
	// end::configuration[]

	/**
	 * No-op {@link MetricsReporter} used by the example consumer factory; a real
	 * implementation would forward the metric callbacks to a monitoring system.
	 */
	public static class MyConsumerMetricsReporter implements MetricsReporter {

		@Override
		public void configure(Map<String, ?> configs) {
		}

		@Override
		public void init(List<KafkaMetric> metrics) {
		}

		@Override
		public void metricChange(KafkaMetric metric) {
		}

		@Override
		public void metricRemoval(KafkaMetric metric) {
		}

		@Override
		public void close() {
		}

	}

	/**
	 * No-op {@link MetricsReporter} used by the example producer factory; a real
	 * implementation would forward the metric callbacks to a monitoring system.
	 */
	public static class MyProducerMetricsReporter implements MetricsReporter {

		@Override
		public void configure(Map<String, ?> configs) {
		}

		@Override
		public void init(List<KafkaMetric> metrics) {
		}

		@Override
		public void metricChange(KafkaMetric metric) {
		}

		@Override
		public void metricRemoval(KafkaMetric metric) {
		}

		@Override
		public void close() {
		}

	}

}
Loading…
Cancel
Save