<beans xmlns="http://www.springframework.org/schema/beans"
       xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
       xmlns:context="http://www.springframework.org/schema/context"
       xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.0.xsd
                           http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd">

    <!-- <context:property-placeholder location="classpath*:config/dbaasKafkaConfig.properties" /> -->

    <!-- 1. Consumer properties, built as a plain HashMap bean via raw XML. -->
    <bean id="consumerProperties" class="java.util.HashMap">
        <constructor-arg>
            <map>
                <entry key="bootstrap.servers" value="${kafka.consumer.bootstrap.servers}"/>
                <entry key="group.id" value="${kafka.consumer.group.id}"/>
                <entry key="enable.auto.commit" value="${kafka.consumer.enable.auto.commit}"/>
                <entry key="session.timeout.ms" value="${kafka.consumer.session.timeout.ms}"/>
                <entry key="auto.commit.interval.ms" value="${kafka.consumer.auto.commit.interval.ms}"/>
                <entry key="retry.backoff.ms" value="100"/>
                <entry key="key.deserializer" value="org.apache.kafka.common.serialization.StringDeserializer"/>
                <entry key="value.deserializer" value="org.apache.kafka.common.serialization.StringDeserializer"/>
            </map>
        </constructor-arg>
    </bean>

    <!-- 2. Consumer factory backed by the properties map above. -->
    <bean id="consumerFactory" class="org.springframework.kafka.core.DefaultKafkaConsumerFactory">
        <constructor-arg ref="consumerProperties"/>
    </bean>

    <!-- 3. The message-listener implementation that consumes records. -->
    <bean id="kafkaConsumerService" class="...service.kafka.KafkaConsumerSerivce"/>

    <!-- 4. Listener-container configuration: subscribed topics and the listener bean. -->
    <bean id="containerProperties" class="org.springframework.kafka.listener.ContainerProperties">
        <constructor-arg name="topics">
            <list>
                <value>${kafka.consumer.topic}</value>
            </list>
        </constructor-arg>
        <property name="messageListener" ref="kafkaConsumerService"/>
    </bean>

    <!-- 5. The listener container itself; started via doStart() as init-method.
         NOTE(review): doStart() is protected in newer spring-kafka versions — confirm
         against the spring-kafka version actually on the classpath. -->
    <bean id="messageListenerContainer"
          class="org.springframework.kafka.listener.KafkaMessageListenerContainer"
          init-method="doStart">
        <constructor-arg ref="consumerFactory"/>
        <constructor-arg ref="containerProperties"/>
    </bean>

    <!-- 6. Producer properties, same raw-HashMap approach as the consumer side. -->
    <bean id="producerProperties" class="java.util.HashMap">
        <constructor-arg>
            <map>
                <entry key="bootstrap.servers" value="${kafka.producer.bootstrap.servers}"/>
                <entry key="retries" value="${kafka.producer.retries}"/>
                <entry key="batch.size" value="${kafka.producer.batch.size}"/>
                <entry key="linger.ms" value="${kafka.producer.linger.ms}"/>
                <entry key="buffer.memory" value="${kafka.producer.buffer.memory}"/>
                <entry key="acks" value="${kafka.producer.acks}"/>
                <entry key="key.serializer" value="org.apache.kafka.common.serialization.StringSerializer"/>
                <entry key="value.serializer" value="org.apache.kafka.common.serialization.StringSerializer"/>
            </map>
        </constructor-arg>
    </bean>

    <!-- Producer factory used by the KafkaTemplate below. -->
    <bean id="producerFactory" class="org.springframework.kafka.core.DefaultKafkaProducerFactory">
        <constructor-arg ref="producerProperties"/>
    </bean>

    <!-- KafkaTemplate bean; inject this wherever messages need to be sent. -->
    <bean id="kafkaTemplate" class="org.springframework.kafka.core.KafkaTemplate">
        <constructor-arg ref="producerFactory"/>
        <constructor-arg name="autoFlush" value="true"/>
        <property name="defaultTopic" value="${kafka.producer.defaultTopic}"/>
    </bean>

    <bean id="kafkaProducerSerivce" class="...service.kafka.KafkaProducerSerivce"/>
</beans>
/**
 * Kafka message listener wired into the XML-configured
 * {@code KafkaMessageListenerContainer} (bean id {@code kafkaConsumerService}).
 *
 * <p>The container's config declares {@code StringDeserializer} for BOTH
 * {@code key.deserializer} and {@code value.deserializer}, so records arrive as
 * {@code ConsumerRecord<String, String>}. The original declaration
 * {@code MessageListener<Integer, String>} mismatched the key type and would
 * fail at runtime on any non-null key; the generics are corrected here.
 */
public class KafkaConsumerSerivce implements MessageListener<String, String> {

    private Logger log = LoggerFactory.getLogger(this.getClass());

    @Autowired
    KafkaProducerSerivce kafkaProducerService;

    @Autowired
    IEdbCommonService commService;

    /**
     * Invoked by the listener container for each consumed record.
     *
     * @param record the consumed record; key and value are Strings per the
     *               deserializers configured in the XML
     */
    @Override
    public void onMessage(ConsumerRecord<String, String> record) {
        ...
    }
}
public class KafkaProducerSerivce{ @Autowired KafkaTemplate<String, String> kafkaTemplate; private Logger log=LoggerFactory.getLogger(this.getClass()); public <K,T>void sendMessage(String topic, String data) { ListenableFuture<SendResult<String, String>> listenableFuture = null; listenableFuture = kafkaTemplate.send(topic, data); /*if (kafkaTemplate.getDefaultTopic().equals(topic)) { kafkaTemplate.send(topic, data); }else { listenableFuture = kafkaTemplate.send(topic, partition, timestamp, key, data); }*/ //发送成功回调 SuccessCallback<SendResult<String, String>> successCallback = new SuccessCallback<SendResult<String, String>>() { public void onSuccess(SendResult<String, String> result) { //成功业务逻辑 log.info("KafkaProducterSerivce send success"); } }; //发送失败回调 FailureCallback failureCallback = new FailureCallback() { public void onFailure(Throwable ex) { //失败重试 throw new RuntimeException(ex); } }; listenableFuture.addCallback(successCallback, failureCallback); } }
配置文件:
##################### kafka consumer ######################
kafka.consumer.bootstrap.servers=
# 是否自动提交：如果为true，消费者的偏移量将在后台定期自动提交
kafka.consumer.enable.auto.commit=true
kafka.consumer.auto.commit.interval.ms=1000
# 消费者群组ID，发布-订阅模式，即如果一个生产者，多个消费者都要消费，那么需要定义自己的群组，同一群组内的消费者只有一个能消费到消息
kafka.consumer.group.id=groupName
kafka.consumer.topic=topicName
# 消费监听器容器并发数
kafka.consumer.concurrency=1
# 在使用Kafka的组管理时，用于检测消费者故障的超时时间
kafka.consumer.session.timeout.ms=30000
kafka.consumer.key.deserializer=org.apache.kafka.common.serialization.StringDeserializer
kafka.consumer.value.deserializer=org.apache.kafka.common.serialization.StringDeserializer

##################### kafka producer ######################
# brokers集群
kafka.producer.bootstrap.servers=
# 即所有副本都同步到数据时send方法才返回，以此来完全判断数据是否发送成功，理论上来讲数据不会丢失
kafka.producer.acks=all
# 发送失败重试次数
kafka.producer.retries=3
# 批处理延迟时间上限：即该毫秒数过后，不管是否达到批处理数，都直接发送一次请求
kafka.producer.linger.ms=10
# 发送缓冲区大小（字节）；Kafka默认为33554432（32MB），此处设置为40960
kafka.producer.buffer.memory=40960
# 批处理条数：当多个记录被发送到同一个分区时，生产者会尝试将记录合并到更少的请求中，这有助于客户端和服务器的性能
kafka.producer.batch.size=4096
kafka.producer.defaultTopic=producerTopicName
kafka.producer.key.serializer=org.apache.kafka.common.serialization.StringSerializer
kafka.producer.value.serializer=org.apache.kafka.common.serialization.StringSerializer
需要的jar包:
spring-kafka 2.2.1
spring-messaging 4.1.6
kafka-clients 2.1.0
版本号供参考,可以是其他版本的,具体也可参考spring-kafka官网。
https://spring.io/projects/spring-kafka
xml方式有个弊端,就是如果没有安装kafka服务是无法启动程序的。
可使用其他方式,比如注解,或者显式调用。
未经允许不得转载:最优质网--最有指望 » spring-kafka xml 方式 spring-kafka xml
评论前必须登录!
登陆 注册