In the previous article we finished building a ZooKeeper-backed Kafka cluster (see my Kafka cluster setup post). After a few days of research I now have the Spring integration working as well, and that is the focus of this article. First, add the spring-integration-kafka dependency:
<dependency>
<groupId>org.springframework.integration</groupId>
<artifactId>spring-integration-kafka</artifactId>
<version>1.3.0.RELEASE</version>
</dependency>
A friendly reminder: the tutorials I found online also pulled in the kafka_2.10 jar, and with it my project threw errors. I suggest importing only the Kafka-related jar above.
With the jar in place, the rest is just Spring configuration. Here I split the producer and the consumer into separate configuration files.
First we configure the channel through which messages are produced (think of it as the sending utility); it is backed by a queue, and in the end all message sending goes through this channel, as the sender sketch after this snippet shows:
<int:channel id="kafkaProducerChannel">
<int:queue />
</int:channel>
<int-kafka:outbound-channel-adapter
id="kafkaOutboundChannelAdapterTopic" kafka-producer-context-ref="producerContextTopic"
auto-startup="true" channel="kafkaProducerChannel" order="3">
<int:poller fixed-delay="1000" time-unit="MILLISECONDS"
receive-timeout="1" task-executor="taskExecutor" />
</int-kafka:outbound-channel-adapter>
<int-kafka:producer-context id="producerContextTopic"
producer-properties="producerProperties">
<int-kafka:producer-configurations>
<!-- Multiple topic configurations.
     broker-list: the Kafka broker address
     key-serializer / value-serializer: our own serialization classes
     value-class-type: the type of the message payload being sent -->
<int-kafka:producer-configuration
broker-list="192.168.1.130:9091" key-serializer="stringSerializer"
value-class-type="java.lang.Object" value-serializer="stringSerializer"
topic="testTopic" />
<int-kafka:producer-configuration
broker-list="192.168.1.130:9091" key-serializer="stringSerializer"
value-class-type="java.lang.Object" value-serializer="stringSerializer"
topic="myTopic" />
</int-kafka:producer-configurations>
</int-kafka:producer-context>
<bean id="stringSerializer" class="com.bshinfo.web.base.kafka.producer.MySerializer" />
The complete producer.xml:
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:int="http://www.springframework.org/schema/integration" xmlns:int-kafka="http://www.springframework.org/schema/integration/kafka" xmlns:task="http://www.springframework.org/schema/task" xsi:schemaLocation="http://www.springframework.org/schema/integration/kafka http://www.springframework.org/schema/integration/kafka/spring-integration-kafka.xsd http://www.springframework.org/schema/integration http://www.springframework.org/schema/integration/spring-integration.xsd http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd http://www.springframework.org/schema/task http://www.springframework.org/schema/task/spring-task.xsd">
<!-- The producer sends key/value pairs. The default serializer is org.apache.kafka.common.serialization.StringSerializer; here we plug in our own class so that objects can be passed as payloads. See MySerializer. -->
<bean id="stringSerializer" class="com.bshinfo.web.base.kafka.producer.MySerializer" />
<!-- The Encoder below is not used anywhere and can be deleted; only one of Encoder and Serializer needs to be configured. The same applies to consumer.xml. -->
<!-- <bean id="kafkaEncoder" class="org.springframework.integration.kafka.serializer.avro.AvroReflectDatumBackedKafkaEncoder"> <constructor-arg value="com.kafka.demo.util.ObjectEncoder" /> </bean> -->
<!-- Producer properties; anything not set here falls back to the defaults -->
<bean id="producerProperties" class="org.springframework.beans.factory.config.PropertiesFactoryBean">
<property name="properties">
<props>
<prop key="topic.metadata.refresh.interval.ms">3600000</prop>
<prop key="message.send.max.retries">5</prop>
<!-- <prop key="serializer.class">com.kafka.demo.util.ObjectEncoder</prop> -->
<prop key="request.required.acks">1</prop>
</props>
</property>
</bean>
<!-- The producer sends messages through this queue-backed channel -->
<int:channel id="kafkaProducerChannel">
<int:queue />
</int:channel>
<!-- Producer send settings: channel plus poller configuration -->
<int-kafka:outbound-channel-adapter id="kafkaOutboundChannelAdapterTopic" kafka-producer-context-ref="producerContextTopic" auto-startup="true" channel="kafkaProducerChannel" order="3">
<int:poller fixed-delay="1000" time-unit="MILLISECONDS" receive-timeout="1" task-executor="taskExecutor" />
</int-kafka:outbound-channel-adapter>
<task:executor id="taskExecutor" pool-size="5" keep-alive="120" queue-capacity="500" />
<!-- Topics for outgoing messages; a topic must be configured here before messages can be sent to it -->
<int-kafka:producer-context id="producerContextTopic" producer-properties="producerProperties">
<int-kafka:producer-configurations>
<!-- Multiple topic configurations. broker-list: the Kafka broker address; key-serializer / value-serializer: our own serialization classes; value-class-type: the type of the message payload being sent -->
<int-kafka:producer-configuration broker-list="192.168.1.130:9091" key-serializer="stringSerializer" value-class-type="java.lang.Object" value-serializer="stringSerializer" topic="testTopic" />
<int-kafka:producer-configuration broker-list="192.168.1.130:9091" key-serializer="stringSerializer" value-class-type="java.lang.Object" value-serializer="stringSerializer" topic="myTopic" />
</int-kafka:producer-configurations>
</int-kafka:producer-context>
</beans>
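The stringSerializer bean above points at com.bshinfo.web.base.kafka.producer.MySerializer, which this post does not show. A minimal sketch of what such a class could look like, assuming it implements the Kafka client's Serializer interface and renders non-String payloads as JSON text (via the same net.sf.json library the consumer code below uses):

package com.bshinfo.web.base.kafka.producer;

import java.util.Map;

import net.sf.json.JSONArray;

import org.apache.kafka.common.serialization.Serializer;

// Hypothetical sketch of MySerializer: strings pass through unchanged, other
// objects are rendered as a JSON array so the consumer can parse them back.
public class MySerializer implements Serializer<Object>
{
    @Override
    public void configure(Map<String, ?> configs, boolean isKey)
    {
        // no configuration needed
    }

    @Override
    public byte[] serialize(String topic, Object data)
    {
        if (data == null)
        {
            return null;
        }
        if (data instanceof String)
        {
            return ((String) data).getBytes();
        }
        return JSONArray.fromObject(data).toString().getBytes();
    }

    @Override
    public void close()
    {
        // nothing to release
    }
}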
The configuration above is all it takes to send messages. In our project we go on to configure the receiving side (the consumer). The setup mirrors the producer's, so I won't explain it in detail; the comments in the configuration cover it. The differences are that it adds the ZooKeeper cluster information and, since the producer uses a custom serializer, the consumer configures the matching decoder.
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:int="http://www.springframework.org/schema/integration" xmlns:int-kafka="http://www.springframework.org/schema/integration/kafka" xmlns:task="http://www.springframework.org/schema/task" xsi:schemaLocation="http://www.springframework.org/schema/integration/kafka http://www.springframework.org/schema/integration/kafka/spring-integration-kafka.xsd http://www.springframework.org/schema/integration http://www.springframework.org/schema/integration/spring-integration.xsd http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd http://www.springframework.org/schema/task http://www.springframework.org/schema/task/spring-task.xsd">
<!-- The receiving channel; think of it as the receiving utility -->
<int:channel id="inputFromKafka">
<int:dispatcher task-executor="kafkaMessageExecutor" />
</int:channel>
<!-- ZooKeeper configuration; multiple nodes can be listed -->
<int-kafka:zookeeper-connect id="zookeeperConnect" zk-connect="192.168.1.130:2181,192.168.1.130:2182,192.168.1.130:2183" zk-connection-timeout="6000" zk-session-timeout="6000" zk-sync-time="2000" />
<!-- Inbound adapter configuration. auto-startup must be "true", otherwise no data is received -->
<int-kafka:inbound-channel-adapter id="kafkaInboundChannelAdapter" kafka-consumer-context-ref="consumerContext" auto-startup="true" channel="inputFromKafka">
<int:poller fixed-delay="1" time-unit="MILLISECONDS" />
</int-kafka:inbound-channel-adapter>
<task:executor id="kafkaMessageExecutor" pool-size="8" keep-alive="120" queue-capacity="500" />
<!-- <bean id="kafkaDecoder" class="org.springframework.integration.kafka.serializer.common.StringDecoder" /> -->
<bean id="kafkaDecoder" class="com.bshinfo.web.base.kafka.consumer.MyDecoder" />
<bean id="consumerProperties" class="org.springframework.beans.factory.config.PropertiesFactoryBean">
<property name="properties">
<props>
<prop key="auto.offset.reset">smallest</prop>
<prop key="socket.receive.buffer.bytes">10485760</prop> <!-- 10M -->
<prop key="fetch.message.max.bytes">5242880</prop>
<prop key="auto.commit.interval.ms">1000</prop>
</props>
</property>
</bean>
<!-- The bean that receives the messages -->
<bean id="kafkaConsumerService" class="com.bshinfo.web.base.kafka.consumer.ConsumerMessages" />
<!-- Specifies the method that receives the messages -->
<int:outbound-channel-adapter channel="inputFromKafka" ref="kafkaConsumerService" method="processMessage" />
<int-kafka:consumer-context id="consumerContext" consumer-timeout="1000" zookeeper-connect="zookeeperConnect" consumer-properties="consumerProperties">
<int-kafka:consumer-configurations>
<int-kafka:consumer-configuration group-id="default1" value-decoder="kafkaDecoder" key-decoder="kafkaDecoder" max-messages="5000">
<!-- Configuration for the two topics -->
<int-kafka:topic id="myTopic" streams="4" />
<int-kafka:topic id="testTopic" streams="4" />
</int-kafka:consumer-configuration>
</int-kafka:consumer-configurations>
</int-kafka:consumer-context>
</beans>
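Likewise, the kafkaDecoder bean points at com.bshinfo.web.base.kafka.consumer.MyDecoder, which is not shown either. A minimal sketch, assuming it implements the high-level consumer's Decoder interface and simply restores the string form written by MySerializer:

package com.bshinfo.web.base.kafka.consumer;

import kafka.serializer.Decoder;

// Hypothetical sketch of MyDecoder: turns the raw bytes back into the
// JSON string that MySerializer produced on the producer side.
public class MyDecoder implements Decoder<Object>
{
    @Override
    public Object fromBytes(byte[] bytes)
    {
        return bytes == null ? null : new String(bytes);
    }
}

And here is the ConsumerMessages bean that the adapter above delivers messages to: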
package com.bshinfo.web.base.kafka.consumer;

import java.util.Map;

import net.sf.json.JSONArray;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ConsumerMessages
{
    private static final Logger logger = LoggerFactory.getLogger(ConsumerMessages.class);

    // Called by the outbound-channel-adapter in consumer.xml. The outer map is
    // keyed by topic; each inner map maps a partition number to the messages
    // read from that partition.
    public void processMessage(Map<String, Map<Integer, Object>> msgs)
    {
        logger.info("================================processMessage===============");
        for (Map.Entry<String, Map<Integer, Object>> entry : msgs.entrySet())
        {
            logger.info("============Topic:" + entry.getKey());
            Map<Integer, Object> messages = entry.getValue();
            for (Integer partition : messages.keySet())
            {
                logger.info("======Partition:" + partition);
            }
            for (Object payload : messages.values())
            {
                logger.info("=====message:[" + payload + "]");
                // The payload is the JSON text produced on the producer side.
                JSONArray jsonArray = JSONArray.fromObject(payload);
                for (int i = 0; i < jsonArray.size(); i++)
                {
                    System.out.println(jsonArray.get(i).toString());
                    /* To map an element back onto a bean:
                       JSONObject object2 = (JSONObject) jsonArray.get(i);
                       UserInfo userInfo = (UserInfo) JSONObject.toBean(object2, UserInfo.class);
                       System.out.println(userInfo.getRealName() + "@@@" + userInfo.getUserSex()); */
                }
            }
        }
    }
}
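Since the inbound adapter is declared with auto-startup="true", loading the configuration is enough to start consuming. A minimal bootstrap sketch (the ConsumerDemo class name and the consumer.xml file name are my own placeholders):

package com.bshinfo.web.base.kafka.consumer;

import org.springframework.context.support.ClassPathXmlApplicationContext;

// Hypothetical bootstrap: loading consumer.xml starts the inbound adapter,
// which then delivers message batches to ConsumerMessages.processMessage().
public class ConsumerDemo
{
    public static void main(String[] args)
    {
        new ClassPathXmlApplicationContext("consumer.xml"); // assumed file name
    }
}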
Source code download.