一、前提条件

搭建了3个节点的Kafka集群,并且创建了一个名为test1的topic

二、添加maven依赖

<!-- Spring Integration Kafka adapter (presumably pulls in spring-kafka and
     kafka-clients transitively — verify against the resolved dependency tree).
     log4j is excluded so the project's own logging binding is used instead. -->
<dependency> 
    <groupId>org.springframework.integration</groupId> 
    <artifactId>spring-integration-kafka</artifactId> 
    <version>2.0.0.RELEASE</version> 
    <exclusions> 
        <exclusion> 
            <groupId>log4j</groupId> 
            <artifactId>log4j</artifactId> 
        </exclusion> 
    </exclusions> 
</dependency>

三、配置生产者

<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
       xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
       xsi:schemaLocation="
        http://www.springframework.org/schema/beans
        http://www.springframework.org/schema/beans/spring-beans.xsd">

    <!-- Producer configuration properties, passed as a Map to the factory. -->
    <bean id="producerProperties" class="java.util.HashMap">
        <constructor-arg>
            <map>
                <!-- Kafka broker bootstrap address. -->
                <entry key="bootstrap.servers" value="192.168.172.129:9092"/>
                <!-- NOTE: group.id was removed — it is a consumer-only property;
                     the producer ignores it and logs an "unknown config" warning. -->
                <!-- Wait for the full set of in-sync replicas to acknowledge. -->
                <entry key="acks" value="all"/>
                <entry key="retries" value="10"/>
                <entry key="batch.size" value="16384"/>
                <entry key="linger.ms" value="1"/>
                <entry key="buffer.memory" value="33554432"/>
                <entry key="key.serializer" value="org.apache.kafka.common.serialization.StringSerializer"/>
                <entry key="value.serializer" value="org.apache.kafka.common.serialization.StringSerializer"/>
            </map>
        </constructor-arg>
    </bean>

    <!-- ProducerFactory required to build the KafkaTemplate. -->
    <bean id="producerFactory" class="org.springframework.kafka.core.DefaultKafkaProducerFactory">
        <constructor-arg>
            <ref bean="producerProperties"/>
        </constructor-arg>
    </bean>

    <!-- KafkaTemplate bean: inject it and call its send methods to publish.
         autoFlush=true flushes after every send; defaultTopic backs sendDefault(). -->
    <bean id="kafkaTemplate" class="org.springframework.kafka.core.KafkaTemplate">
        <constructor-arg ref="producerFactory"/>
        <constructor-arg name="autoFlush" value="true"/>
        <property name="defaultTopic" value="test1"/>
    </bean>

</beans>

四、配置消费者

<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
       xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
       xmlns:int="http://www.springframework.org/schema/integration"
       xmlns:int-kafka="http://www.springframework.org/schema/integration/kafka"
       xmlns:task="http://www.springframework.org/schema/task"
       xsi:schemaLocation="http://www.springframework.org/schema/integration/kafka http://www.springframework.org/schema/integration/kafka/spring-integration-kafka.xsd
        http://www.springframework.org/schema/integration http://www.springframework.org/schema/integration/spring-integration.xsd
        http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
        http://www.springframework.org/schema/task http://www.springframework.org/schema/task/spring-task.xsd">

    <!-- Consumer configuration properties, passed as a Map to the factory. -->
    <bean id="consumerProperties" class="java.util.HashMap">
        <constructor-arg>
            <map>
                <!-- Kafka broker bootstrap address. -->
                <entry key="bootstrap.servers" value="192.168.172.129:9092"/>
                <!-- Consumer group id — offsets are tracked per group. -->
                <entry key="group.id" value="group1"/>
                <entry key="enable.auto.commit" value="true"/>
                <entry key="auto.commit.interval.ms" value="1000"/>
                <entry key="session.timeout.ms" value="30000"/>
                <entry key="key.deserializer" value="org.apache.kafka.common.serialization.StringDeserializer"/>
                <entry key="value.deserializer" value="org.apache.kafka.common.serialization.StringDeserializer"/>
            </map>
        </constructor-arg>
    </bean>

    <!-- ConsumerFactory required by the listener container. -->
    <bean id="consumerFactory" class="org.springframework.kafka.core.DefaultKafkaConsumerFactory">
        <constructor-arg>
            <ref bean="consumerProperties"/>
        </constructor-arg>
    </bean>

    <!-- The class that actually processes consumed records.
         (id typo fixed: "Listerner" -> "Listener") -->
    <bean id="messageListenerConsumerService" class="com.netease.hdone.repay.vo.ConsumerListener"/>

    <!-- Listener container configuration. -->
    <bean id="containerProperties" class="org.springframework.kafka.listener.config.ContainerProperties">
        <!-- Important: the topic to subscribe to. -->
        <constructor-arg value="test1"/>
        <property name="messageListener" ref="messageListenerConsumerService"/>
    </bean>

    <!-- Message listener container: polls Kafka and dispatches records to the
         listener above. NOTE(review): init-method="doStart" targets a protected
         method; the container is a SmartLifecycle and is normally started by
         the context automatically — confirm this init-method is needed. -->
    <bean id="messageListenerContainer" class="org.springframework.kafka.listener.KafkaMessageListenerContainer" init-method="doStart">
        <constructor-arg ref="consumerFactory"/>
        <constructor-arg ref="containerProperties"/>
    </bean>


</beans>

五、消息消费类

/**
 * Kafka message listener: receives one {@code ConsumerRecord} per delivered
 * message. Wired as the {@code messageListener} of the listener container in
 * the consumer XML configuration.
 */
public final class ConsumerListener implements MessageListener<String, String> {

    private static final Logger logger = LoggerFactory.getLogger(ConsumerListener.class);

    @Override
    public void onMessage(ConsumerRecord<String, String> data) {
        // Fix: the declared SLF4J logger was unused and output went through
        // System.out.println, bypassing logging configuration. Use the
        // parameterized form so the record is only rendered if INFO is enabled.
        logger.info("消息: {}", data);
    }
}

六、生产者发送消息

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    /**
     * Test endpoint: publishes ten messages to the template's default topic
     * and reports completion.
     */
    @RequestMapping(value = "/test.do")
    @ResponseBody
    public String index() {
        int sent = 0;
        while (sent < 10) {
            logger.info("======send=====" + sent);
            // sendDefault targets the defaultTopic configured on the template.
            kafkaTemplate.sendDefault("Hello,Kafka");
            sent++;
        }
        return "finished";
    }
发布评论

分享到:

IT虾米网

微信公众号:IT虾米 (左侧二维码扫一扫)欢迎添加!

Zookeeper集群搭建(伪集群)详解
你是第一个吃螃蟹的人
发表评论

◎欢迎参与讨论,请在这里发表您的看法、交流您的观点。