Spring Cloud Stream整合Kafka

引入依賴
<dependency>
  <groupId>org.springframework.cloud</groupId>
  <artifactId>spring-cloud-stream-binder-kafka</artifactId>
</dependency>


<dependency>
  <groupId>org.springframework.cloud</groupId>
  <artifactId>spring-cloud-starter-stream-kafka</artifactId>
</dependency>
發送(Spring Kafka)
private final KafkaTemplate<String, Object> kafkaTemplate;

    /**
     * Constructor injection of the template used to publish records.
     * The parameter is parameterized (not the raw KafkaTemplate type) so the
     * compiler checks key/value types, and the field is final since it is
     * assigned exactly once here.
     */
    @Autowired
    public KafkaController(KafkaTemplate<String, Object> kafkaTemplate) {
        this.kafkaTemplate = kafkaTemplate;
    }

    /**
     * Publishes a Person (id = current timestamp, name from the path) to
     * "test-topic" via Spring Kafka's KafkaTemplate and echoes it back.
     *
     * Fix: the original mapping was "/send" with no {name} URI template, so
     * the @PathVariable parameter could never bind and every request failed.
     */
    @GetMapping("/send/{name}")
    public Person send(@PathVariable String name) {
        Person person = new Person();
        person.setId(System.currentTimeMillis());
        person.setName(name);
        kafkaTemplate.send("test-topic", person);
        return person;
    }
接收(Spring Kafka)
// Consumer (Spring Kafka): invoked for every record on "test-topic"; relies
// on the configured value deserializer to rebuild the Person payload.
@KafkaListener(topics = "test-topic")
 public void consume(Person person){
        System.out.println(person.toString());
    }

//生產者端錯誤信息 There was an unexpected error (type=Internal Server Error, status=500). Can't convert value of class com.service.Person to class org.apache.kafka.common.serialization.ByteArraySerializer specified in value.serializer

消費者端錯誤信息 nested exception is org.springframework.messaging.converter.MessageConversionException: Cannot convert from [[B] to [com.service.Person]

KafkaProperties-> Consumer->valueDeserializer

// 解決辦法 KafkaProperties-> Producer->valueSerializer

spring:
  kafka:
    producer:
      valueSerializer: com.service.kafka.ObjectSerializer #加入自定義序列化方式
    consumer:
      groupId: test
      valueDeserializer: com.service.kafka.ObjectDeSerializer
/**
 * Kafka value serializer that turns any {@link Serializable} payload into
 * bytes using standard JDK object serialization.
 *
 * <p>Registered via {@code spring.kafka.producer.valueSerializer}; the
 * matching consumer must use {@code ObjectDeSerializer}.
 */
public class ObjectSerializer implements Serializer<Serializable> {

    @Override
    public void configure(Map<String, ?> configs, boolean isKey) {
        // No configuration required.
    }

    /**
     * Serializes {@code data} for the given topic.
     *
     * @param topic topic the record is being sent to (unused, kept for the callback contract)
     * @param data  the payload to serialize
     * @return the serialized bytes, or {@code null} when {@code data} is
     *         {@code null} (Kafka's convention for null payloads)
     * @throws IllegalStateException if JDK serialization fails; the original
     *         code swallowed the IOException and returned {@code null}, which
     *         would silently publish an empty record
     */
    @Override
    public byte[] serialize(String topic, Serializable data) {
        if (data == null) {
            return null;
        }
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        try (ObjectOutputStream oos = new ObjectOutputStream(bos)) {
            oos.writeObject(data);
        } catch (IOException e) {
            throw new IllegalStateException("Cannot serialize value for topic " + topic, e);
        }
        // toByteArray() is called after the try block so the stream is fully
        // flushed and closed before the buffer is read.
        return bos.toByteArray();
    }

    @Override
    public void close() {
        // Nothing to release.
    }
}

/**
 * Kafka value deserializer that rebuilds a {@link Serializable} payload from
 * bytes written by {@code ObjectSerializer} (standard JDK serialization).
 *
 * <p>SECURITY NOTE: JDK deserialization of untrusted bytes is dangerous
 * (gadget-chain attacks). Only use this when every producer on the topic is
 * trusted; prefer JSON otherwise, or install a {@link java.io.ObjectInputFilter}.
 */
public class ObjectDeSerializer implements Deserializer<Serializable> {

    @Override
    public void configure(Map<String, ?> configs, boolean isKey) {
        // No configuration required.
    }

    /**
     * Deserializes {@code data} received from the given topic.
     *
     * @param topic topic the record came from (unused, kept for the callback contract)
     * @param data  raw record value, possibly {@code null}
     * @return the reconstructed object, or {@code null} for a null payload
     * @throws IllegalStateException if the bytes cannot be deserialized; the
     *         original code swallowed the exception and returned {@code null},
     *         hiding poison-pill records from the listener
     */
    @Override
    public Serializable deserialize(String topic, byte[] data) {
        if (data == null) {
            return null;
        }
        try (ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(data))) {
            return (Serializable) in.readObject();
        } catch (IOException | ClassNotFoundException e) {
            throw new IllegalStateException("Cannot deserialize value from topic " + topic, e);
        }
    }

    @Override
    public void close() {
        // Nothing to release.
    }
}
發送(Spring Cloud Stream Kafka)
/**
 * Publishes a Person (id = current timestamp, name from the path) through the
 * Source's output MessageChannel and echoes the payload back to the caller.
 */
@GetMapping("/stream/{name}")
    public Person streamSend(@PathVariable String name) {
        // Build the payload first, then publish it on the binder's output channel.
        Person payload = new Person();
        payload.setId(System.currentTimeMillis());
        payload.setName(name);
        source.output().send(MessageBuilder.withPayload(payload).build());
        return payload;
    }
自定義source
/**
 * Custom Spring Cloud Stream source declaring a named output channel that the
 * binder binds to the "test-topic" destination.
 */
public interface PersonSource {

    /**
     * Name of the output channel.
     */
    String TOPIC = "test-topic";

    /**
     * @return output channel
     */
    @Output(PersonSource.TOPIC)
    MessageChannel source();
}
// 加入註解
@EnableBinding(value = {Source.class,PersonSource.class})

// 將source替換爲新定義的personSource
 MessageChannel mc =  personSource.source();
消費
// 使用以下方式會報錯
// NOTE(review): per the surrounding text this listener FAILS — the record was
// produced by Spring Cloud Stream (its own conversion + headers), which the
// plain Spring Kafka deserialization path here cannot decode.
@KafkaListener(topics = "test-topic")
    public void consume(Person person){
        System.out.println(person.toString());
    }
// 以下方式正常
// Works: @StreamListener goes through Spring Cloud Stream's own message
// conversion, matching how the payload was produced on the binding.
@StreamListener("test-topic")
    public void streamConsumer(Person person){
        System.out.println(person.toString());
    }
是否能經過給數據加入Header的方式解決問題
mc.send(MessageBuilder.withPayload(person).setHeader("Content-Type","application/bean").build());

經過加入header的方式依然不能反序列化成功。

注意
  • 雖然Spring Cloud Stream Binder 中存在Spring Kafka的整合,可是Spring Kafka和Spring Cloud Stream Kafka在處理數據的生產與消費是存在差別的。因此在使用上必定要配套使用。

  • 當Spring Cloud Stream Kafka 發送消息包含頭信息時,Kafka DeSerializer在實現方法回調的時候並不會處理。

  • 通常狀況能夠經過StreamListener 來監聽數據(主體),若是須要處理消息的header信息的話能夠經過SubscribableChannel來處理

/**
 * Subscribes directly to the sink's SubscribableChannel so both the message
 * headers and the payload are available (a @StreamListener method only
 * receives the converted payload).
 */
@Bean
   public ApplicationRunner createRunner() {
       return (args) -> personSink.input().subscribe(message -> {
           MessageHeaders headers = message.getHeaders();
           Object obj = message.getPayload();
           System.out.printf("receive message, header:%s, body:%s", headers, obj);
       });
   }

可是若是上述代碼與以下代碼同時存在,那麼他們會輪流執行。

// When this listener coexists with a direct SubscribableChannel subscription
// on the same binding, the text notes they take turns receiving messages
// (competing subscribers on one channel).
@StreamListener("test-topic")
    public void streamConsumer(Person person){
        System.out.println(person.toString());
    }
Input註解
  • 對應 - SubscribableChannel
Output註解
  • 對應 - MessageChannel

二者均屏蔽了具體Stream的具體實現。 不管是@Input仍是@Output他們的value不容許重複(bean不容許重複),能夠經過destination來申明topic

spring:
    cloud:
        stream:
          bindings:
            test-topic-provider:
              destination: test-topic
            test-topic-consume:
              group: test02
              destination: test-topic
/**
     * Name of the output channel.
     */
    String TOPIC = "test-topic-provider";

    /**
     * The binding name is now "test-topic-provider"; the actual Kafka topic
     * comes from spring.cloud.stream.bindings.test-topic-provider.destination
     * so @Input/@Output channel names can stay unique while sharing a topic.
     *
     * @return output channel
     */
    @Output(PersonSource.TOPIC)
    MessageChannel source();
/**
     * Input channel name.
     */
    String INPUT = "test-topic-consume";

    /**
     * The "test-topic-consume" binding is mapped to the same "test-topic"
     * destination (with consumer group "test02") in the stream bindings.
     *
     * @return input channel.
     */
    @Input(INPUT)
    SubscribableChannel input();
// NOTE(review): this listens on PersonSource.TOPIC ("test-topic-provider"),
// i.e. the OUTPUT binding name — verify this shouldn't reference the input
// binding ("test-topic-consume") declared above instead.
@StreamListener(PersonSource.TOPIC)
    public void streamConsumer(Person person){
        System.out.println(person.toString());
    }
SubscribableChannel與@StreamListener

二者實現存在着差別,SubscribableChannel會觸發kafka的自定義反序列化,因此Spring Cloud Stream Kafka 是將對象序列化成JSON, 經過JSON反序列化成對象(不通過自定義kafka的Serializer/DeSerializer)。

相關文章
相關標籤/搜索