<!-- Spring Cloud Stream Kafka binder -->
<dependency>
    <groupId>org.springframework.cloud</groupId>
    <artifactId>spring-cloud-stream-binder-kafka</artifactId>
</dependency>
或
<!-- Spring Cloud Stream Kafka starter -->
<dependency>
    <groupId>org.springframework.cloud</groupId>
    <artifactId>spring-cloud-starter-stream-kafka</artifactId>
</dependency>
// Template used to publish Person payloads to Kafka.
private KafkaTemplate<String, Object> kafkaTemplate;

@Autowired
public KafkaController(KafkaTemplate<String, Object> kafkaTemplate) { // FIX: parameterize raw KafkaTemplate
    this.kafkaTemplate = kafkaTemplate;
}

/**
 * Builds a Person from the path variable and publishes it to "test-topic".
 *
 * @param name name to assign to the new Person
 * @return the Person that was sent
 */
@GetMapping("/send/{name}") // FIX: path must declare {name} for @PathVariable to bind
public Person send(@PathVariable String name) {
    Person person = new Person();
    person.setId(System.currentTimeMillis());
    person.setName(name);
    kafkaTemplate.send("test-topic", person);
    return person;
}
// Plain Spring Kafka listener: prints each Person consumed from "test-topic".
@KafkaListener(topics = "test-topic") public void consume(Person person){ System.out.println(person.toString()); }
//生產者端錯誤信息 There was an unexpected error (type=Internal Server Error, status=500). Can't convert value of class com.service.Person to class org.apache.kafka.common.serialization.ByteArraySerializer specified in value.serializer
//消費者端錯誤信息 nested exception is org.springframework.messaging.converter.MessageConversionException: Cannot convert from [[B] to [com.service.Person]
// 解決辦法:
// KafkaProperties -> Producer -> valueSerializer
// KafkaProperties -> Consumer -> valueDeserializer
spring:
  kafka:
    producer:
      # Custom serializer for producing Person objects.
      value-serializer: com.service.kafka.ObjectSerializer
    consumer:
      group-id: test
      # Custom deserializer, the inverse of the producer's serializer.
      value-deserializer: com.service.kafka.ObjectDeSerializer
/**
 * Kafka value serializer that converts any {@link Serializable} payload into
 * bytes using standard Java object serialization.
 */
public class ObjectSerializer implements Serializer<Serializable> {

    @Override
    public void configure(Map<String, ?> map, boolean b) {
        // No configuration needed.
    }

    /**
     * Serializes the payload with an ObjectOutputStream.
     * Returns null when serialization fails (stack trace printed), which
     * callers receive as a null record value.
     */
    @Override
    public byte[] serialize(String s, Serializable serializable) {
        System.out.printf("topic:%s, data:%s", s, serializable);
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        byte[] bytes = null;
        try (ObjectOutputStream oos = new ObjectOutputStream(bos)) {
            oos.writeObject(serializable);
            // FIX: flush the ObjectOutputStream's internal block buffer before
            // snapshotting bos, otherwise the tail of the data can be missing.
            oos.flush();
            bytes = bos.toByteArray();
        } catch (IOException e) {
            e.printStackTrace();
        }
        return bytes;
    }

    @Override
    public void close() {
        // Nothing to release.
    }
}

/**
 * Kafka value deserializer, the inverse of {@link ObjectSerializer}.
 *
 * SECURITY NOTE(review): Java native deserialization of bytes read from a
 * broker is unsafe if the topic can carry untrusted input — prefer JSON, or
 * install an ObjectInputFilter.
 */
public class ObjectDeSerializer implements Deserializer<Serializable> {

    @Override
    public void configure(Map<String, ?> map, boolean b) { // FIX: raw Map -> Map<String, ?>
        // No configuration needed.
    }

    /**
     * Deserializes the record value; returns null on failure (stack trace
     * printed).
     */
    @Override
    public Serializable deserialize(String s, byte[] bytes) {
        ByteArrayInputStream bs = new ByteArrayInputStream(bytes);
        Serializable result = null;
        try (ObjectInputStream os = new ObjectInputStream(bs)) {
            result = (Serializable) os.readObject();
        } catch (IOException | ClassNotFoundException e) {
            e.printStackTrace();
        }
        System.out.printf("topic:%s, data:%s", s, result);
        return result;
    }

    @Override
    public void close() {
        // Nothing to release.
    }
}
/**
 * Builds a Person from the path variable and publishes it on the bound
 * output channel.
 *
 * @param name name to assign to the new Person
 * @return the Person that was sent
 */
@GetMapping("/stream/{name}")
public Person streamSend(@PathVariable String name) {
    Person p = new Person();
    p.setId(System.currentTimeMillis());
    p.setName(name);
    source.output().send(MessageBuilder.withPayload(p).build());
    return p;
}
// Binding interface declaring a custom output channel for Person messages.
public interface PersonSource {

    /**
     * Name of the output channel.
     */
    String TOPIC = "test-topic";

    /**
     * @return output channel
     */
    @Output(PersonSource.TOPIC)
    MessageChannel source();
}
// 加入註解 @EnableBinding(value = {Source.class,PersonSource.class}) // 將source替換爲新定義的personSource MessageChannel mc = personSource.source();
// 使用以下方式會報錯 @KafkaListener(topics = "test-topic") public void consume(Person person){ System.out.println(person.toString()); }
// 以下方式正常 @StreamListener("test-topic") public void streamConsumer(Person person){ System.out.println(person.toString()); }
// Attempt: attach an explicit Content-Type header to the outgoing message.
mc.send(MessageBuilder.withPayload(person).setHeader("Content-Type","application/bean").build());
經過加入header的方式依然不能反序列化成功。
雖然Spring Cloud Stream Binder 中存在Spring Kafka的整合,可是Spring Kafka和Spring Cloud Stream Kafka在處理數據的生產與消費上存在差異。因此在使用上必定要配套使用。
當Spring Cloud Stream Kafka 發送消息包含頭信息時,Kafka DeSerializer在實現方法回調的時候並不會處理。
通常狀況能夠經過StreamListener 來監聽數據(主體),若是須要處理消息的header信息的話,能夠經過SubscribableChannel來處理。
/**
 * Subscribes to the bound input channel once the application is ready and
 * logs each message's headers and payload.
 *
 * @return runner executed at startup
 */
@Bean
public ApplicationRunner createRunner() {
    return args -> personSink.input().subscribe(msg ->
            System.out.printf("receive message, header:%s, body:%s",
                    msg.getHeaders(), msg.getPayload()));
}
可是若是上述代碼與以下代碼同時存在,那麼他們會輪流執行。
// When this listener coexists with the ApplicationRunner subscription, the
// two consumers take turns receiving messages.
@StreamListener("test-topic") public void streamConsumer(Person person){ System.out.println(person.toString()); }
二者均屏蔽了Stream的具體實現。不管是@Input仍是@Output,他們的value均不容許重複(bean名稱不容許重複),能夠經過destination來申明topic:
spring:
  cloud:
    stream:
      bindings:
        # Output binding: channel name differs from the physical topic,
        # which is declared via `destination`.
        test-topic-provider:
          destination: test-topic
        # Input binding for the same topic, with its own consumer group.
        test-topic-consume:
          group: test02
          destination: test-topic
/**
 * Name of the output channel binding (mapped to the physical topic through
 * spring.cloud.stream.bindings.&lt;name&gt;.destination).
 */
String TOPIC = "test-topic-provider";

/**
 * @return output channel
 */
@Output(PersonSource.TOPIC) MessageChannel source();
/**
 * Input channel name (mapped to the physical topic through its
 * destination configuration).
 */
String INPUT = "test-topic-consume";

/**
 * @return input channel.
 */
@Input(INPUT) SubscribableChannel input();
// NOTE(review): this listens on PersonSource.TOPIC ("test-topic-provider"),
// i.e. the *output* binding name — presumably the input binding
// "test-topic-consume" was intended; confirm against the bindings config.
@StreamListener(PersonSource.TOPIC) public void streamConsumer(Person person){ System.out.println(person.toString()); }
二者實現存在着差別,SubscribableChannel會觸發kafka的自定義反序列化,因此Spring Cloud Stream Kafka 是將對象序列化成JSON, 經過JSON反序列化成對象(不通過自定義kafka的Serializer/DeSerializer)。