1. Configure the Kafka jars in Maven
<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka_2.11</artifactId>
    <version>0.11.0.2</version>
</dependency>
<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-streaming-kafka-0-8_2.11</artifactId>
    <version>2.2.0</version>
</dependency>
2. Write the producer program in IDEA
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import scala.util.Random
import java.util

object KafkaProducer {
  def main(args: Array[String]): Unit = {
    // Equivalent console producer:
    // kafka-console-producer.sh --broker-list master:9092,master:9093 --topic mykafka2
    val brokers = "master:9092,master:9093"
    val topic = "mykafka2"
    val props = new util.HashMap[String, Object]()
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers)
    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
    val msgPerSec = 2   // messages sent per second
    val wordsPerMsg = 3 // random integers per message
    val producer = new KafkaProducer[String, String](props)
    while (true) {
      for (_ <- 1 to msgPerSec) {
        // Build a message of wordsPerMsg random integers separated by spaces
        val str = (1 to wordsPerMsg).map(_ => Random.nextInt(100)).mkString(" ")
        println(str)
        // Null key, so the partitioner spreads records across partitions
        val msg = new ProducerRecord[String, String](topic, null, str)
        producer.send(msg)
      }
      Thread.sleep(1000)
    }
  }
}
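Run the object straight from IDEA: every second it sends msgPerSec (2) messages of wordsPerMsg (3) random integers each, printing every message before sending it. Because of the while(true) loop the producer is never closed; stop it by killing the run.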
3. View the results in IDEA
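The producer's println output shows up directly in the IDEA run console, one line per message sent. To also watch the messages arrive through Spark Streaming, which is what the spark-streaming-kafka-0-8 dependency above is for, a consumer can be sketched as below. This is a minimal sketch, not code from this post: the object name KafkaStreamConsumer and the local[2] master are illustrative, and spark-streaming_2.11 (2.2.0) must additionally be on the classpath.

import kafka.serializer.StringDecoder
import org.apache.spark.SparkConf
import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.streaming.{Seconds, StreamingContext}

object KafkaStreamConsumer {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[2]").setAppName("KafkaStreamConsumer")
    val ssc = new StreamingContext(conf, Seconds(2))
    // Direct (receiver-less) stream: offsets are read straight from the brokers
    val kafkaParams = Map("metadata.broker.list" -> "master:9092,master:9093")
    val stream = KafkaUtils.createDirectStream[String, String, StringDecoder, StringDecoder](
      ssc, kafkaParams, Set("mykafka2"))
    // Records are (key, value) pairs; the producer sends null keys, so print the values
    stream.map(_._2).print()
    ssc.start()
    ssc.awaitTermination()
  }
}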
4. Consume the data with the Kafka console consumer
kafka-console-consumer.sh --zookeeper master:12181/kafka0.11 --topic mykafka2
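The command above connects through the ZooKeeper chroot (master:12181/kafka0.11). Since this is Kafka 0.11, the console consumer can also connect to the brokers directly; assuming the same broker list as the producer:
kafka-console-consumer.sh --bootstrap-server master:9092,master:9093 --topic mykafka2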