Powered By Blogger

Wednesday, October 2, 2019

Adding a JAR to the Spark shell classpath with `:require`

The Spark shell (and the Scala REPL) supports the `:require` command, which adds a local JAR to the running session's classpath so its classes become importable immediately — no restart needed. The session below loads the Kafka clients JAR and then constructs `ConsumerRecord` instances directly:

scala> :require /Users/basan/.ivy2/cache/org.apache.kafka/kafka-clients/jars/kafka-clients-0.10.2.2.jar
Added '/Users/basan/.ivy2/cache/org.apache.kafka/kafka-clients/jars/kafka-clients-0.10.2.2.jar' to classpath.


scala> import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.clients.consumer.ConsumerRecord

scala>      val record1 : ConsumerRecord[String, String] = new ConsumerRecord("topic1",0,100,"Key1", "value1")
record1: org.apache.kafka.clients.consumer.ConsumerRecord[String,String] = ConsumerRecord(topic = topic1, partition = 0, offset = 100, NoTimestampType = -1, checksum = -1, serialized key size = -1, serialized value size = -1, key = Key1, value = value1)

scala>     val record2 : ConsumerRecord[String, String] = new ConsumerRecord("topic1",0,100,"Key2", "value2")
record2: org.apache.kafka.clients.consumer.ConsumerRecord[String,String] = ConsumerRecord(topic = topic1, partition = 0, offset = 100, NoTimestampType = -1, checksum = -1, serialized key size = -1, serialized value size = -1, key = Key2, value = value2)

scala>     record1.value()
res0: String = value1

No comments:

Post a Comment