4-Kafka Cheetsheet Final
4-Kafka Cheetsheet Final
================
Agenda:
* Intro to Kafka
* Kafka spring boot hello world, using Offset Explorer
* kafka consumer/producer custom objects
* Kafka Producer Example with java configuration
* Kafka error handling
* Intro to kafka streams
feature :
* kafka is fault tolerant
* in kafka cluster messages are replicated in multiple brokers
* with a replication factor > 1, each message is replicated across that many brokers
* kafka is scalable
we can add new brokers
we can increase no of consumers
Download kafka:
-------------
https://archive.apache.org/dist/kafka/3.4.0/kafka_2.12-3.4.0.tgz
change: server.properties
log.dirs=c:/kafka/kafka-logs
change : zookeeper.properties
dataDir=c:/kafka/zookeeper
3. Create topic
----------------
Topic: communication channel on which the producer puts messages and the consumer
consumes the data
for performance consideration topic divided into partitions
If any partition is not working we keep replication
go to window:
List topic
describe topic
.\kafka-topics.bat --bootstrap-server localhost:9092 --describe --topic t-hello2
delete topic
.\kafka-topics.bat --bootstrap-server localhost:9092 --delete --topic t-hello2
4. Start Producer
--------------------
.\kafka-console-producer.bat --bootstrap-server localhost:9092 --topic t-hello2
Send message
How are you
.\bin\windows\zookeeper-server-start.bat .\config\zookeeper.properties
@Autowired
private KafkaTemplate<String, String>kafkaTemplate;
/**
 * REST entry point that hands an incoming message to the Kafka producer service.
 *
 * GET /producer?message=... publishes the message and returns "ok".
 */
@RestController
public class ProducerController {

    private final ProduceService produceService;

    // Constructor injection (preferred over field @Autowired): the dependency is
    // explicit, final, and the class is testable without a Spring container.
    public ProducerController(ProduceService produceService) {
        this.produceService = produceService;
    }

    @GetMapping("producer")
    public String callProducer(@RequestParam String message) {
        produceService.produce(message);
        return "ok";
    }
}
server.port=8080
http://localhost:8080/producer?message=hello
consumer:
----------
// Kafka consumer: methods annotated with @KafkaListener are invoked by Spring Kafka
// for every record arriving on the named topic, within the given consumer group.
@Service
public class ConsumerService {
// Listens on topic "my_topic" as part of consumer group "my_topic_group_id";
// simply echoes each received message to stdout (demo/hello-world behavior).
@KafkaListener(topics = "my_topic", groupId = "my_topic_group_id")
public void consume(String message) {
System.out.println(message);
}
}
server.port=8081
//..........
// GET /producer/{message} — load-generation endpoint: publishes the same message
// 5000 times (suffixed with a running index) to demonstrate partition distribution
// and consumer throughput.
@GetMapping(path = "producer/{message}")
public String processProduct(@PathVariable String message){
for(int i=0;i<5000;i++){
productService.processProduct(message+" "+i);
}
return "message is processed";
}
}
@Autowired
private KafkaTemplate<String, Product>kafkaTemplate;
@Autowired
private ProduceService produceService;
// POST /producer — accepts a Product as the JSON request body and publishes it
// to Kafka via the producer service (value is JSON-serialized downstream).
@PostMapping("producer")
public String callProducer(@RequestBody Product product) {
produceService.produce(product);
return "product added";
}
}
Kafka consumer:
-------------------
// Kafka consumer for custom objects: the listener receives a Product instance,
// deserialized from JSON by the configured JsonDeserializer (see the
// spring.kafka.consumer.value-deserializer / trusted.packages properties).
@Service
public class ConsumerService {
// Same topic/group wiring as the String example, but typed to Product.
@KafkaListener(topics = "my_topic", groupId = "my_topic_group_id")
public void consume(Product product) {
System.out.println(product);
}
}
server.port=8081
spring.kafka.consumer.properties.spring.json.trusted.packages=*
spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.consumer.value-deserializer=org.springframework.kafka.support.serializer.JsonDeserializer
step 6: Spring Boot with Kafka Producer Example with java configuration
--------------------------------------------------------------
@Configuration
public class KafkaProducerConfig {
@Bean
public NewTopic createTopic(){
return new NewTopic("javatechie-demo", 3, (short) 1);
}
@Bean
public Map<String,Object> producerConfig(){
Map<String,Object> props=new HashMap<>();
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,
"localhost:9092");
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
StringSerializer.class);
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
JsonSerializer.class);
return props;
}
@Bean
public ProducerFactory<String,Object> producerFactory(){
return new DefaultKafkaProducerFactory<>(producerConfig());
}
@Bean
public KafkaTemplate<String,Object> kafkaTemplate(){
return new KafkaTemplate<>(producerFactory());
}
/**
 * Java-based consumer configuration: deserializer settings, the consumer
 * factory, and the listener container factory that backs @KafkaListener methods.
 */
@Configuration
public class KafkaConsumerConfig {

    // Consumer properties: broker address, String keys, JSON values, and the
    // package trusted for JSON deserialization (restricts which classes may be
    // instantiated from incoming payloads).
    @Bean
    public Map<String, Object> consumerConfig() {
        var settings = new HashMap<String, Object>();
        settings.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        settings.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        settings.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JsonDeserializer.class);
        settings.put(JsonDeserializer.TRUSTED_PACKAGES, "com.dto");
        return settings;
    }

    @Bean
    public ConsumerFactory<String, Object> consumerFactory() {
        return new DefaultKafkaConsumerFactory<>(consumerConfig());
    }

    // Container factory wired to the consumer factory above; Spring Kafka uses
    // it to build the concurrent listener containers behind @KafkaListener.
    @Bean
    public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, Object>> kafkaListenerContainerFactory() {
        var factory = new ConcurrentKafkaListenerContainerFactory<String, Object>();
        factory.setConsumerFactory(consumerFactory());
        return factory;
    }
}
@Service
@Slf4j
public class KafkaMessageConsumer {
} catch (JsonProcessingException e) {
e.printStackTrace();
}
}
@DltHandler
public void listenDLT(User user, @Header(KafkaHeaders.RECEIVED_TOPIC) String
topic, @Header(KafkaHeaders.OFFSET) long offset) {
log.info("DLT Received : {} , from {} , offset
{}",user.getFirstName(),topic,offset);
}
}
// Scheduled producer: fires every 1000 ms and publishes a counter-stamped
// message to topic "t_hello" (requires @EnableScheduling on the application).
@Scheduled(fixedRate = 1000)
public void sendHello() {
i++;
kafkaTemplate.send("t_hello", "fixed rate "+ i);
}
}
@EnableScheduling
@SpringBootApplication
public class KafkaProducerApplication implements CommandLineRunner{
}
Kafka installation on Linux:
-------------------------------
Start Zookeeper
bin/zookeeper-server-start.sh config/zookeeper.properties
Consumer:
# create topic t_hello
bin/kafka-topics.sh --bootstrap-server localhost:9092 --create --topic t_hello --
partitions 1 --replication-factor 1
# list topic
bin/kafka-topics.sh --bootstrap-server localhost:9092 --list
# describe topic
bin/kafka-topics.sh --bootstrap-server localhost:9092 --describe --topic t_hello