重试 2 次后,我的主题转到 DLQ。
有没有办法按需重新发布 dlq 主题(手动使用 api)?
这里我添加了三个 api 的代码
1)将发送消息并成功消费。
2) 将发送消息,消费时抛出错误,最终移至 DLQ。
3) 将尝试从 DLQ 读取并重新发布消息(这是我陷入困境的地方)。
@Configuration // was missing: without it the @Bean methods are never processed
public class KafkaProducerConfig {

    private String bootstrapAddress = "localhost:9092";

    /**
     * Producer factory for String keys and values.
     * Note: the original put ConsumerConfig.GROUP_ID_CONFIG here; group.id is a
     * consumer-only property and is ignored (with a warning) by the producer,
     * so it has been removed.
     */
    @Bean
    public ProducerFactory<String, Object> producerFactory() {
        Map<String, Object> configProps = new HashMap<>();
        configProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress);
        configProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        configProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        return new DefaultKafkaProducerFactory<>(configProps);
    }

    /** Template used by the service layer to publish messages. */
    @Bean
    public KafkaTemplate<String, Object> kafkaTemplate() {
        return new KafkaTemplate<>(producerFactory());
    }
}
@EnableKafka
@Configuration
public class KafkaConsumerConfig {
private String bootstrapAddress="localhost:9092";
private String groupId = "dlq_topics_group";
@Autowired
KafkaTemplate<Object, Object> template;
private Integer maxRetry = 3;
private Integer concurrency=1;
private Integer pollTimeout=1000;
@Bean
public ConsumerFactory<Object, Object> consumerFactory() {
Map<String, Object> props = new HashMap<String, Object>();
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress);
props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
return new DefaultKafkaConsumerFactory<>(props);
}
@Bean
public ConcurrentKafkaListenerContainerFactory<Object, Object> kafkaListenerContainerFactory(
ConcurrentKafkaListenerContainerFactoryConfigurer configurer) {
ConcurrentKafkaListenerContainerFactory<Object, Object> factory = new ConcurrentKafkaListenerContainerFactory<>();
factory.setConsumerFactory(consumerFactory());
factory.setConcurrency(concurrency);
factory.getContainerProperties().setPollTimeout(pollTimeout);
factory.getContainerProperties().setSyncCommits(true);
factory.getContainerProperties().setAckOnError(false);
factory.setErrorHandler(new SeekToCurrentErrorHandler(new DeadLetterPublishingRecovererHandler(template), maxRetry));
configurer.configure(factory, consumerFactory());
return factory;
}
@Bean
public KafkaConsumer<Object, Object> kafkaConsumer() {
return new KafkaConsumer<(consumerFactory().getConfigurationProperties());
}
}
@Controller
@RequestMapping(value="/kafka/controller")
public class KafkaController {
@Autowired
MessageService messageService;
@PostMapping(value="/send/{message}")
public void sendMessage1(@PathVariable String message) {
messageService.sendMessage(message);
}
@PostMapping(value="/send-kafka-msg/{message}")
public void sendMessage(@PathVariable String message) {
messageService.sendMessage(message);
}
@PostMapping(value="/resend-dlq-topic")
public void reprocessDLQTopics() {
**//LOGIC TO READ DLQ AND REPROCESS THE TOPICS**
messageService.reprocessDLQTopics();
}
}
@Service
public class MessageService {
private static final Logger logger = LoggerFactory.getLogger(MessageService.class);
private static final String TOPIC_EX = "kafka-msg-topic-ex";
private static final String TOPIC = "kafka-msg-topic";
private static final String DLQ_TOPICS = "dql_topics";
private static final String DLQ_NOTIFICATION_TOPIC = "dlq_notification_topic";
@Autowired
KafkaTemplate<String, Object> kafkaTemplate;
@Autowired
KafkaAdmin kafkaAdmin;
@Autowired
KafkaConsumer<Object, Object> kafkaConsumer;
public void sendMessage(String message) {
logger.info("sending message ", message);
kafkaTemplate.send(TOPIC, message);
}
public void sendMessageEx(String message) {
logger.info("sending message ", message);
kafkaTemplate.send(TOPIC, message);
}
@KafkaListener(topics = { TOPIC, TOPIC_EX }, groupId = "group-id")
public void messageReciver(String message) {
logger.info("message receiver" +kafkaAdmin.getConfig().entrySet().parallelStream().toString());
throw new RuntimeException();
}
@KafkaListener(id = "DLQ_TOPICS_GROUP", topics = DLQ_TOPICS)
public void dltListen(String dlqMsg) {
logger.info("Received from DLT: " + dlqMsg.toString());
}
public void reprocessDLQTopics() {
**//LOGIC TO RE-PUBLISH DLQ TOPICS**
}
}
最佳答案
尚不清楚“重新发布 dlq 主题”的含义,但您可以使用 KafkaTemplate
将记录发送到任何主题(包括 DLQ)。
如果您的意思是从 DLQ 主题消费并重新发布到原始主题,那么是的,您也可以这样做。
编辑
这是一种解决方案:
@Component
class Config {

    @Autowired
    private KafkaTemplate<String, String> template;

    @Autowired
    private KafkaListenerEndpointRegistry registry;

    /** Normal consumer for the main topic. */
    @KafkaListener(id = "so57157134", topics = "so57157134")
    public void listen(String in) {
        System.out.println(in);
    }

    /**
     * DLT re-drive listener: stopped by default; when started it forwards each
     * DLQ record back to the original topic.
     */
    @KafkaListener(id = "so57157134_DLT", topics = "so57157134_DLT", autoStartup = "false")
    public void listenDLT(String in) {
        System.out.println("Re-publish DLT");
        this.template.send("so57157134", in);
    }

    /**
     * Stop the DLT container once it has drained.
     * Fixed: the original stopped the DLT container on ANY container's idle event
     * (including the main listener's), so the re-drive could be cut short; filter
     * by the event's listener id. NOTE(review): idle events fire only if
     * idleEventInterval is configured on the container factory — confirm.
     */
    @EventListener
    public void listenForIdleContainer(ListenerContainerIdleEvent event) {
        System.out.println(event);
        if ("so57157134_DLT".equals(event.getListenerId())) {
            this.registry.getListenerContainer("so57157134_DLT").stop();
        }
    }

    /** Call on demand (e.g. from a controller) to start re-processing the DLQ. */
    public void reprocessDLQTopics() {
        this.registry.getListenerContainer("so57157134_DLT").start();
    }

    @Bean
    public NewTopic topic() {
        return new NewTopic("so57157134", 10, (short) 1);
    }

    @Bean
    public NewTopic topicDLT() {
        return new NewTopic("so57157134_DLT", 10, (short) 1);
    }
}
唯一的问题是如果重播记录仍然失败会发生什么情况;容器永远不会闲置,所以你将永远循环。一种解决方案是以某种方式检测循环并在这种情况下停止容器。
关于java - 重新发布kafka DLQ主题,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/57157134/