You can use the KafkaTransactionManager with normal Spring transaction support (@Transactional, TransactionTemplate, etc.). If a transaction is active, any KafkaTemplate operations performed within the scope of the transaction use the transaction's Producer. The manager commits or rolls back the transaction depending on success or failure. The KafkaTemplate must be configured to use the same ProducerFactory as the transaction manager.
package shangbo.kafka.example9;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
public class App {
	/**
	 * Entry point: bootstraps the Spring context and sends one message
	 * through the transactional {@link Service}.
	 *
	 * <p>The context is an {@code AutoCloseable} resource; the original code
	 * suppressed the "resource" warning instead of closing it. Using
	 * try-with-resources guarantees the context (and its producer factory)
	 * is shut down cleanly on exit.
	 */
	public static void main(String[] args) {
		try (AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(AppConfig.class)) {
			Service service = context.getBean(Service.class);

			// Send a message inside a Kafka transaction (see ServiceImpl#send).
			service.send("topic0", "message x5");
		}
	}
}
package shangbo.kafka.example9;
import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
import org.springframework.kafka.transaction.KafkaTransactionManager;
import org.springframework.transaction.annotation.EnableTransactionManagement;
@Configuration
@EnableTransactionManagement // enable Spring's annotation-driven transaction management
public class AppConfig {

	/**
	 * Kafka producer settings: broker address plus String serializers
	 * for both keys and values.
	 */
	@Bean
	public Map<String, Object> producerConfigs() {
		Map<String, Object> config = new HashMap<>();
		config.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
		config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
		config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
		return config;
	}

	/**
	 * Producer factory with a transaction-id prefix set, which turns on
	 * transactional producers for every template/manager that uses it.
	 */
	@Bean
	public ProducerFactory<String, String> producerFactory() {
		DefaultKafkaProducerFactory<String, String> factory = new DefaultKafkaProducerFactory<>(producerConfigs());
		factory.setTransactionIdPrefix("test.transaction");
		return factory;
	}

	/**
	 * Template sharing the same factory as the transaction manager, so its
	 * sends participate in the active transaction.
	 */
	@Bean
	public KafkaTemplate<String, String> kafkaTemplate(ProducerFactory<String, String> factory) {
		return new KafkaTemplate<>(factory);
	}

	/**
	 * Transaction manager driving commit/rollback of the Kafka producer;
	 * must use the same ProducerFactory as the template.
	 */
	@Bean
	public KafkaTransactionManager<String, String> kafkaTransactionManager(ProducerFactory<String, String> factory) {
		return new KafkaTransactionManager<>(factory);
	}

	/**
	 * Business service wired with the transactional template.
	 */
	@Bean
	public Service service(KafkaTemplate<String, String> kafkaTemplate) {
		ServiceImpl impl = new ServiceImpl();
		impl.setKafkaTemplate(kafkaTemplate);
		return impl;
	}
}
package shangbo.kafka.example9;
import org.springframework.kafka.core.KafkaTemplate;
public interface Service {
	/**
	 * Sends {@code message} to the given Kafka {@code topic}; implementations
	 * are expected to run this inside a Kafka transaction.
	 */
	void send(String topic, String message);

	/**
	 * Injects the KafkaTemplate used for sending.
	 */
	void setKafkaTemplate(KafkaTemplate<String, String> kafkaTemplate);
}
package shangbo.kafka.example9;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.transaction.annotation.Transactional;
/**
 * Transactional Kafka sender: {@link #send} runs inside a Kafka transaction
 * managed by the KafkaTransactionManager configured in AppConfig.
 */
public class ServiceImpl implements Service {

	private KafkaTemplate<String, String> kafkaTemplate;

	/**
	 * Publishes the message and flushes the producer while the surrounding
	 * transaction is still active; the manager commits or rolls back
	 * depending on the outcome.
	 */
	@Override
	@Transactional
	public void send(String topic, String message) {
		KafkaTemplate<String, String> template = this.kafkaTemplate;
		template.send(topic, message);
		template.flush();
	}

	@Override
	public void setKafkaTemplate(KafkaTemplate<String, String> kafkaTemplate) {
		this.kafkaTemplate = kafkaTemplate;
	}
}