Reputation: 1658
I would like to use Spring Services in order to send data to different Kafka messages based on configuration:
// Request/reply dispatch keyed on the configured endpoint type.
// Each case body is wrapped in braces to give it its own scope: without them
// the identical local names (record, replyFuture, sendResult, consumerRecord)
// are all declared in the single switch-level scope, and the code fails to
// compile with "Duplicate local variable".
ResponseFactory processingPeply = null;
switch (endpointType) {
    case "email": {
        ProducerRecord<String, Object> record = new ProducerRecord<>("tp-email.request", tf);
        RequestReplyFuture<String, Object, Object> replyFuture =
                processingTransactionEmailReplyKafkaTemplate.sendAndReceive(record);
        // First wait for the broker ack, then for the correlated reply.
        SendResult<String, Object> sendResult = replyFuture.getSendFuture().get(10, TimeUnit.SECONDS);
        ConsumerRecord<String, Object> consumerRecord = replyFuture.get(10, TimeUnit.SECONDS);
        processingPeply = (ResponseFactory) consumerRecord.value();
        break;
    }
    case "sms": {
        ProducerRecord<String, Object> record = new ProducerRecord<>("tp-sms.request", tf);
        RequestReplyFuture<String, Object, Object> replyFuture =
                processingTransactionSmsReplyKafkaTemplate.sendAndReceive(record);
        SendResult<String, Object> sendResult = replyFuture.getSendFuture().get(10, TimeUnit.SECONDS);
        ConsumerRecord<String, Object> consumerRecord = replyFuture.get(10, TimeUnit.SECONDS);
        processingPeply = (ResponseFactory) consumerRecord.value();
        break;
    }
    case "network": {
        ProducerRecord<String, Object> record = new ProducerRecord<>("tp-network.request", tf);
        RequestReplyFuture<String, Object, Object> replyFuture =
                processingTransactionNetworkReplyKafkaTemplate.sendAndReceive(record);
        SendResult<String, Object> sendResult = replyFuture.getSendFuture().get(10, TimeUnit.SECONDS);
        ConsumerRecord<String, Object> consumerRecord = replyFuture.get(10, TimeUnit.SECONDS);
        processingPeply = (ResponseFactory) consumerRecord.value();
        break;
    }
    default:
        processingPeply = ResponseFactory.builder().status("error").build();
}
I currently get a "Duplicate local variable" compile error, because every case declares variables with the same names (record, replyFuture, sendResult, consumerRecord) in the shared switch scope.
Do you know how I can redesign the code in some better way so I can solve the issue? I would like to use DRY principle with Spring Service in order to reduce the code.
Upvotes: 2
Views: 815
Reputation: 950
I think you can use interfaces to separate the logic of sending data to the different endpoints. Take a look at the code below:
Main class that sends data and receive Response. It doesn't know anything about email, SMS, network senders.
package com.example.demo.service;
import com.example.demo.dto.Response;
import org.springframework.stereotype.Service;
import java.util.List;
@Service
public class KafkaSender {
private final List<EndpointSender> senders;
public KafkaSender(List<EndpointSender> senders) {
this.senders = senders;
}
public Response send(Object data, String endpoint) {
return senders
.stream()
.filter(it -> it.supports(endpoint))
.findAny()
.map(it -> it.send(data))
.orElseGet(() -> new Response("error"));
}
}
Then we create interface like this:
package com.example.demo.service;
import com.example.demo.dto.Response;
/**
 * One concrete sender per endpoint type; implementations declare which
 * endpoint they handle via {@link #supports(String)}.
 */
public interface EndpointSender {
    /**
     * Sends {@code obj} to this sender's endpoint and returns the reply.
     *
     * @param obj the request payload
     * @return the response produced by the endpoint
     */
    Response send(Object obj);
    /**
     * @param endpoint the configured endpoint type (e.g. "email", "sms")
     * @return {@code true} if this sender handles the given endpoint type
     */
    boolean supports(String endpoint);
}
And implementations:
Base class to reduce boilerplate code:
package com.example.demo.service.sender;
import com.example.demo.dto.Response;
import com.example.demo.service.EndpointSender;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.springframework.kafka.requestreply.ReplyingKafkaTemplate;
import org.springframework.kafka.requestreply.RequestReplyFuture;
import org.springframework.kafka.support.SendResult;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
/**
 * Template-method base for endpoint senders: subclasses supply the producer
 * record and the reply template, this class performs the blocking
 * request/reply exchange.
 */
public abstract class BaseSender implements EndpointSender {

    /** Timeout applied separately to the send ack and to the reply wait. */
    private static final long TIMEOUT_SECONDS = 10;

    /** Builds the producer record (topic + payload) for this endpoint. */
    public abstract ProducerRecord<String, Object> getRecord(Object obj);

    /** The request/reply template wired to this endpoint's topics. */
    public abstract ReplyingKafkaTemplate<String, Object, Object> kafkaTemplate();

    /**
     * Sends the payload and blocks for the broker ack, then for the
     * correlated reply, each for up to {@value #TIMEOUT_SECONDS} seconds.
     *
     * @param obj the request payload
     * @return the reply record's value, cast to {@link Response}
     * @throws IllegalStateException (a RuntimeException, as before) if the
     *         exchange fails, times out, or the waiting thread is interrupted
     */
    @Override
    public Response send(Object obj) {
        RequestReplyFuture<String, Object, Object> replyFuture =
                kafkaTemplate().sendAndReceive(getRecord(obj));
        try {
            // Confirm the request reached the broker before waiting for a reply.
            replyFuture.getSendFuture().get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
            ConsumerRecord<String, Object> consumerRecord =
                    replyFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
            return (Response) consumerRecord.value();
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers can observe the interruption.
            Thread.currentThread().interrupt();
            throw new IllegalStateException("Interrupted while waiting for Kafka reply", e);
        } catch (ExecutionException | TimeoutException e) {
            // Narrower than the original catch(Throwable): Errors propagate,
            // and the cause is preserved for diagnosis.
            throw new IllegalStateException("Kafka request/reply exchange failed", e);
        }
    }
}
And implementations for senders: Email sender:
package com.example.demo.service.sender;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.springframework.kafka.requestreply.ReplyingKafkaTemplate;
import org.springframework.stereotype.Service;
/** Sender for the {@code "email"} endpoint type. */
@Service
public class EmailSender extends BaseSender {

    /** Request/reply template bound to the email topics. */
    private final ReplyingKafkaTemplate<String, Object, Object> emailReplyTemplate;

    // Parameter name kept intact: with several ReplyingKafkaTemplate beans in
    // the context, Spring may resolve the candidate by parameter name.
    public EmailSender(ReplyingKafkaTemplate<String, Object, Object> processingTransactionEmailReplyKafkaTemplate) {
        this.emailReplyTemplate = processingTransactionEmailReplyKafkaTemplate;
    }

    /** Handles only the {@code "email"} endpoint type; null-safe. */
    @Override
    public boolean supports(String endpoint) {
        return endpoint != null && endpoint.equals("email");
    }

    /** Wraps the payload in a record addressed to the email request topic. */
    @Override
    public ProducerRecord<String, Object> getRecord(Object obj) {
        return new ProducerRecord<>("tp-email.request", obj);
    }

    /** Exposes the email template to {@link BaseSender}. */
    @Override
    public ReplyingKafkaTemplate<String, Object, Object> kafkaTemplate() {
        return emailReplyTemplate;
    }
}
Sms sender:
package com.example.demo.service.sender;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.springframework.kafka.requestreply.ReplyingKafkaTemplate;
import org.springframework.stereotype.Service;
/** Sender for the {@code "sms"} endpoint type. */
@Service
public class SmsSender extends BaseSender {

    /** Request/reply template bound to the SMS topics. */
    private final ReplyingKafkaTemplate<String, Object, Object> smsReplyTemplate;

    // Parameter name kept intact: with several ReplyingKafkaTemplate beans in
    // the context, Spring may resolve the candidate by parameter name.
    public SmsSender(ReplyingKafkaTemplate<String, Object, Object> processingTransactionSmsReplyKafkaTemplate) {
        this.smsReplyTemplate = processingTransactionSmsReplyKafkaTemplate;
    }

    /** Handles only the {@code "sms"} endpoint type; null-safe. */
    @Override
    public boolean supports(String endpoint) {
        return endpoint != null && endpoint.equals("sms");
    }

    /** Wraps the payload in a record addressed to the SMS request topic. */
    @Override
    public ProducerRecord<String, Object> getRecord(Object obj) {
        return new ProducerRecord<>("tp-sms.request", obj);
    }

    /** Exposes the SMS template to {@link BaseSender}. */
    @Override
    public ReplyingKafkaTemplate<String, Object, Object> kafkaTemplate() {
        return smsReplyTemplate;
    }
}
Network sender:
package com.example.demo.service.sender;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.springframework.kafka.requestreply.ReplyingKafkaTemplate;
import org.springframework.stereotype.Service;
/** Sender for the {@code "network"} endpoint type. */
@Service
public class NetworkSender extends BaseSender {

    /** Request/reply template bound to the network topics. */
    private final ReplyingKafkaTemplate<String, Object, Object> networkReplyTemplate;

    // Parameter name kept intact: with several ReplyingKafkaTemplate beans in
    // the context, Spring may resolve the candidate by parameter name.
    public NetworkSender(ReplyingKafkaTemplate<String, Object, Object> processingTransactionNetworkReplyKafkaTemplate) {
        this.networkReplyTemplate = processingTransactionNetworkReplyKafkaTemplate;
    }

    /** Handles only the {@code "network"} endpoint type; null-safe. */
    @Override
    public boolean supports(String endpoint) {
        return endpoint != null && endpoint.equals("network");
    }

    /** Wraps the payload in a record addressed to the network request topic. */
    @Override
    public ProducerRecord<String, Object> getRecord(Object obj) {
        return new ProducerRecord<>("tp-network.request", obj);
    }

    /** Exposes the network template to {@link BaseSender}. */
    @Override
    public ReplyingKafkaTemplate<String, Object, Object> kafkaTemplate() {
        return networkReplyTemplate;
    }
}
Upvotes: 1
Reputation: 530
Applying KISS, though not so DRY: put each case's code block inside its own braces.
case "email": {
...
}
break;
...
By doing this you reduce each case's scope, so you can reuse the same variable names across cases.
Upvotes: 1
Reputation: 75914
You could autowire all the ReplyingKafkaTemplate
and look up the one matching your endpoint type.
// Collect every ReplyingKafkaTemplate bean in the context.
@Autowired
private List<ReplyingKafkaTemplate<String, Object, Object>> templates;

// Pick the template whose default topic mentions the endpoint type.
ReplyingKafkaTemplate<String, Object, Object> template = null;
for (ReplyingKafkaTemplate<String, Object, Object> replyingKafkaTemplate : templates) {
    String defaultTopic = replyingKafkaTemplate.getDefaultTopic();
    // A template without a default topic can never match; skip it instead of NPE-ing.
    if (defaultTopic != null && defaultTopic.contains(endpointType)) {
        template = replyingKafkaTemplate;
        break;
    }
}
if (template == null) {
    // Fail loudly rather than NPE-ing on template.getDefaultTopic() below.
    throw new IllegalStateException("No ReplyingKafkaTemplate found for endpoint type: " + endpointType);
}
ProducerRecord<String, Object> record = new ProducerRecord<>(template.getDefaultTopic(), tf);
RequestReplyFuture<String, Object, Object> replyFuture = template.sendAndReceive(record);
SendResult<String, Object> sendResult = replyFuture.getSendFuture().get(10, TimeUnit.SECONDS);
ConsumerRecord<String, Object> consumerRecord = replyFuture.get(10, TimeUnit.SECONDS);
ResponseFactory processingPeply = (ResponseFactory) consumerRecord.value();
You could also set up your configuration in such a way to create a bean of look up types followed by injecting Map<String, ReplyingKafkaTemplate>
for easy look up. Since I don't know your set up I can't provide the configuration set up for you.
// Inject the templates keyed by endpoint type (requires a Map-producing bean
// in your configuration). Note: the generic type needs exactly two closing
// angle brackets here; a third one is a syntax error.
@Autowired
private Map<String, ReplyingKafkaTemplate<String, Object, Object>> templates;

ReplyingKafkaTemplate<String, Object, Object> template = templates.get(endpointType);
if (template == null) {
    // Guard the map miss rather than NPE-ing on template.getDefaultTopic().
    throw new IllegalStateException("No ReplyingKafkaTemplate configured for endpoint type: " + endpointType);
}
ProducerRecord<String, Object> record = new ProducerRecord<>(template.getDefaultTopic(), tf);
RequestReplyFuture<String, Object, Object> replyFuture = template.sendAndReceive(record);
SendResult<String, Object> sendResult = replyFuture.getSendFuture().get(10, TimeUnit.SECONDS);
ConsumerRecord<String, Object> consumerRecord = replyFuture.get(10, TimeUnit.SECONDS);
ResponseFactory processingPeply = (ResponseFactory) consumerRecord.value();
Upvotes: 2