一、通过Jhipster新建微服务
1、 运行 yo jhipster 命令,按提示选择需要加入的组件并生成工程
2、 打开新生成的工程,首先修改application-dev.yml文件
并在hosts中配置映射路径
这样在部署到测试、生产环境时不需要更改配置文件,只需要改动 hosts 中的域名映射即可。
二、简单集成Kafka
大多数情况下,我们需要与其他服务之间进行数据交互的,用的比较多的就是feign或者通过消息中间件。这里以kafka为例。
1、首先pom.xml中引入依赖
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka</artifactId>
<version>1.1.3.RELEASE</version>
</dependency>
或手动引入
2、配置文件application.yml指定kafka的域名路径。
3、添加配置类
/**
 * Kafka consumer configuration: wires a listener container factory backed by a
 * String/String consumer. Bootstrap servers are read from application.yml via
 * the {@code application.kafka-servers} property.
 */
@Configuration
@EnableKafka
public class KafkaConsumerConfig {

    @Value("${application.kafka-servers}")
    private String kafkaServers;

    /**
     * Container factory used by {@code @KafkaListener} methods:
     * 3 concurrent consumer threads, 3s poll timeout.
     */
    @Bean
    public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, String>> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        factory.setConcurrency(3);
        factory.getContainerProperties().setPollTimeout(3000);
        return factory;
    }

    @Bean
    public ConsumerFactory<String, String> consumerFactory() {
        return new DefaultKafkaConsumerFactory<>(consumerConfigs());
    }

    /**
     * Raw consumer properties. Offsets are committed by the listener container
     * ({@code enable.auto.commit=false}); the original also set
     * {@code auto.commit.interval.ms=10000}, which is ignored when auto-commit
     * is disabled, so that dead entry has been removed.
     */
    @Bean
    public Map<String, Object> consumerConfigs() {
        Map<String, Object> propsMap = new HashMap<>();
        propsMap.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaServers);
        // Container-managed commits; auto.commit.interval.ms is irrelevant here.
        propsMap.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
        propsMap.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "15000");
        propsMap.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        propsMap.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        propsMap.put(ConsumerConfig.GROUP_ID_CONFIG, "cmplatform");
        // With no committed offset for the group, start from the newest records.
        propsMap.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
        return propsMap;
    }

    // Registers the listener bean so its @KafkaListener methods are picked up.
    @Bean
    public KafkaListenerService listener() {
        return new KafkaListenerService();
    }
}
/**
 * Kafka producer configuration exposing a String/String KafkaTemplate.
 * Bootstrap servers are read from the {@code application.kafka-servers} property.
 */
@Configuration
@EnableKafka
public class KafkaProducerConfig {

    @Value("${application.kafka-servers}")
    private String kafkaServers;

    /** Raw producer properties: no retries, small batches, 40MB buffer/request limit. */
    @Bean
    public Map<String, Object> producerConfigs() {
        Map<String, Object> config = new HashMap<>();
        config.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaServers);
        config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        config.put(ProducerConfig.RETRIES_CONFIG, 0);
        config.put(ProducerConfig.BATCH_SIZE_CONFIG, 4096);
        config.put(ProducerConfig.LINGER_MS_CONFIG, 1);
        config.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 40 * 1024 * 1024);
        config.put(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, 40 * 1024 * 1024);
        return config;
    }

    @Bean
    public ProducerFactory<String, String> producerFactory() {
        return new DefaultKafkaProducerFactory<>(producerConfigs());
    }

    /** Template injected by services that publish messages (see KafkaSendService). */
    @Bean
    public KafkaTemplate<String, String> kafkaTemplate() {
        return new KafkaTemplate<>(producerFactory());
    }
}
4、使用
producer:这里自定义了一个发送 service 做一层封装,也可以直接注入 KafkaTemplate 使用
/**
 * Thin wrapper service around KafkaTemplate for publishing JSON payloads.
 */
@Service
public class KafkaSendService {

    // Parameterized instead of the original raw KafkaTemplate to avoid
    // unchecked calls; matches the KafkaTemplate<String, String> bean.
    @Autowired
    private KafkaTemplate<String, String> template;

    /**
     * Publishes a message to the given topic. Fire-and-forget: the future
     * returned by KafkaTemplate.send is intentionally not awaited.
     *
     * @param topic destination Kafka topic
     * @param json  message payload
     */
    public void send(String topic, String json) {
        template.send(topic, json);
    }
}
consumer:
/**
 * Consumes messages from the "HeadToBm" topic.
 * The original wrapped record.value() in Optional only to call
 * isPresent()/get() — an Optional anti-pattern — and then dropped the
 * extracted message unused; simplified to a direct null check.
 */
@KafkaListener(topics = "HeadToBm")
public void listener(ConsumerRecord<?, ?> record) {
    Object value = record.value();
    if (value != null) {
        String message = value.toString();
        // TODO: process the message — the original snippet never used it.
    }
}