Source: Atguigu (尚硅谷); I will swap these out for my own versions later.
KafkaUtil
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import javax.annotation.Nullable;
import java.util.Properties;
/**
* Author: Felix
* Date: 2021/1/30
* Desc: Utility class for working with Kafka
*/
public class MyKafkaUtil {
private static final String KAFKA_SERVER = "hadoop202:9092,hadoop203:9092,hadoop204:9092";
private static final String DEFAULT_TOPIC = "DEFAULT_DATA";
//Build a FlinkKafkaConsumer source for the given topic and consumer group
public static FlinkKafkaConsumer<String> getKafkaSource(String topic, String groupId) {
//Kafka connection properties
Properties props = new Properties();
props.setProperty(ConsumerConfig.GROUP_ID_CONFIG, groupId);
props.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_SERVER);
return new FlinkKafkaConsumer<String>(topic, new SimpleStringSchema(), props);
}
//Wrap a FlinkKafkaProducer sink for a fixed topic (String values, at-least-once by default)
public static FlinkKafkaProducer<String> getKafkaSink(String topic) {
return new FlinkKafkaProducer<String>(KAFKA_SERVER, topic, new SimpleStringSchema());
}
public static <T> FlinkKafkaProducer<T> getKafkaSinkBySchema(KafkaSerializationSchema<T> kafkaSerializationSchema) {
Properties props = new Properties();
props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,KAFKA_SERVER);
//Transaction timeout; must not exceed the broker's transaction.max.timeout.ms (15 minutes by default)
props.setProperty(ProducerConfig.TRANSACTION_TIMEOUT_CONFIG,15*60*1000+"");
return new FlinkKafkaProducer<T>(DEFAULT_TOPIC, kafkaSerializationSchema, props, FlinkKafkaProducer.Semantic.EXACTLY_ONCE);
}
//Build the Kafka connector options used in a Flink SQL DDL WITH(...) clause
public static String getKafkaDDL(String topic,String groupId){
String ddl="'connector' = 'kafka', " +
" 'topic' = '"+topic+"'," +
" 'properties.bootstrap.servers' = '"+ KAFKA_SERVER +"', " +
" 'properties.group.id' = '"+groupId+ "', " +
" 'format' = 'json', " +
" 'scan.startup.mode' = 'latest-offset' ";
return ddl;
}
}
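A minimal usage sketch for getKafkaSinkBySchema (the topic name "dwd_page_log", the stream variable jsonStrDS, and the StandardCharsets import are assumptions for illustration): each element is serialized into a ProducerRecord that names its own target topic, so DEFAULT_TOPIC above only acts as a fallback.
//Hypothetical wiring; assumes a DataStream<String> named jsonStrDS and import java.nio.charset.StandardCharsets
FlinkKafkaProducer<String> kafkaSink = MyKafkaUtil.getKafkaSinkBySchema(
        new KafkaSerializationSchema<String>() {
            @Override
            public ProducerRecord<byte[], byte[]> serialize(String jsonStr, @Nullable Long timestamp) {
                //Route every record to the placeholder topic "dwd_page_log"
                return new ProducerRecord<>("dwd_page_log", jsonStr.getBytes(StandardCharsets.UTF_8));
            }
        });
jsonStrDS.addSink(kafkaSink);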
PhoenixUtil
JDBC flow to remember: Class.forName() to load the driver, DriverManager.getConnection(),
conn.prepareStatement(sql), ps.execute(), and finally conn.commit().
The commit is mandatory: MySQL auto-commits by default, but Phoenix does not.
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.realtime.common.GmallConfig;
import org.apache.commons.beanutils.BeanUtils;
import java.sql.*;
import java.util.ArrayList;
import java.util.List;
/**
* Author: Felix
* Date: 2021/2/5
* Desc: Query data from Phoenix
*/
public class PhoenixUtil {
private static Connection conn = null;
public static void init(){
try {
//Register the Phoenix JDBC driver
Class.forName("org.apache.phoenix.jdbc.PhoenixDriver");
//Open the Phoenix connection
conn = DriverManager.getConnection(GmallConfig.PHOENIX_SERVER);
//Select the schema (namespace) to operate on
conn.setSchema(GmallConfig.HBASE_SCHEMA);
} catch (Exception e) {
e.printStackTrace();
}
}
// Query data from Phoenix
// e.g. select * from <table> where <column> = <value>
public static <T> List<T> queryList(String sql,Class<T> clazz){
if(conn == null){
init();
}
List<T> resultList = new ArrayList<>();
PreparedStatement ps = null;
ResultSet rs = null;
try {
//Create the statement object
ps = conn.prepareStatement(sql);
//Execute the query
rs = ps.executeQuery();
//Get the column metadata from the result set
ResultSetMetaData metaData = rs.getMetaData();
//Iterate over the result set
while (rs.next()){
//Create an object to hold one row of the result set
T rowData = clazz.newInstance();
for (int i = 1; i <= metaData.getColumnCount(); i++) {
BeanUtils.setProperty(rowData,metaData.getColumnName(i),rs.getObject(i));
}
resultList.add(rowData);
}
} catch (Exception e) {
e.printStackTrace();
throw new RuntimeException("从维度表中查询数据失败");
} finally {
//Release resources
if (rs != null) {
try {
rs.close();
} catch (SQLException e) {
e.printStackTrace();
}
}
if (ps != null) {
try {
ps.close();
} catch (SQLException e) {
e.printStackTrace();
}
}
}
return resultList;
}
public static void main(String[] args) {
System.out.println(queryList("select * from DIM_BASE_TRADEMARK", JSONObject.class));
}
}
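The class above only reads; the reminder at the top of this section (Phoenix does not auto-commit) matters on the write path. A minimal sketch of an execute-and-commit helper that could live inside PhoenixUtil (this method is not part of the original class):
//Hypothetical write helper: commit() is required after execute() because Phoenix does not auto-commit
public static void executeSql(String sql) {
    if (conn == null) {
        init();
    }
    try (PreparedStatement ps = conn.prepareStatement(sql)) {
        ps.execute();
        conn.commit(); //mandatory for Phoenix; MySQL would auto-commit by default
    } catch (SQLException e) {
        throw new RuntimeException("Failed to execute SQL against Phoenix: " + sql, e);
    }
}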
MySQLUtil
In practice MyBatis would be a better fit for this.
import com.atguigu.gmall.realtime.bean.TableProcess;
import com.google.common.base.CaseFormat;
import org.apache.commons.beanutils.BeanUtils;
import java.sql.*;
import java.util.ArrayList;
import java.util.List;
/**
* Author: Felix
* Date: 2021/2/1
* Desc: Utility class for querying data from MySQL
* Implements a simple ORM (Object-Relational Mapping)
* O: Object   - a Java object
* R: Relation - a relational database
* M: Mapping  - map records in relational tables to Java objects
* Database                 Java
* table t_student          class Student
* columns id, name         fields id, name
* record 100, zs           object (100, zs)
* ResultSet (records)      List (Java objects)
*/
public class MySQLUtil {
/**
* @param sql the query to execute
* @param clz the class to map each row to
* @param underScoreToCamel whether to convert underscore column names to camelCase property names
* @param <T>
* @return
*/
public static <T> List<T> queryList(String sql, Class<T> clz, boolean underScoreToCamel) {
Connection conn = null;
PreparedStatement ps = null;
ResultSet rs = null;
try {
//Register the JDBC driver
Class.forName("com.mysql.jdbc.Driver");
//Open the connection
conn = DriverManager.getConnection(
"jdbc:mysql://hadoop202:3306/gmall0820_realtime?characterEncoding=utf-8&useSSL=false",
"root",
"123456");
//Create the statement object
ps = conn.prepareStatement(sql);
//Execute the query; sample rows:
// 100 zs 20
// 200 ls 30
rs = ps.executeQuery();
//Process the result set
//Column metadata of the query result, e.g.:
// id student_name age
ResultSetMetaData metaData = rs.getMetaData();
List<T> resultList = new ArrayList<T>();
//Loop once for every row in the result set
while (rs.next()) {
//Create an object to hold one row of the result set
T obj = clz.newInstance();
//Iterate over all columns and read each column name
for (int i = 1; i <= metaData.getColumnCount(); i++) {
String columnName = metaData.getColumnName(i);
String propertyName = columnName;
if(underScoreToCamel){
//If underscore-to-camel conversion is enabled, use Guava's CaseFormat to turn the column name into the camelCase property name
propertyName = CaseFormat.LOWER_UNDERSCORE.to(CaseFormat.LOWER_CAMEL, columnName);
}
//Use Apache Commons BeanUtils to set the property on obj
BeanUtils.setProperty(obj,propertyName,rs.getObject(i));
}
//Add the populated object for this row to the result list
resultList.add(obj);
}
return resultList;
} catch (Exception e) {
e.printStackTrace();
throw new RuntimeException("从MySQL查询数据失败");
} finally {
//Release resources
if (rs != null) {
try {
rs.close();
} catch (SQLException e) {
e.printStackTrace();
}
}
if (ps != null) {
try {
ps.close();
} catch (SQLException e) {
e.printStackTrace();
}
}
if (conn != null) {
try {
conn.close();
} catch (SQLException e) {
e.printStackTrace();
}
}
}
}
public static void main(String[] args) {
List<TableProcess> list = queryList("select * from table_process", TableProcess.class, true);
for (TableProcess tableProcess : list) {
System.out.println(tableProcess);
}
}
}
ClickHouseUtil (in Flink, any database reachable over JDBC can be written to with this same JdbcSink approach)
import com.atguigu.gmall.realtime.bean.TransientSink;
import com.atguigu.gmall.realtime.common.GmallConfig;
import org.apache.flink.connector.jdbc.JdbcConnectionOptions;
import org.apache.flink.connector.jdbc.JdbcExecutionOptions;
import org.apache.flink.connector.jdbc.JdbcSink;
import org.apache.flink.connector.jdbc.JdbcStatementBuilder;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import java.lang.reflect.Field;
import java.sql.PreparedStatement;
import java.sql.SQLException;
/**
* Author: Felix
* Date: 2021/2/23
* Desc: Utility class for writing to ClickHouse
*/
public class ClickHouseUtil {
/**
* Build a SinkFunction that writes data into ClickHouse
*
* @param sql
* @param <T>
* @return
*/
public static <T> SinkFunction<T> getJdbcSink(String sql) {
SinkFunction<T> sinkFunction = JdbcSink.<T>sink(
//The SQL statement to execute
sql,
//Bind the fields of each stream element to the SQL placeholders, e.g. insert into visitor_stats_0820 values(?,?,?,?,?,?,?,?,?,?,?,?)
new JdbcStatementBuilder<T>() {
//obj is one element from the stream
@Override
public void accept(PreparedStatement ps, T obj) throws SQLException {
//Get all declared fields of the element's class
Field[] fields = obj.getClass().getDeclaredFields();
//Count of fields skipped so far
int skipOffset = 0;
for (int i = 0; i < fields.length; i++) {
Field field = fields[i];
//Check whether the field carries the @TransientSink annotation
TransientSink transientSink = field.getAnnotation(TransientSink.class);
//If it does, the field is excluded from the sink, so skip it when binding the ? placeholders
if (transientSink != null) {
skipOffset++;
continue;
}
//Make private fields accessible
field.setAccessible(true);
try {
//Read the field value
Object o = field.get(obj);
ps.setObject(i + 1 - skipOffset, o);
} catch (IllegalAccessException e) {
e.printStackTrace();
}
}
}
},
//Builder pattern: JdbcExecutionOptions, setting the write batch size
new JdbcExecutionOptions.Builder().withBatchSize(5).build(),
//Builder pattern: JdbcConnectionOptions, setting the connection properties
new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
.withUrl(GmallConfig.CLICKHOUSE_URL)
.withDriverName("ru.yandex.clickhouse.ClickHouseDriver")
.build()
);
return sinkFunction;
}
}
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
@Target(ElementType.FIELD) //applies to fields
@Retention(RetentionPolicy.RUNTIME) //retained at runtime so reflection can read it
public @interface TransientSink {
}
DateTimeUtil
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.util.Date;
/**
* Author: Felix
* Date: 2021/2/20
* Desc: Date/time conversion utility
* SimpleDateFormat is not thread-safe (internally it calls calendar.setTime(date))
* Fix: since JDK 8, DateTimeFormatter can be used instead of SimpleDateFormat
*
*/
public class DateTimeUtil {
public static final DateTimeFormatter dtf = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
public static void main(String[] args) {
System.out.println(ZoneId.systemDefault());
}
/**
* Convert a Date to a "yyyy-MM-dd HH:mm:ss" string
* @return
*/
public static String toYMDhms(Date date){
LocalDateTime localDateTime = LocalDateTime.ofInstant(date.toInstant(), ZoneId.systemDefault());
return dtf.format(localDateTime);
}
/**
* Convert a date string to epoch milliseconds
* @param dateStr
* @return
*/
public static Long toTs(String dateStr){
LocalDateTime localDateTime = LocalDateTime.parse(dateStr, dtf);
long ts = localDateTime.toInstant(ZoneOffset.of("+8")).toEpochMilli();
return ts;
}
}
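A quick round-trip sketch (values are illustrative):
Date now = new Date();
String formatted = DateTimeUtil.toYMDhms(now); //e.g. "2021-02-20 12:00:00"
Long ts = DateTimeUtil.toTs(formatted);        //epoch milliseconds, using the +8 offset hard-coded above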
RedisUtil
import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisPool;
import redis.clients.jedis.JedisPoolConfig;
/**
* Author: Felix
* Date: 2021/2/5
* Desc: Get a Jedis connection from a JedisPool connection pool
*/
public class RedisUtil {
private static JedisPool jedisPool;
public static Jedis getJedis(){
if(jedisPool == null){
JedisPoolConfig jedisPoolConfig = new JedisPoolConfig();
jedisPoolConfig.setMaxTotal(100); //Max total connections
jedisPoolConfig.setBlockWhenExhausted(true); //Block and wait when the pool is exhausted
jedisPoolConfig.setMaxWaitMillis(2000); //Max wait time (ms)
jedisPoolConfig.setMaxIdle(5); //Max idle connections
jedisPoolConfig.setMinIdle(5); //Min idle connections
jedisPoolConfig.setTestOnBorrow(true); //Validate the connection (ping/pong) when borrowing it
jedisPool = new JedisPool(jedisPoolConfig,"hadoop202",6379,10000);
}
return jedisPool.getResource();
}
public static void main(String[] args) {
Jedis jedis = getJedis();
System.out.println(jedis.ping());
}
}
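A usage sketch ("someKey" is a placeholder): a Jedis borrowed from a pool should always be closed so the connection goes back to the pool instead of leaking.
Jedis jedis = RedisUtil.getJedis();
try {
    String value = jedis.get("someKey"); //placeholder key
} finally {
    jedis.close(); //for a pooled Jedis this returns the connection to the pool
}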
Thread pool
package com.atguigu.gmall.realtime.utils;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
/**
* Author: Felix
* Date: 2021/2/19
* Desc: Utility class that creates a singleton thread pool
*/
public class ThreadPoolUtil {
private static ThreadPoolExecutor pool;
/**
* corePoolSize: the number of core threads; it decides whether a newly added task spawns a new thread or is queued in workQueue
* maximumPoolSize: the maximum number of threads; together with the workQueue type it bounds how many threads the pool will create
* keepAliveTime: how long idle threads beyond corePoolSize are kept alive before being destroyed
* unit: the time unit of keepAliveTime
* workQueue: the queue holding tasks that have been submitted to the pool but not yet executed
* @return
*/
public static ThreadPoolExecutor getInstance(){
if(pool == null){
synchronized (ThreadPoolUtil.class){
if(pool == null){
pool = new ThreadPoolExecutor(
4,20,300, TimeUnit.SECONDS,new LinkedBlockingDeque<Runnable>(Integer.MAX_VALUE)
);
}
}
}
return pool;
}
}
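A usage sketch (the task body is a placeholder): every caller gets the same executor because getInstance() is a double-checked-locking singleton.
ThreadPoolExecutor pool = ThreadPoolUtil.getInstance();
pool.submit(() -> {
    //asynchronous work here, e.g. querying a dimension table
    System.out.println(Thread.currentThread().getName());
});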
IK word segmentation (KeywordUtil)
import org.wltea.analyzer.core.IKSegmenter;
import org.wltea.analyzer.core.Lexeme;
import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;
public class KeywordUtil {
//Tokenize the input string and return the tokens as a list
public static List<String> analyze(String text){
List<String> wordList = new ArrayList<>();
//Wrap the string in a character input stream
StringReader sr = new StringReader(text);
//Create the IK segmenter (true = smart mode)
IKSegmenter ikSegmenter = new IKSegmenter(sr, true);
// A Lexeme is one token produced by the segmenter
Lexeme lexeme = null;
//Loop to collect all tokens
while(true){
try {
//Fetch the next token
if((lexeme = ikSegmenter.next())!=null){
String word = lexeme.getLexemeText();
wordList.add(word);
}else{
break;
}
} catch (IOException e) {
e.printStackTrace();
}
}
return wordList;
}
public static void main(String[] args) {
String text = "尚硅谷大数据数仓";
System.out.println(KeywordUtil.analyze(text));
}
}
MyBatis-Plus code generator
package com.markerhub.vueblog;
import com.baomidou.mybatisplus.core.exceptions.MybatisPlusException;
import com.baomidou.mybatisplus.core.toolkit.StringPool;
import com.baomidou.mybatisplus.core.toolkit.StringUtils;
import com.baomidou.mybatisplus.generator.AutoGenerator;
import com.baomidou.mybatisplus.generator.InjectionConfig;
import com.baomidou.mybatisplus.generator.config.*;
import com.baomidou.mybatisplus.generator.config.po.TableInfo;
import com.baomidou.mybatisplus.generator.config.rules.NamingStrategy;
import com.baomidou.mybatisplus.generator.engine.FreemarkerTemplateEngine;
import java.util.ArrayList;
import java.util.List;
import java.util.Scanner;
// Demo: run the main method, enter the table name(s) at the console prompt, and the code is generated into the project directory
public class CodeGenerator {
/**
* <p>
* Read input from the console
* </p>
*/
public static String scanner(String tip) {
Scanner scanner = new Scanner(System.in);
StringBuilder help = new StringBuilder();
help.append("请输入" + tip + ":");
System.out.println(help.toString());
if (scanner.hasNext()) {
String ipt = scanner.next();
if (StringUtils.isNotEmpty(ipt)) {
return ipt;
}
}
throw new MybatisPlusException("请输入正确的" + tip + "!");
}
public static void main(String[] args) {
// Code generator
AutoGenerator mpg = new AutoGenerator();
// Global config
GlobalConfig gc = new GlobalConfig();
String projectPath = System.getProperty("user.dir");
gc.setOutputDir(projectPath + "/src/main/java");
// gc.setOutputDir("D:\\test");
gc.setAuthor("zhenghaozhe");
gc.setOpen(false);
// gc.setSwagger2(true); // Swagger2 annotations on entity fields
gc.setServiceName("%sService");
mpg.setGlobalConfig(gc);
// Data source config
DataSourceConfig dsc = new DataSourceConfig();
dsc.setUrl("jdbc:mysql://localhost:3306/vueblog?useUnicode=true&useSSL=false&characterEncoding=utf8&serverTimezone=UTC");
// dsc.setSchemaName("public");
dsc.setDriverName("com.mysql.cj.jdbc.Driver");
dsc.setUsername("root");
dsc.setPassword("root");
mpg.setDataSource(dsc);
// Package config
PackageConfig pc = new PackageConfig();
pc.setModuleName(null);
pc.setParent("com.markerhub.vueblog");
mpg.setPackageInfo(pc);
// Custom injection config
InjectionConfig cfg = new InjectionConfig() {
@Override
public void initMap() {
// to do nothing
}
};
// If the template engine is Freemarker
String templatePath = "/templates/mapper.xml.ftl";
// If the template engine is Velocity
// String templatePath = "/templates/mapper.xml.vm";
// Custom output config
List<FileOutConfig> focList = new ArrayList<>();
// Custom output configs are generated first
focList.add(new FileOutConfig(templatePath) {
@Override
public String outputFile(TableInfo tableInfo) {
// Custom output file name; note that if the entity has a name prefix/suffix configured, the XML file name changes with it!
return projectPath + "/src/main/resources/mapper/"
+ "/" + tableInfo.getEntityName() + "Mapper" + StringPool.DOT_XML;
}
});
cfg.setFileOutConfigList(focList);
mpg.setCfg(cfg);
// Template config
TemplateConfig templateConfig = new TemplateConfig();
templateConfig.setXml(null);
mpg.setTemplate(templateConfig);
// Strategy config
StrategyConfig strategy = new StrategyConfig();
strategy.setNaming(NamingStrategy.underline_to_camel);
strategy.setColumnNaming(NamingStrategy.underline_to_camel);
strategy.setEntityLombokModel(true);
strategy.setRestControllerStyle(true);
strategy.setInclude(scanner("表名,多个英文逗号分割").split(","));
strategy.setControllerMappingHyphenStyle(true);
strategy.setTablePrefix("m_");
mpg.setStrategy(strategy);
mpg.setTemplateEngine(new FreemarkerTemplateEngine());
mpg.execute();
}
}
MyBatis multi-datasource configuration (Phoenix data source)
import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.SqlSessionFactoryBean;
import org.mybatis.spring.SqlSessionTemplate;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import javax.sql.DataSource;
@Configuration
@MapperScan(basePackages = {"com.bigdata.mlearn.mapper.phoenix"}, sqlSessionFactoryRef = "sqlSessionFactoryArticle")
public class PhoenixArticleDataSource {
@Bean(name = "dataSourceArticle")
@ConfigurationProperties(prefix = "spring.datasource.phoenix.article")
public DataSource dataSource() {
return DataSourceBuilder.create().build();
}
@Bean(name = "sqlSessionFactoryArticle")
public SqlSessionFactory sqlSessionFactory(@Qualifier("dataSourceArticle") DataSource dataSource) throws Exception {
SqlSessionFactoryBean bean = new SqlSessionFactoryBean();
bean.setDataSource(dataSource);
bean.setMapperLocations(new PathMatchingResourcePatternResolver().getResources("classpath*:mapper/phoenix/*.xml"));
return bean.getObject();
}
@Bean(name = "transactionManagerArticle")
public DataSourceTransactionManager transactionManager(@Qualifier("dataSourceArticle") DataSource dataSource) {
return new DataSourceTransactionManager(dataSource);
}
@Bean(name = "sqlSessionTemplateArticle")
public SqlSessionTemplate sqlSessionTemplate(@Qualifier("sqlSessionFactoryArticle") SqlSessionFactory sqlSessionFactory) {
return new SqlSessionTemplate(sqlSessionFactory);
}
}
MyBatis-Plus multi-datasource configuration (MySQL data source)
package com.bigdata.mlearn.config;
import com.baomidou.mybatisplus.core.MybatisConfiguration;
import com.baomidou.mybatisplus.core.MybatisXMLLanguageDriver;
import org.apache.ibatis.session.SqlSessionFactory;
import org.apache.ibatis.type.JdbcType;
import org.mybatis.spring.SqlSessionFactoryBean;
import org.mybatis.spring.SqlSessionTemplate;
import org.mybatis.spring.annotation.MapperScan;
import org.mybatis.spring.transaction.SpringManagedTransactionFactory;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import javax.sql.DataSource;
/**
* @author zhz
* @date 2021/10/14
*/
@Configuration
@MapperScan(basePackages = {"com.bigdata.mlearn.mapper.mysql"}, sqlSessionFactoryRef = "sqlSessionFactory")
public class MysqlClientUserDataSource {
@Bean(name = "dataSource")
@ConfigurationProperties(prefix = "spring.datasource.mysql.clientuser")
@Primary
public DataSource dataSource() {
return DataSourceBuilder.create().build();
}
@Bean(name = "sqlSessionFactory")
@Primary
public SqlSessionFactory sqlSessionFactory(@Qualifier("dataSource") DataSource dataSource) throws Exception {
SqlSessionFactoryBean bean = new SqlSessionFactoryBean();
bean.setDataSource(dataSource);
bean.setTypeAliasesPackage("com.bigdata.mlearn.mapper.mysql");
MybatisConfiguration configuration = new MybatisConfiguration();
configuration.setDefaultScriptingLanguage(MybatisXMLLanguageDriver.class);
configuration.setJdbcTypeForNull(JdbcType.NULL);
bean.setConfiguration(configuration);
bean.setTransactionFactory(new SpringManagedTransactionFactory());
return bean.getObject();
}
@Bean(name = "transactionManager")
@Primary
public DataSourceTransactionManager transactionManager(@Qualifier("dataSource") DataSource dataSource) {
return new DataSourceTransactionManager(dataSource);
}
@Bean(name = "sqlSessionTemplate")
@Primary
public SqlSessionTemplate sqlSessionTemplate(@Qualifier("sqlSessionFactory") SqlSessionFactory sqlSessionFactory) {
return new SqlSessionTemplate(sqlSessionFactory);
}
}
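The two @ConfigurationProperties prefixes above (spring.datasource.phoenix.article and spring.datasource.mysql.clientuser) imply an application.yml along these lines; the URLs, drivers, and credentials below are assumptions, not values from the original project. Note that DataSourceBuilder builds a HikariCP pool by default, which expects the key jdbc-url rather than url.
spring:
  datasource:
    phoenix:
      article:
        jdbc-url: jdbc:phoenix:hadoop202,hadoop203,hadoop204:2181   # assumed ZooKeeper quorum
        driver-class-name: org.apache.phoenix.jdbc.PhoenixDriver
    mysql:
      clientuser:
        jdbc-url: jdbc:mysql://localhost:3306/clientuser?useSSL=false   # assumed database
        driver-class-name: com.mysql.cj.jdbc.Driver
        username: root
        password: root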
RestTemplateConfig (for calling other services' HTTP interfaces)
package com.bigdata.mlearn.config;
import org.apache.http.Header;
import org.apache.http.client.HttpClient;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.impl.client.DefaultConnectionKeepAliveStrategy;
import org.apache.http.impl.client.DefaultHttpRequestRetryHandler;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.message.BasicHeader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.http.client.ClientHttpRequestFactory;
import org.springframework.http.client.HttpComponentsClientHttpRequestFactory;
import org.springframework.http.converter.HttpMessageConverter;
import org.springframework.http.converter.StringHttpMessageConverter;
import org.springframework.web.client.RestTemplate;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
/**
* @author zhz
* @date 2021/11/19
*/
@Configuration
public class RestTemplateConfig {
private static final Logger logger= LoggerFactory.getLogger(RestTemplateConfig.class);
@Bean
public RestTemplate restTemplate() {
// Build the RestTemplate on the pooled request factory, keeping the default message converters
RestTemplate restTemplate = new RestTemplate(httpRequestFactory());
// Set the string encoding to UTF-8: replace the default StringHttpMessageConverter with a UTF-8 one
List<HttpMessageConverter<?>> converterList = restTemplate.getMessageConverters();
HttpMessageConverter<?> converterTarget = null;
for (HttpMessageConverter<?> item : converterList) {
if (item.getClass() == StringHttpMessageConverter.class) {
converterTarget = item;
break;
}
}
if (converterTarget != null) {
converterList.remove(converterTarget);
}
HttpMessageConverter<?> converter = new StringHttpMessageConverter(StandardCharsets.UTF_8);
converterList.add(1,converter);
return restTemplate;
}
@Bean
public ClientHttpRequestFactory httpRequestFactory() {
return new HttpComponentsClientHttpRequestFactory(httpClient());
}
@Bean
public HttpClient httpClient() {
// Keep idle pooled connections alive for 30 seconds
PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager(30, TimeUnit.SECONDS);
//Max total connections in the pool; tune to your own scenario
connectionManager.setMaxTotal(500);
//Max concurrent connections per route (routes are a subdivision of maxTotal)
connectionManager.setDefaultMaxPerRoute(500);
//requestConfig
RequestConfig requestConfig = RequestConfig.custom()
//Socket timeout: max time waiting for response data; exceeding it throws a read timeout
.setSocketTimeout(10000)
//Connect timeout: max time to establish the connection (handshake); exceeding it throws a connect timeout
.setConnectTimeout(5000)
//Timeout for borrowing a connection from the pool; if none becomes available in time, org.apache.http.conn.ConnectionPoolTimeoutException: Timeout waiting for connection from pool is thrown
.setConnectionRequestTimeout(500)
.build();
//headers
List<Header> headers = new ArrayList<>();
headers.add(new BasicHeader("User-Agent", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.16 Safari/537.36"));
headers.add(new BasicHeader("Accept-Encoding", "gzip,deflate"));
headers.add(new BasicHeader("Accept-Language", "zh-CN"));
headers.add(new BasicHeader("Connection", "Keep-Alive"));
headers.add(new BasicHeader("Content-type", "application/json;charset=UTF-8"));
return HttpClientBuilder.create()
.setDefaultRequestConfig(requestConfig)
.setConnectionManager(connectionManager)
.setDefaultHeaders(headers)
// Keep-alive strategy for long connections; requires the Keep-Alive header added above
.setKeepAliveStrategy(new DefaultConnectionKeepAliveStrategy())
//Retry count: set to 2 here; the default handler uses 3 and is not enabled unless configured
.setRetryHandler(new DefaultHttpRequestRetryHandler(2, true))
.build();
}
@Bean
public RestTemplate customRestTemplate(){
HttpComponentsClientHttpRequestFactory httpComponentsClientHttpRequestFactory = new HttpComponentsClientHttpRequestFactory();
httpComponentsClientHttpRequestFactory.setConnectionRequestTimeout(10);
httpComponentsClientHttpRequestFactory.setConnectTimeout(10);
httpComponentsClientHttpRequestFactory.setReadTimeout(10);
return new RestTemplate(httpComponentsClientHttpRequestFactory);
}
}
Usage
public JSONPObject getCateGory(String title) {
String url = "http://127.0.0.1:9999/hanLpType/TypeData";
Map<String, Object> paramMap = new HashMap<String, Object>();
paramMap.put("article", title);
// String result1 = restTemplate.getForObject(url, String.class, paramMap);
JSONPObject result = restTemplate.postForObject(url, paramMap, JSONPObject.class);
return result;
}
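One wiring note for the call above: the config class defines two RestTemplate beans (restTemplate and customRestTemplate), so when injecting, Spring falls back to matching the field name against a bean name; an explicit @Qualifier makes the choice unambiguous. A small sketch (the service class name is made up, Spring imports omitted):
@Service
public class CategoryClient {
    @Autowired
    @Qualifier("restTemplate") //pick the pooled, UTF-8-configured bean defined above
    private RestTemplate restTemplate;
}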
RedisUtils
<dependency>
<groupId>redis.clients</groupId>
<artifactId>jedis</artifactId>
<version>2.9.0</version>
</dependency>
<dependency>
<groupId>org.redisson</groupId>
<artifactId>redisson</artifactId>
<version>3.12.0</version>
</dependency>
import lombok.Data;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.PropertySource;
import org.springframework.stereotype.Component;
import redis.clients.jedis.HostAndPort;
import redis.clients.jedis.JedisCluster;
import redis.clients.jedis.JedisPoolConfig;
import javax.annotation.PostConstruct;
import java.net.InetSocketAddress;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.HashSet;
import java.util.Set;
/**
* @Description:
* @CreateDate: 2021-06-02 17:50
* @Author: xgp
*/
@Slf4j
@Component
@Data
@ConfigurationProperties(prefix = "redis")
public class RedisUtils {
private String address;
private String pwd;
private static RedisUtils redisUtils;
JedisCluster jedisCluster = null;
public static RedisUtils build() {
if (null == redisUtils) {
redisUtils = new RedisUtils();
}
return redisUtils;
}
/**
* Initialize the utility's configuration (runs after Spring injects the properties)
*/
@PostConstruct
private void initializer() {
//Register this Spring-managed instance so that build() returns the initialized bean rather than a blank one
redisUtils = this;
JedisPoolConfig config = new JedisPoolConfig();
config.setMaxTotal(500);
config.setMinIdle(2);
config.setMaxIdle(500);
config.setMaxWaitMillis(10000);
config.setTestOnBorrow(true);
config.setTestOnReturn(true);
Set<HostAndPort> nodes = new HashSet<>();
final String redisAddress=address;
try {
nodes = getRedisAddress(redisAddress);
} catch (MalformedURLException e) {
log.error("redis connect error");
e.printStackTrace();
}
jedisCluster = new JedisCluster(nodes, 10000, 10000, 100, pwd, config);
}
public Set<HostAndPort> getRedisAddress(String hosts) throws MalformedURLException {
String[] hostList = hosts.split(",");
Set<HostAndPort> addresses = new HashSet<>();
for (String host : hostList) {
if (host.startsWith("http")) {
URL url = new URL(host);
addresses.add(new HostAndPort(url.getHost(), url.getPort()));
} else {
String[] parts = host.split(":", 2);
if (parts.length > 1) {
addresses.add(new HostAndPort(parts[0], Integer.parseInt(parts[1])));
} else {
throw new MalformedURLException("invalid elasticsearch hosts format");
}
}
}
return addresses;
}
public JedisCluster getJedisCluster() {
return jedisCluster;
}
/**
* Parse the host list from the configuration file
*
* @param hosts
* @return
* @throws MalformedURLException
*/
public static Set<InetSocketAddress> getRedisAddresses(String hosts) throws MalformedURLException {
String[] hostList = hosts.split(",");
Set<InetSocketAddress> addresses = new HashSet<>();
for (String host : hostList) {
String[] parts = host.split(":", 2);
if (parts.length > 1) {
addresses.add(new InetSocketAddress(parts[0], Integer.parseInt(parts[1])));
} else {
throw new MalformedURLException("invalid elasticsearch hosts format");
}
}
return addresses;
}
}
Usage
JedisCluster jedisCluster = RedisUtils.build().getJedisCluster();
String value = jedisCluster.hget("someKey", "someField"); //placeholder key and field