Installing Elasticsearch 7.X on Windows and Integrating It with Spring Boot

Installing Elasticsearch 7.X on Windows

Versions:
elasticsearch-7.13.1-windows-x86_64
kibana-7.13.1-windows-x86_64
Logstash 7.13.1
Download: https://www.elastic.co/cn/downloads/past-releases

1. Download and install the head plugin

Download: https://codechina.csdn.net/mirrors/mobz/elasticsearch-head?utm_source=csdn_github_accelerator

git clone git://github.com/mobz/elasticsearch-head.git
cd elasticsearch-head
npm install
npm run start

Access URL: http://localhost:9100/

2. Open Kibana

Access URL: http://localhost:5601/

3. Create the indices

PUT /jcms_dict
{
  "settings": {
    "number_of_shards": 3,
    "number_of_replicas": 1,
    "analysis": {
      "analyzer": {
        "ik": {
          "tokenizer": "ik_max_word"
        }
      }
    }
  },
  "mappings": {
    "properties": {
      "i_id": {
        "type": "long"
      },
      "vcname": {
        "type": "text",
        "search_analyzer":"ik_max_word",
        "analyzer": "ik_smart"
      },
      "ft": {
        "type": "text",
        "search_analyzer":"ik_max_word",
        "analyzer": "ik_smart"
      } ,
       "taskcode": {
        "type": "text" 
      } ,
       "catalogname": {
        "type": "text",
        "search_analyzer":"ik_max_word",
        "analyzer": "ik_smart"
      } 
    }
  }
}
PUT /lol
{
  "settings": {
    "number_of_shards": 1,
    "number_of_replicas": 1,
    "analysis": {
      "analyzer": {
        "ik": {
          "tokenizer": "ik_max_word"
        }
      }
    }
  },
  "mappings": {
    "properties": {
      "id": {
        "type": "long"
      },
      "name": {
        "type": "text",
        "search_analyzer":"ik_max_word",
        "analyzer": "ik_smart"
      },
      "realName": {
        "type": "text",
        "search_analyzer":"ik_max_word",
        "analyzer": "ik_smart"
      } ,
       "desc": {
        "type": "text" 
      }  
    }
  }
}
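
To confirm that the two indices above were created, you can inspect them in head or Kibana, or check from Java with the high-level REST client added in the Spring Boot section below. The following is a minimal sketch (the class name IndexCheck is only for illustration), assuming Elasticsearch is reachable at 127.0.0.1:9200 over plain HTTP:

import org.apache.http.HttpHost;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.indices.GetIndexRequest;

public class IndexCheck {
    public static void main(String[] args) throws Exception {
        // assumes the node is listening on 127.0.0.1:9200 over plain http
        try (RestHighLevelClient client = new RestHighLevelClient(
                RestClient.builder(new HttpHost("127.0.0.1", 9200, "http")))) {
            // check that the jcms_dict index created above exists
            boolean exists = client.indices()
                    .exists(new GetIndexRequest("jcms_dict"), RequestOptions.DEFAULT);
            System.out.println("jcms_dict exists: " + exists);
        }
    }
}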

4. Start everything locally

Start Elasticsearch:
cd C:\myWork\dev_env\ELK\ELK7.13.1\elasticsearch\elasticsearch-7.13.1-windows-x86_64\elasticsearch-7.13.1\bin
elasticsearch.bat

Start head:
cd C:\myWork\dev_env\ELK\ELK7.13.1\elasticsearch\elasticsearch-head
npm run start
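
Once both processes are running, connectivity can also be verified from Java. A minimal ping sketch (the class name PingCheck is only for illustration; it assumes the default endpoint http://127.0.0.1:9200):

import org.apache.http.HttpHost;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;

public class PingCheck {
    public static void main(String[] args) throws Exception {
        // ping the node started above on 127.0.0.1:9200
        try (RestHighLevelClient client = new RestHighLevelClient(
                RestClient.builder(new HttpHost("127.0.0.1", 9200, "http")))) {
            System.out.println("Elasticsearch reachable: " + client.ping(RequestOptions.DEFAULT));
        }
    }
}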

Integrating Spring Boot with Elasticsearch 7.X

1. Create a new Spring Boot project

2. Add dependencies to pom.xml

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>2.6.7</version>
        <relativePath/> <!-- lookup parent from repository -->
    </parent>
    <groupId>com.dt</groupId>
    <artifactId>es</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <name>es</name>
    <description>Demo project for Spring Boot</description>
    <properties>
        <java.version>1.8</java.version>
    </properties>
    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>

        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
        </dependency>


        <!-- Note: if you switch to a lower Elasticsearch version, all three version numbers below must be changed together -->
        <dependency>
            <groupId>org.elasticsearch.client</groupId>
            <artifactId>elasticsearch-rest-high-level-client</artifactId>
            <version>7.11.2</version>
        </dependency>

        <!-- The low-level REST client can be omitted -->
        <dependency>
            <groupId>org.elasticsearch.client</groupId>
            <artifactId>elasticsearch-rest-client</artifactId>
            <version>7.11.2</version>
        </dependency>
        <dependency>
            <groupId>org.elasticsearch</groupId>
            <artifactId>elasticsearch</artifactId>
            <version>7.11.2</version>
        </dependency>

        <dependency>
            <groupId>mysql</groupId>
            <artifactId>mysql-connector-java</artifactId>
            <version>8.0.11</version>
        </dependency>
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
        </dependency>
        <dependency>
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-to-slf4j</artifactId>
            <version>2.17.0</version>
            <exclusions>
                <exclusion>
                    <groupId>org.apache.logging.log4j</groupId>
                    <artifactId>log4j-api</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
    </dependencies>
    <build>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
            </plugin>
        </plugins>
    </build>
</project>

3. Create application.yml and configure the web context path

server:
  servlet:
    context-path: /springboot-es

4. Create elasticsearch.properties with the Elasticsearch cluster connection settings

elasticsearch.hosts=127.0.0.1:9200
elasticsearch.username=test
elasticsearch.password=12345

5. Create a database helper class

package com.dt.es.util;

import java.sql.Connection;
import java.sql.DriverManager;

/**
 * Database helper utility.
 */
public class DBHelper {

    public static final String url = "jdbc:mysql://127.0.0.1:3306/xxxx?useUnicode=true&characterEncoding=utf8&serverTimezone=GMT%2B8&useSSL=false";
    public static final String name = "com.mysql.cj.jdbc.Driver";
    public static final String user = "root";
    public static final String password = "123456";

    public static Connection conn = null;

    public static Connection getConn() {
        try {
            Class.forName(name);
            conn = DriverManager.getConnection(url, user, password);// obtain the connection
        } catch (Exception e) {
            e.printStackTrace();
        }
        return conn;
    }
}
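
A quick usage sketch for DBHelper (the class name DBHelperDemo is only for illustration; it assumes MySQL is reachable with the credentials above and uses the t_robot table referenced later in this article):

package com.dt.es.util;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;

public class DBHelperDemo {
    public static void main(String[] args) throws Exception {
        // obtain a connection through the helper and run a trivial query
        try (Connection conn = DBHelper.getConn();
             Statement st = conn.createStatement();
             ResultSet rs = st.executeQuery("select count(*) from t_robot")) {
            if (rs.next()) {
                System.out.println("rows in t_robot: " + rs.getLong(1));
            }
        }
    }
}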

6. Create an Elasticsearch helper class

package com.dt.es.util;

import org.apache.http.HttpHost;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.bulk.BackoffPolicy;
import org.elasticsearch.action.bulk.BulkProcessor;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.query.*;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.builder.SearchSourceBuilder;

import java.io.IOException;
import java.util.concurrent.TimeUnit;
import java.util.function.BiConsumer;

/**
 * Elasticsearch helper utility.
 */
public class EsUtil {
    private static final Logger logger = LogManager.getLogger(com.dt.es.util.EsUtil.class);

    /**
     * Build a RestHighLevelClient.
     *
     * @param hostname host name
     * @param port     port
     * @param schema   protocol (http or https)
     * @return the client
     */
    public RestHighLevelClient getRestHighLevelClient(String hostname, int port, String schema) {
        RestHighLevelClient client = new RestHighLevelClient(RestClient.builder(new HttpHost(hostname, port, schema)));// initialize the client
        return client;
    }

    /**
     * Build a BulkProcessor for bulk operations.
     *
     * @param client the high-level REST client
     * @return the BulkProcessor
     */
    public static BulkProcessor getBulkProcessor(RestHighLevelClient client) {

        BulkProcessor bulkProcessor = null;
        try {

            BulkProcessor.Listener listener = new BulkProcessor.Listener() {
                @Override
                public void beforeBulk(long executionId, BulkRequest request) {
                    logger.info("Try to insert data number : " + request.numberOfActions());
                }

                @Override
                public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {
                    logger.info("************** Success insert data number : " + request.numberOfActions() + " , id: "
                            + executionId);
                }

                @Override
                public void afterBulk(long executionId, BulkRequest request, Throwable failure) {
                    logger.error("Bulk is unsuccess : " + failure + ", executionId: " + executionId);
                }
            };

            BiConsumer<BulkRequest, ActionListener<BulkResponse>> bulkConsumer = (request, bulkListener) -> client
                    .bulkAsync(request, RequestOptions.DEFAULT, bulkListener);

            BulkProcessor.Builder builder = BulkProcessor.builder(bulkConsumer, listener);
            builder.setBulkActions(10000);
            builder.setBulkSize(new ByteSizeValue(300L, ByteSizeUnit.MB));
            builder.setConcurrentRequests(10);
            builder.setFlushInterval(TimeValue.timeValueSeconds(100L));
            builder.setBackoffPolicy(BackoffPolicy.constantBackoff(TimeValue.timeValueSeconds(1L), 3));
            // Note: this step is easy to miss; the official sample does not call it out, and without build() none of the settings applied to the builder above take effect
            bulkProcessor = builder.build();

        } catch (Exception e) {
            e.printStackTrace();
            try {
                // bulkProcessor is null if build() was never reached, so guard against an NPE
                if (bulkProcessor != null) {
                    bulkProcessor.awaitClose(100L, TimeUnit.SECONDS);
                }
                client.close();
            } catch (Exception e1) {
                logger.error(e1.getMessage());
            }
        }
        return bulkProcessor;
    }

    /**
     * Create an index.
     *
     * @param indexName index name
     * @throws IOException
     */
    public static void createIndex(String indexName) throws IOException {
        RestHighLevelClient client = new RestHighLevelClient(RestClient.builder(new HttpHost("127.0.0.1", 9200, "http")));// initialize the client
        CreateIndexRequest requestIndex = new CreateIndexRequest(indexName.toLowerCase());// index names must be lowercase
        // Every index can carry its own settings; tuning the replica count and refresh interval noticeably improves indexing throughput.
        requestIndex.settings(Settings.builder()
                // 5 primary shards
                .put("index.number_of_shards", 5)
                // 0 replicas
                .put("index.number_of_replicas", 0)
                // disable automatic refresh while bulk loading
                .put("index.refresh_interval", "-1"));
        CreateIndexResponse createIndexResponse = client.indices().create(requestIndex, RequestOptions.DEFAULT);
        logger.info("isAcknowledged:" + createIndexResponse.isAcknowledged());
        logger.info("isShardsAcknowledged:" + createIndexResponse.isShardsAcknowledged());
        client.close();
    }

    /**
     * Single-field match query. Note that values are matched case-sensitively.
     *
     * @param indexName      index name
     * @param typeName       type name (not used by the query itself)
     * @param conditionName  field to match on
     * @param conditionValue value to match
     */
    public void matchQuery(String indexName, String typeName, String conditionName, String conditionValue) {
        RestHighLevelClient client = this.getRestHighLevelClient("127.0.0.1", 9200, "http");
        SearchSourceBuilder sourceBuilder = new SearchSourceBuilder();
        SearchRequest rq = new SearchRequest();
        // index to query
        rq.indices(indexName);
        // attach the combined query conditions
        rq.source(sourceBuilder);
        sourceBuilder.from(0);
        sourceBuilder.size(10);
        MatchQueryBuilder matchQueryBuilder = QueryBuilders.matchQuery(conditionName, conditionValue);
        BoolQueryBuilder boolBuilder = QueryBuilders.boolQuery();
        boolBuilder.must(matchQueryBuilder);
        sourceBuilder.query(boolBuilder);
        rq.source(sourceBuilder);
        // execute the request
        try {
            System.out.println("rq:" + rq.source().toString());
            SearchResponse response = client.search(rq, RequestOptions.DEFAULT);
            System.out.println("response:" + response);
            // read the results
            SearchHits hits = response.getHits();
            if (hits.getTotalHits().value > 0) // there are matching documents
                System.out.println("total hits: " + hits.getTotalHits().value);
            for (int i = 0; i < hits.getHits().length; i++) {
                System.out.println("matchQuery===========" + hits.getHits()[i].getSourceAsString());
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        try {
            client.close();
        } catch (IOException e) {
        }
    }
}
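
A minimal sketch of how these helpers are typically combined (the class name EsUtilDemo and the index name demo_index are only for illustration; jcms_dict is the index created in Kibana earlier):

package com.dt.es.util;

public class EsUtilDemo {
    public static void main(String[] args) throws Exception {
        EsUtil esUtil = new EsUtil();
        // create an index with the settings hard-coded in createIndex()
        EsUtil.createIndex("demo_index");
        // run a match query against the jcms_dict index created earlier
        esUtil.matchQuery("jcms_dict", "_doc", "ft", "缴费补贴");
    }
}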

7. Create the Elasticsearch cluster configuration class (initialized at startup)

package com.dt.es.util;

import lombok.Getter;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
import org.apache.http.HttpHost;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder;
import org.elasticsearch.client.RestHighLevelClient;

import java.io.IOException;
import java.util.Properties;

/**
 * Elasticsearch cluster configuration class; the client is initialized in a static block at startup.
 */
@Slf4j
public class ElasticsearchConfig {

    @Setter
    private static Connect connect = new Connect();

    private static volatile RestHighLevelClient esClient;

    private static final byte[] NEWS_LOCK = new byte[0];

    static {
        Properties props = new Properties();
        try {
            props.load(ElasticsearchConfig.class.getResourceAsStream("/elasticsearch.properties"));
        } catch (IOException e) {
            log.error("load elasticsearch config properties exception", e);
        }
        String newsHost = props.getProperty("elasticsearch.hosts");
        String newsUserName = props.getProperty("elasticsearch.username");
        String newsPassword = props.getProperty("elasticsearch.password");
        connect.setHosts(newsHost);
        connect.setUsername(newsUserName);
        connect.setPassword(newsPassword);
        esClient = init(connect);
    }

    /**
     * Initialize the client.
     */
    private static RestHighLevelClient init(Connect connect) {
        String[] hosts = connect.getHosts().split(",");
        HttpHost[] httpHosts = new HttpHost[hosts.length];
        for (int i = 0; i < hosts.length; i++) {
            httpHosts[i] = HttpHost.create(hosts[i]);
        }
        RestClientBuilder restClientBuilder = RestClient.builder(httpHosts);
        // configure basic authentication (uncomment when security is enabled)
      /*  final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
        credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(connect.getUsername(), connect.getPassword()));
        restClientBuilder.setHttpClientConfigCallback(httpClientBuilder -> httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider));*/
        return new RestHighLevelClient(restClientBuilder);
    }

    @Setter
    @Getter
    public static class Connect {
        private String hosts;
        private String username;
        private String password;
    }

    /**
     * Get the Elasticsearch cluster client (lazily initialized with double-checked locking).
     */
    public static RestHighLevelClient getEsClient() {
        if (esClient == null) {
            synchronized (NEWS_LOCK) {
                if (esClient == null) {
                    esClient = init(connect);
                    return esClient;
                }
            }
        }
        return esClient;
    }

}
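
A sketch of how the shared client is obtained and used (the class name ElasticsearchConfigDemo is only for illustration; it assumes elasticsearch.properties from step 4 is on the classpath):

package com.dt.es.util;

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;

public class ElasticsearchConfigDemo {
    public static void main(String[] args) throws Exception {
        // the client is built once in the static block from elasticsearch.properties
        RestHighLevelClient client = ElasticsearchConfig.getEsClient();
        // the shared client is normally kept open for the lifetime of the application
        System.out.println("cluster reachable: " + client.ping(RequestOptions.DEFAULT));
    }
}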

8. Create the operation class: bulk-import MySQL data into Elasticsearch with BulkProcessor

package com.dt.es.operater;

import com.dt.es.util.DBHelper;
import com.dt.es.util.ElasticsearchConfig;
import com.dt.es.util.EsUtil;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.bulk.BulkProcessor;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.RestHighLevelClient;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.concurrent.TimeUnit;

/**
 * Bulk-imports MySQL data into Elasticsearch through a BulkProcessor.
 */
@SuppressWarnings("all")
public class BulkProcess {

    private static final Logger logger = LogManager.getLogger(BulkProcess.class);

    public EsUtil esUtil = new EsUtil();

    /**
     * Query rows from MySQL, assemble them into the map format Elasticsearch needs, and bulk-write them to ES.
     *
     * @param tableName database table name
     * @param indexName index name
     * @param typeName  type name
     * @param limit     number of rows to fetch from the database
     */
    public void writeMysqlDataToES(String tableName, String indexName, String typeName, String limit) {
//        RestHighLevelClient client = esUtil.getRestHighLevelClient("127.0.0.1", 9200, "http");
        RestHighLevelClient client = ElasticsearchConfig.getEsClient();
        BulkProcessor bulkProcessor = esUtil.getBulkProcessor(client);

        Connection conn = null;
        PreparedStatement ps = null;
        ResultSet rs = null;
        try {
            conn = DBHelper.getConn();
            logger.info("Start handle data :" + tableName);
            String sql = "select id, name ,taskcode ,ft  from " + tableName + " limit "+limit;
            ps = conn.prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
            ps.setFetchSize(Integer.MIN_VALUE);
            rs = ps.executeQuery();
            ResultSetMetaData colData = rs.getMetaData();
            ArrayList<HashMap<String, String>> dataList = new ArrayList<HashMap<String, String>>();
            HashMap<String, String> map = null;
            int count = 0;
            String c = null;
            String v = null;
            while (rs.next()) {
                count++;
                map = new HashMap<String, String>(100);
                for (int i = 1; i <= colData.getColumnCount(); i++) {
                    c = colData.getColumnName(i);
                    v = rs.getString(c);
                    map.put(c, v);
                }
                dataList.add(map);
                // flush every 200,000 rows; rows that do not fill a full batch are submitted after the loop
                // adjust the batch size to your environment (the author has used up to 2,000,000)
                if (count % 200000 == 0) {
                    logger.info("Mysql handle data number : " + count);
                    // write to ES
                    for (HashMap<String, String> hashMap2 : dataList) {
                        bulkProcessor.add(new IndexRequest(indexName, typeName, hashMap2.get("id")).source(hashMap2));
                    }
                    // clear the map and the list after each batch is handed off
                    map.clear();
                    dataList.clear();
                }
            }
            // submit the remaining rows that did not reach the 200,000 threshold
            for (HashMap<String, String> hashMap2 : dataList) {
                IndexRequest indexRequest = new IndexRequest(indexName, typeName, hashMap2.get("id"));
                indexRequest.source(hashMap2);
                bulkProcessor.add(indexRequest);
            }
            logger.info("-------------------------- Finally insert number total : " + count);
            // flush buffered requests to ES; documents may not be searchable immediately, depending on the index refresh interval
            bulkProcessor.flush();
        } catch (Exception e) {
            logger.error(e.getMessage());
        } finally {
            try {
                rs.close();
                ps.close();
                conn.close();
                boolean terminatedFlag = bulkProcessor.awaitClose(150L, TimeUnit.SECONDS);
               // client.close();
                logger.info(terminatedFlag);
            } catch (Exception e) {
                logger.error(e.getMessage());
            }
        }
    }
}
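
Note that the new IndexRequest(indexName, typeName, id) constructor used above is deprecated in the 7.x client because mapping types are being removed. A typeless sketch of the same indexing call (the class name TypelessIndexing is only for illustration):

package com.dt.es.operater;

import java.util.Map;

import org.elasticsearch.action.bulk.BulkProcessor;
import org.elasticsearch.action.index.IndexRequest;

public class TypelessIndexing {

    /**
     * Typeless alternative to new IndexRequest(index, type, id).source(map).
     */
    public static void add(BulkProcessor bulkProcessor, String indexName, Map<String, String> row) {
        bulkProcessor.add(new IndexRequest(indexName)
                .id(row.get("id"))
                .source(row));
    }
}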

9. Create the query operation class

package com.dt.es.operater;

import com.dt.es.util.EsUtil;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

/**
 * Elasticsearch query operations.
 */
public class QueryProcess {
    private static final Logger logger = LogManager.getLogger(QueryProcess.class);

    public EsUtil esUtil = new EsUtil();

    public void matchQuery(String indexName, String typeName) {
        try {
            long startTime = System.currentTimeMillis();
            // single-field match queries
            esUtil.matchQuery(indexName, typeName, "vcname", "符合资助条件的救助对象参加城乡居民基本医疗保险个人");
            esUtil.matchQuery(indexName, typeName, "ft", "缴费补贴");
            esUtil.matchQuery(indexName, typeName, "taskcode", "11230400MB16745924323053600300101");
            logger.info(" use time: " + (System.currentTimeMillis() - startTime) / 1000 + "s");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
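
Usage is a one-liner (the class name QueryProcessDemo is only for illustration; jcms_dict is the index populated above):

package com.dt.es.operater;

public class QueryProcessDemo {
    public static void main(String[] args) {
        // run the three sample match queries against the jcms_dict index
        new QueryProcess().matchQuery("jcms_dict", "_doc");
    }
}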

10. Create the delete operation class

package com.dt.es.operater;

import com.dt.es.util.EsUtil;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;

import java.io.IOException;

/**
 * Elasticsearch delete operations.
 */
public class RemoveProcess {

    public EsUtil esUtil = new EsUtil();

    /**
     * Delete a document by id.
     * @param indexName index name
     * @param id document id
     */
    public void removeDataById(String indexName, String id) {
        RestHighLevelClient client = esUtil.getRestHighLevelClient("127.0.0.1", 9200, "http");
        DeleteRequest deleteRequest = new DeleteRequest();
        deleteRequest.index(indexName);
        deleteRequest.id(id);
        try {
            client.delete(deleteRequest, RequestOptions.DEFAULT);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
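
A usage sketch (the class name RemoveProcessDemo is only for illustration; the index name and document id match the commented-out example in EsApplication below):

package com.dt.es.operater;

public class RemoveProcessDemo {
    public static void main(String[] args) {
        // delete the document with id 2302 from the jcms_dict2 index
        new RemoveProcess().removeDataById("jcms_dict2", "2302");
    }
}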


11. Create a test controller

package com.dt.es.web;

import com.dt.es.operater.BulkProcess;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

/**
 * Test controller.
 */
@RestController
@RequestMapping("/es")
public class TestController {

    /**
     * Bulk-insert test endpoint.
     * @param limit number of rows to import from MySQL
     */
    @PostMapping("/insert")
    public void insert(String limit) {
        // query MySQL rows, assemble them into the map format ES needs, and bulk-write them to ES
        String dbTableName = "t_robot";
        String indexName1 = "jcms_dict2";
        String typeName1 = "_doc";
        // bulk insert
        BulkProcess bulk = new BulkProcess();
        bulk.writeMysqlDataToES(dbTableName, indexName1, typeName1, limit);
    }
}

12. EsApplication

package com.dt.es;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

@SpringBootApplication
public class EsApplication {

    public static void main(String[] args) {
        // Start as a web application
        SpringApplication.run(EsApplication.class, args);

        // Alternatively, run the operations directly instead of going through the controller:

        // Query ES
        // QueryProcess qp = new QueryProcess();
        // qp.matchQuery("jcms_dict", "_doc");

        // Query MySQL rows, assemble them into the map format ES needs, and bulk-write them to ES
        // BulkProcess bulk = new BulkProcess();
        // bulk.writeMysqlDataToES("t_robot", "jcms_dict2", "_doc", "20");

        // Delete a document by id
        // RemoveProcess rm = new RemoveProcess();
        // rm.removeDataById("jcms_dict2", "2302");
    }

}

13. Testing
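
With the application running, the insert endpoint can be exercised from any HTTP client. A minimal sketch using Spring's RestTemplate (the class name InsertEndpointTest and the limit value 20 are only for illustration; port 8080 is the Spring Boot default and /springboot-es is the context path configured in application.yml):

package com.dt.es;

import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.web.client.RestTemplate;

public class InsertEndpointTest {
    public static void main(String[] args) {
        // POST limit=20 as a form parameter to the /es/insert endpoint
        MultiValueMap<String, String> form = new LinkedMultiValueMap<>();
        form.add("limit", "20");
        HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MediaType.APPLICATION_FORM_URLENCODED);
        String url = "http://localhost:8080/springboot-es/es/insert";
        new RestTemplate().postForEntity(url, new HttpEntity<>(form, headers), Void.class);
        System.out.println("insert triggered; check the jcms_dict2 index in head or Kibana");
    }
}

After the import finishes, the documents should be visible under the jcms_dict2 index in the head plugin at http://localhost:9100/.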
