windows中jfinal整合es
es安装
1.es下载地址:https://www.elastic.co/downloads/past-releases
2.下载好es的zip包后解压缩
3.修改配置文件:打开config文件夹,对其中的elasticsearch.yml文件进行编辑
给下列信息去掉#注释
node.name: elasticsearch
network.host: localhost
http.port: 9200
cluster.initial_master_nodes: ["elasticsearch"]
,并添加
http.cors.enabled: true
http.cors.allow-origin: "*"
node.master: true
node.data: true
4.打开cmd命令窗口,切换到es目录下的bin目录:输入elasticsearch启动es.(一开始双击启动bin目录下的elasticsearch-certgen.bat,但是一直提示ssl认证,找了好多方法都没有成功,无奈只能输入命令启动)
5.访问http://localhost:9200 显示下图信息则为成功
elasticsearch-head-master安装(实现页面可视化操作)
1.下载并解压到本地
2.打开黑窗口,跳转到解压文件下,输入npm start启动
3.启动成功访问http://localhost:9100
logstash安装(实现mysql与es实时更新)
1.下载并解压logstash
2.下载并解压好驱动mysql-connector-java
3.打开解压好的logstash文件夹,进入bin目录,创建logstash.conf文件
4.编辑logstash.conf文件.
input {
  # One jdbc block per table; to sync multiple tables, duplicate this jdbc module.
  jdbc {
    # MySQL connection string; "sjk" is the database name.
    # NOTE(review): removed the stray space after "?" in the original URL —
    # a space inside the JDBC URL makes the connection fail.
    jdbc_connection_string => "jdbc:mysql://localhost:3306/sjk?useUnicode=true&characterEncoding=utf8&serverTimezone=UTC"
    # Database credentials.
    jdbc_user => "root"
    jdbc_password => "root"
    # Path to the MySQL JDBC driver jar.
    jdbc_driver_library => "C:/Users/Aoli/Desktop/mysql-connector-java-5.1.46/mysql-connector-java-5.1.46.jar"
    # Driver class name (com.mysql.jdbc.Driver matches Connector/J 5.x).
    jdbc_driver_class => "com.mysql.jdbc.Driver"
    jdbc_validate_connection => "true"
    # Enable paging so large tables are fetched in chunks.
    jdbc_paging_enabled => "true"
    jdbc_page_size => "1000"
    # Timezone used when interpreting DATE/DATETIME columns.
    jdbc_default_timezone => "Asia/Shanghai"
    # SQL statement to execute directly.
    statement => "select * from F_POST_TMP "
    # Alternatively, load the SQL from a file (path + name):
    # statement_filepath => "/hw/elasticsearch/logstash-6.2.4/bin/test.sql"
    # Polling schedule, cron-style. Fields left-to-right:
    # minute, hour, day-of-month, month, day-of-week.
    # All "*" means: run every minute.
    schedule => "* * * * *"
    # Run every 10 minutes instead:
    #schedule => "*/10 * * * *"
    # Record the last value of tracking_column between runs so that only
    # new/changed rows are picked up; state is saved to last_run_metadata_path.
    record_last_run => true
    # File that stores the last synced offset.
    last_run_metadata_path => "D:/es/logstash-7.3.2/logs/last_id.txt"
    use_column_value => true
    # Type of the tracking column: "numeric" or "timestamp".
    tracking_column_type => "numeric"
    tracking_column => "id"
    # false = resume from the saved offset instead of re-reading everything.
    clean_run => false
    # Event type tag (optional):
    #type => "jdbc"
  }
}
output {
  elasticsearch {
    # Elasticsearch host(s) and HTTP port.
    hosts => ["http://localhost:9200"]
    # Target index name (user-defined).
    index => "spider"
    # Document type — deprecated in ES 7.x; "_doc" is the only accepted value.
    document_type => "_doc"
    # Use the database "id" column as the document _id so updates overwrite.
    document_id => "%{id}"
  }
  stdout {
    codec => json_lines
  }
}
jfinal整合es
1.在pom.xml文件中添加相关坐标(注意版本号要与es的一致)
<dependency>
<groupId>org.elasticsearch</groupId>
<artifactId>elasticsearch</artifactId>
<version>7.9.1</version>
</dependency>
<dependency>
<groupId>org.elasticsearch.client</groupId>
<artifactId>transport</artifactId>
<version>7.9.1</version>
</dependency>
<!-- NOTE: log4j 2.7 is affected by Log4Shell (CVE-2021-44228) and related CVEs;
     use 2.17.1 or later. -->
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<version>2.17.1</version>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<version>2.17.1</version>
</dependency>
2.在配置文件中填写配置信息(要与es中config下的elasticsearch.yml中的信息对应)
#es配置信息
elasticsearch_ip=localhost
elasticsearch_port=9300
cluster_name=elasticsearch
3.编写ElasticSearchPlugin类实现IPlugin接口
import com.alibaba.fastjson.JSON;
import com.jfinal.config.Plugins;
import com.jfinal.kit.LogKit;
import com.jfinal.kit.PropKit;
import com.jfinal.plugin.IPlugin;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.transport.client.PreBuiltTransportClient;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.List;
public class ElasticSearchPlugin implements IPlugin{
private String ip;
private int port;
private String clusterName;
private static TransportClient client;
public ElasticSearchPlugin(String ip,int port,String clusterName){
this.ip = ip;
this.port = port;
this.clusterName = clusterName;
}
public static TransportClient getClient() {
return client;
}
@Override
public boolean start() {
Settings settings = Settings.builder()
//设置ES实例的名称
.put("cluster.name", this.clusterName)
//自动嗅探整个集群的状态,把集群中其他ES节点的ip添加到本地的客户端列表中
.put("client.transport.sniff", true)
.build();
/**
* 这里的连接方式指的是没有安装x-pack插件,如果安装了x-pack则参考{@link ElasticsearchXPackClient}
* 1. java客户端的方式是以tcp协议在9300端口上进行通信
* 2. http客户端的方式是以http协议在9200端口上进行通信
*/
try {
System.out.println("this.ip"+this.ip+"this.port"+this.port);
client = new PreBuiltTransportClient(settings).addTransportAddress(new TransportAddress(InetAddress.getByName(this.ip), this.port));
List<DiscoveryNode> nodes = client.connectedNodes();
if (nodes.isEmpty()) {
LogKit.info("No NODES Connected");
}else {
for (DiscoveryNode node : nodes){
LogKit.info("节点信息:"+node.getHostName()+node.getName()+node.getHostAddress());
}
}
LogKit.info("ElasticsearchClient 连接成功,节点包括:"+ JSON.toJSON(client.listedNodes()));
} catch (UnknownHostException e) {
LogKit.info(e.getMessage());
}
return true;
}
@Override
public boolean stop() {
if (client != null){
client.close();
}
return true;
}
}
4.然后就可以使用啦!!!
// Obtain the shared client and prepare a search against the target index.
SearchRequestBuilder srb = ElasticSearchPlugin.getClient().prepareSearch("索引名称");
// match_phrase query on one field. NOTE(review): the original used full-width
// quotes (“ ”) around the search string, which does not compile in Java —
// replaced with ASCII double quotes.
QueryBuilder queryBuilder = QueryBuilders.matchPhraseQuery("需要搜索的字段名称", "需要搜索的字符串");
// Execute synchronously and block for the response.
SearchResponse sr = srb.setQuery(QueryBuilders.boolQuery().must(queryBuilder)).execute().actionGet();
SearchHits hits = sr.getHits();
for (SearchHit hit : hits) {
    // Print each returned document as its JSON source.
    System.out.println(hit.getSourceAsString());
}