Gitee repository:
https://gitee.com/liushanshan126/elasticsearch-test.git
Project 1: Log search with ELK
Architecture diagram: (image not reproduced in these notes)
1. The logback-spring.xml file
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
    <!-- Log file storage location; use an absolute path -->
    <property name="LOG_HOME" value="d:/logs"/>
    <!-- Console output -->
    <appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <!-- Output format: %d is the date, %thread the thread name, %-5level the level
                 left-aligned in 5 characters, %msg the log message, %n a newline -->
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
            <charset>utf8</charset>
        </encoder>
    </appender>
    <!-- One log file per day -->
    <appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <!-- File name pattern for the rolled log files -->
            <fileNamePattern>${LOG_HOME}/log-%d{yyyy-MM-dd}.log</fileNamePattern>
        </rollingPolicy>
        <encoder>
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
        </encoder>
    </appender>
    <!-- Asynchronous output -->
    <appender name="ASYNC" class="ch.qos.logback.classic.AsyncAppender">
        <!-- Never drop events. By default, once the queue is 80% full,
             TRACE, DEBUG, and INFO events are discarded -->
        <discardingThreshold>0</discardingThreshold>
        <!-- Queue depth; affects performance. The default is 256 -->
        <queueSize>512</queueSize>
        <!-- The downstream appender; at most one appender-ref is allowed -->
        <appender-ref ref="FILE"/>
    </appender>
    <logger name="org.apache.ibatis.cache.decorators.LoggingCache" level="DEBUG" additivity="false">
        <appender-ref ref="CONSOLE"/>
    </logger>
    <logger name="org.springframework.boot" level="DEBUG"/>
    <root level="info"> <!-- Events at INFO level and above go to the two appenders below -->
        <!--<appender-ref ref="ASYNC"/>-->
        <!-- Write to the file managed by the FILE appender -->
        <appender-ref ref="FILE"/>
        <!-- Write to the console via the CONSOLE appender -->
        <appender-ref ref="CONSOLE"/>
    </root>
</configuration>
2. Test code that writes log entries to the file
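The test code itself isn't reproduced in these notes; a minimal sketch, assuming plain SLF4J logging on top of the configuration above (the class name and messages are hypothetical):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LogProducerTest {

    private static final Logger log = LoggerFactory.getLogger(LogProducerTest.class);

    public static void main(String[] args) {
        // Each event goes through the CONSOLE and FILE appenders and ends up
        // in d:/logs/log-yyyy-MM-dd.log, where Logstash can pick it up.
        for (int i = 0; i < 100; i++) {
            log.info("test log message {}", i);
        }
    }
}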
3. Logstash then picks up the file saved on the local disk, filters it, and pushes it into Elasticsearch; Kibana connects to Elasticsearch and displays the data (a sketch of this pipeline's .conf file follows).
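The .conf file for this pipeline is not shown in the original notes; a minimal sketch, assuming the log directory configured above and a local Elasticsearch (the path and index name are illustrative; a filter block, e.g. grok, could additionally parse the logback pattern into fields):

input {
    file {
        # matches the daily files produced by the FILE appender
        path => "d:/logs/log-*.log"
        start_position => "beginning"
    }
}
output {
    elasticsearch {
        hosts => "localhost:9200"
        index => "app_log"
    }
}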
Project 2: On-site data search with Elasticsearch
1.1 Importing MySQL data into Elasticsearch with Logstash
Step 1: MySQL data preparation: the course_pub table queried below needs a timestamp column that is updated on every change, so that Logstash can sync incrementally.
Step 2: Logstash .conf file configuration (important):
The mysql.conf file:
input {
    stdin {
    }
    jdbc {
        jdbc_connection_string => "jdbc:mysql://localhost:3306/test?useUnicode=true&characterEncoding=utf-8&useSSL=true&serverTimezone=UTC"
        # the user we wish to execute our statement as
        jdbc_user => "root"
        jdbc_password => "root"
        # the path to our downloaded jdbc driver
        jdbc_driver_library => "D:/elasticsearch7/mysql-connector-java-5.1.9.jar"
        # the name of the driver class for mysql
        jdbc_driver_class => "com.mysql.jdbc.Driver"
        jdbc_paging_enabled => "true"
        jdbc_page_size => "50000"
        # SQL to run; alternatively point statement_filepath at a .sql file
        #statement_filepath => "/conf/course.sql"
        statement => "select * from course_pub where timestamp > date_add(:sql_last_value,INTERVAL 8 HOUR)"
        # schedule in cron-like syntax; "* * * * *" runs the statement every minute
        schedule => "* * * * *"
        # persist :sql_last_value between runs for incremental sync
        record_last_run => true
        last_run_metadata_path => "D:/elasticsearch7/logstash-7.3.0/config/logstash_metadata"
    }
}
output {
    elasticsearch {
        # Elasticsearch address and port
        hosts => "localhost:9200"
        #hosts => ["localhost:9200"]
        # target index name
        index => "xc_course"
        document_id => "%{id}"
        document_type => "_doc"
        # index template to install on startup (see xc_course_template.json below)
        template => "D:/elasticsearch7/logstash-7.3.0/config/xc_course_template.json"
        template_name => "xc_course"
        template_overwrite => "true"
    }
    stdout {
        # print each event to the console as a JSON line
        codec => json_lines
    }
}
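With both files under the Logstash config directory, the pipeline can be started with bin/logstash -f config/mysql.conf (bin\logstash.bat on Windows). The jdbc input then re-runs the statement every minute, picking up only rows whose timestamp is newer than the :sql_last_value recorded in logstash_metadata.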
The xc_course_template.json file (defines the index mapping):
{
"mappings" : {
"_doc" : {
"_source" : {
"excludes" : [ "teachplan", "description", "content" ]
},
"properties" : {
"content" : {
"analyzer" : "ik_max_word",
"type" : "text"
},
"description" : {
"copy_to" : [ "content" ],
"type" : "text"
},
"grade" : {
"type" : "keyword"
},
"id" : {
"type" : "keyword"
},
"mt" : {
"type" : "keyword"
},
"name" : {
"type" : "text",
"analyzer" : "ik_max_word"
},
"users" : {
"index" : false,
"type" : "text"
},
"charge" : {
"type" : "keyword"
},
"valid" : {
"type" : "keyword"
},
"pic" : {
"index" : false,
"type" : "keyword"
},
"qq" : {
"index" : false,
"type" : "keyword"
},
"price" : {
"type" : "float"
},
"price_old" : {
"type" : "float"
},
"st" : {
"type" : "keyword"
},
"status" : {
"type" : "keyword"
},
"studymodel" : {
"type" : "keyword"
},
"teachmode" : {
"type" : "keyword"
},
"teachplan" : {
"copy_to" : [ "content" ],
"type" : "text"
},
"expires" : {
"type" : "date",
"format": "yyyy-MM-dd HH:mm:ss"
},
"pub_time" : {
"type" : "date",
"format": "yyyy-MM-dd HH:mm:ss"
},
"start_time" : {
"type" : "date",
"format": "yyyy-MM-dd HH:mm:ss"
},
"end_time" : {
"type" : "date",
"format": "yyyy-MM-dd HH:mm:ss"
}
}
},
"settings" : {
"number_of_shards" : "1" ,
"number_of_replicas" : "0"
},
"template" : "xc_course"
}
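On startup, the elasticsearch output uploads this file as an index template named xc_course (template_name) and, since template_overwrite is true, replaces any existing version; the mapping then takes effect when the xc_course index is first created.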
Contents of logstash_metadata (a YAML file in which Logstash records the last run time, i.e. the value substituted for :sql_last_value):
--- 2022-07-08 06:10:00.258000000 Z
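The search side of Project 2 then queries this index from Java. A minimal sketch with the Elasticsearch 7.3 RestHighLevelClient, assuming a local node (the keyword, field choice, and class name are illustrative, not the repository's actual code; note that teachplan and description are folded into content via copy_to):

import java.io.IOException;
import org.apache.http.HttpHost;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.builder.SearchSourceBuilder;

public class CourseSearch {
    public static void main(String[] args) throws IOException {
        try (RestHighLevelClient client = new RestHighLevelClient(
                RestClient.builder(new HttpHost("localhost", 9200, "http")))) {
            // multi_match across the analyzed fields defined in the template
            SearchSourceBuilder source = new SearchSourceBuilder()
                    .query(QueryBuilders.multiMatchQuery("spring", "name", "content"));
            SearchRequest request = new SearchRequest("xc_course").source(source);
            SearchResponse response = client.search(request, RequestOptions.DEFAULT);
            for (SearchHit hit : response.getHits().getHits()) {
                System.out.println(hit.getSourceAsString());
            }
        }
    }
}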