Flink SQL: Syncing MySQL to Elasticsearch

1. Syncing MySQL data to Elasticsearch with Flink SQL

The general recipe follows the example from the official documentation:
https://ci.apache.org/projects/flink/flink-docs-release-1.12/dev/table/connectors/elasticsearch.html#document-type

CREATE TABLE myUserTable (
  user_id STRING,
  user_name STRING,
  uv BIGINT,
  pv BIGINT,
  PRIMARY KEY (user_id) NOT ENFORCED
) WITH (
  'connector' = 'elasticsearch-7',
  'hosts' = 'http://localhost:9200',
  'index' = 'users'
);
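
Note that because the DDL declares PRIMARY KEY (user_id) NOT ENFORCED, the connector runs in upsert mode and uses the key to compute the Elasticsearch document id; without a primary key it runs in append mode and lets Elasticsearch generate ids.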

The full list of connector options (hosts, index, authentication, bulk-flush settings, etc.) is given in tables on the documentation page linked above.

Syncing MySQL to Elasticsearch boils down to:
prepare the environment ----> create the source table ----> create the sink table ----> (query the source table and insert into the sink table)
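
Concretely, in Flink SQL this means one CREATE TABLE with the 'jdbc' connector for the source, one CREATE TABLE with the 'elasticsearch-7' connector for the sink, and a single INSERT INTO ... SELECT to start the sync job. Section 5 walks through exactly these steps in Java, and a plain-SQL version is sketched at the end.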

2. Add the dependencies

Two connector versions are currently available, one per Elasticsearch major version:

<dependency>
  <groupId>org.apache.flink</groupId>
  <artifactId>flink-connector-elasticsearch7_2.11</artifactId>
  <version>1.12.3</version>
</dependency>

<dependency>
  <groupId>org.apache.flink</groupId>
  <artifactId>flink-connector-elasticsearch6_2.11</artifactId>
  <version>1.12.3</version>
</dependency>
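
Only the Elasticsearch sink dependency is shown here. Since the job below also reads from MySQL over JDBC, it additionally needs flink-connector-jdbc_2.11 and the MySQL driver (mysql-connector-java) on the classpath; these artifact names assume Flink 1.12 with Scala 2.11, matching the dependencies above.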

3. Implementing it yourself

3.1 Create the index
# create the index
PUT /wudl_dbes
3.2 Create the mapping
# create the mapping
PUT /wudl_dbes/_mapping
{
  "properties": {
    "name": {
      "type": "text",
      "index": true
    },
    "sex": {
      "type": "text",
      "index": false
    },
    "age": {
      "type": "long",
      "index": false
    }
  }
}
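
You can fetch the mapping back to verify it was applied:

GET /wudl_dbes/_mapping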
3.3 Insert data
PUT /wudl_dbes/_doc/1
{
    "name": "HFDS",
    "sex": "男",
    "age": 18
}

PUT /wudl_dbes/_doc/2
{
    "name": "HIVE",
    "sex": "女",
    "age": 20
}

PUT /wudl_dbes/_doc/3
{
    "name": "Flink",
    "sex": "女",
    "age": 18
}


3.4 Query

GET /wudl_dbes/_doc/1
Response:

{
  "_index" : "wudl_dbes",
  "_type" : "_doc",
  "_id" : "1",
  "_version" : 1,
  "_seq_no" : 0,
  "_primary_term" : 1,
  "found" : true,
  "_source" : {
    "name" : "HFDS",
    "sex" : "男",
    "age" : 18
  }
}

GET /wudl_dbes/_search

Response:

{
  "took" : 1,
  "timed_out" : false,
  "_shards" : {
    "total" : 1,
    "successful" : 1,
    "skipped" : 0,
    "failed" : 0
  },
  "hits" : {
    "total" : {
      "value" : 3,
      "relation" : "eq"
    },
    "max_score" : 1.0,
    "hits" : [
      {
        "_index" : "wudl_dbes",
        "_type" : "_doc",
        "_id" : "1",
        "_score" : 1.0,
        "_source" : {
          "name" : "HFDS",
          "sex" : "男",
          "age" : 18
        }
      },
      {
        "_index" : "wudl_dbes",
        "_type" : "_doc",
        "_id" : "2",
        "_score" : 1.0,
        "_source" : {
          "name" : "HIVE",
          "sex" : "女",
          "age" : 20
        }
      },
      {
        "_index" : "wudl_dbes",
        "_type" : "_doc",
        "_id" : "3",
        "_score" : 1.0,
        "_source" : {
          "name" : "Flink",
          "sex" : "女",
          "age" : 18
        }
      }
    ]
  }
}
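
(The "_type" : "_doc" field in these responses is a leftover of pre-7.x mapping types, which are deprecated in Elasticsearch 7; _doc is the only type.)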


4. MySQL table schema

CREATE TABLE `myEs` (
  `id` int(64) DEFAULT NULL,
  `name` varchar(64) DEFAULT NULL,
  `sex` varchar(64) DEFAULT NULL,
  `age` int(64) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
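
To give the sync job something to read, seed the table with a few rows; the values below are illustrative and mirror the documents written to Elasticsearch above:

INSERT INTO `myEs` (`id`, `name`, `sex`, `age`) VALUES
  (1, 'HFDS', '男', 18),
  (2, 'HIVE', '女', 20),
  (3, 'Flink', '女', 18);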

5. My implementation

package com.wudl.flink.examples;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.SqlDialect;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @ClassName : FlinkSqlMysqlToElasticsearch
 * @Description : Flink SQL - sync MySQL to Elasticsearch
 * @Author : wudl
 * @Date: 2021-08-24 23:28
 */
public class FlinkSqlMysqlToElasticsearch {
    public static void main(String[] args) {

        String dbUrl = "jdbc:mysql://192.168.1.180:3306/MyFlink";
        String userName = "root";
        String passWord = "123456";
        // name of the source table in MySQL (see Section 4)
        String mysqlTableName = "myEs";

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        EnvironmentSettings settings = EnvironmentSettings.newInstance().inStreamingMode().useBlinkPlanner().build();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, settings);
        // 1. Use the default (Flink) SQL dialect
        tableEnv.getConfig().setSqlDialect(SqlDialect.DEFAULT);

        // Sink table backed by the wudl_dbes index created in Section 3
        TableResult sinkTable = tableEnv.executeSql("CREATE TABLE esTable (" +
                "id INT," +
                "name STRING," +
                "sex STRING," +
                "age INT" +
                ") WITH (" +
                "'connector' = 'elasticsearch-7'," +
                "'hosts' = 'http://node02.com:9200'," +
                "'index' = 'wudl_dbes'" +
                ")");

        // Source table backed by the MySQL table, read over JDBC
        TableResult sourceTable = tableEnv.executeSql("CREATE TABLE sourceMySqlTable (" +
                "id INT," +
                "name STRING," +
                "sex STRING," +
                "age INT" +
                ") WITH (" +
                "'connector' = 'jdbc'," +
                "'url' = '" + dbUrl + "'," +
                "'table-name' = '" + mysqlTableName + "'," +
                "'username' = '" + userName + "'," +
                "'password' = '" + passWord + "'" +
                ")");

        // Query the source table and write the rows into the Elasticsearch sink
        tableEnv.executeSql("INSERT INTO esTable SELECT id, name, sex, age FROM sourceMySqlTable");
    }
}
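
For reference, the same pipeline can be written as plain Flink SQL and submitted from the SQL Client; this is a sketch reusing the hosts, credentials, and table names from the Java job above:

-- source: the MySQL table, read over JDBC
CREATE TABLE sourceMySqlTable (
  id INT,
  name STRING,
  sex STRING,
  age INT
) WITH (
  'connector' = 'jdbc',
  'url' = 'jdbc:mysql://192.168.1.180:3306/MyFlink',
  'table-name' = 'myEs',
  'username' = 'root',
  'password' = '123456'
);

-- sink: the wudl_dbes index created in Section 3
CREATE TABLE esTable (
  id INT,
  name STRING,
  sex STRING,
  age INT
) WITH (
  'connector' = 'elasticsearch-7',
  'hosts' = 'http://node02.com:9200',
  'index' = 'wudl_dbes'
);

-- start the sync job
INSERT INTO esTable SELECT id, name, sex, age FROM sourceMySqlTable;

Once the job has run, GET /wudl_dbes/_search should return the MySQL rows alongside the three documents inserted by hand in Section 3.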
