Migrating Apache Druid Metadata from Derby to MySQL

By default, Druid uses Derby as its metadata database. Derby is very lightweight (the core derby.jar is only about 2 MB), so it can run as a standalone database server or be embedded inside an application. However, when Druid is deployed as a cluster, or when a single-node Druid holds a large amount of data, Derby is no longer a good fit; it is perfectly fine for testing.

This document migrates the metadata of a single-node Druid service that has already been running for some time over to MySQL, and switches Druid to use MySQL as its metadata store.

The data is real: the service has been running for a few hours and has ingested 60,250,160 rows in total.

Export the metadata to CSV files
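The original steps do not show the export commands themselves. As a rough sketch, each druid_* metadata table can be dumped to CSV with Derby's ij client and Derby's built-in export procedure; here the APP schema, the upper-case table name, and the /tmp/csv output path are assumptions and may need adjusting for your installation (repeat the CALL for each table, and check how BLOB payload columns are serialized by your Derby version before relying on the files):

ij> CONNECT 'jdbc:derby://localhost:1527/var/druid/metadata.db';
-- arguments: schema name, table name, output file, column delimiter, character delimiter, codeset
-- 'APP' and 'DRUID_SEGMENTS' are assumptions about how the Druid tables are stored in Derby
ij> CALL SYSCS_UTIL.SYSCS_EXPORT_TABLE('APP', 'DRUID_SEGMENTS', '/tmp/csv/druid_segments.csv', ',', '"', 'UTF-8');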

Install MySQL

Configure it so that remote logins are allowed. For common MySQL issues, refer to the companion write-up "Common Problems Using MySQL on Linux".

Create a database (demo)

CREATE USER 'demo'@'%' IDENTIFIED BY 'demo';

CREATE DATABASE demo DEFAULT CHARACTER SET utf8;

GRANT ALL PRIVILEGES ON *.* TO 'demo'@'%' WITH GRANT OPTION;

commit;
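Optionally, you can sanity-check the new account and database from the mysql client before pointing Druid at them:

mysql> SHOW GRANTS FOR 'demo'@'%';
mysql> SHOW CREATE DATABASE demo;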

Configure Druid's metadata storage

Edit the configuration file

vim DRUID_HOME/conf-quickstart/druid/_common/common.runtime.properties

druid.extensions.loadList=["druid-kafka-indexing-service","mysql-metadata-storage"]

# For Derby server on your Druid Coordinator (only viable in a cluster with a single Coordinator, no fail-over):

#druid.metadata.storage.type=derby

#druid.metadata.storage.connector.connectURI=jdbc:derby://localhost:1527/var/druid/metadata.db;create=true

#druid.metadata.storage.connector.host=localhost

#druid.metadata.storage.connector.port=1527

# For MySQL:

druid.metadata.storage.type=mysql

druid.metadata.storage.connector.connectURI=jdbc:mysql://192.168.1.91:3306/demo

druid.metadata.storage.connector.user=demo

druid.metadata.storage.connector.password=demo

Download the MySQL metadata-storage extension and the JDBC driver (the current version does not ship the MySQL extension by default; later releases bundle it out of the box).

cd /opt/install

wget http://static.druid.io/artifacts/releases/mysql-metadata-storage-0.12.0.tar.gz

tar -zxf mysql-metadata-storage-0.12.0.tar.gz -C druid-0.12.0/extensions/

ll druid-0.12.0/extensions/mysql-metadata-storage

-rw-r--r-- 1 501 games 983911 Apr 21 2016 mysql-connector-java-5.1.38.jar

-rw-r--r-- 1 501 games 10476 Mar 8 2018 mysql-metadata-storage-0.12.0.jar

Upload the CSV files exported from Derby

Upload them to /var/lib/mysql-files/ by default; this is the directory MySQL is permitted to load files from (as controlled by the secure_file_priv setting).
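The directory that LOAD DATA INFILE is allowed to read from is controlled by secure_file_priv; if in doubt, confirm it first (on this setup it should report /var/lib/mysql-files/):

mysql> SHOW VARIABLES LIKE 'secure_file_priv';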

[root@itdeer ~]# ll /var/lib/mysql-files/csv

total 116

-rw-r--r-- 1 root root 0 Jun 12 17:59 druid_config.csv

-rw-r--r-- 1 root root 0 Jun 12 17:59 druid_config_raw.csv

-rw-r--r-- 1 root root 172 Jun 12 17:59 druid_dataSource.csv

-rw-r--r-- 1 root root 246 Jun 12 17:59 druid_dataSource_raw.csv

-rw-r--r-- 1 root root 142 Jun 12 17:59 druid_rules.csv

-rw-r--r-- 1 root root 203 Jun 12 17:59 druid_rules_raw.csv

-rw-r--r-- 1 root root 32773 Jun 12 17:59 druid_segments.csv

-rw-r--r-- 1 root root 54426 Jun 12 17:59 druid_segments_raw.csv

-rw-r--r-- 1 root root 1654 Jun 12 17:59 druid_supervisors.csv

-rw-r--r-- 1 root root 2878 Jun 12 17:59 druid_supervisors_raw.csv

The tables must exist before importing the data

By default, once Druid is up and running, all of the metadata tables already exist (table auto-creation is controlled by druid.metadata.storage.connector.createTables, which defaults to true). So you can simply start Druid's coordinator service once against the new MySQL database and let it create the metadata table structure automatically.

Alternatively, you can create them with the following statements.

/*

Navicat Premium Data Transfer

Source Server : node1

Source Server Type : MySQL

Source Server Version : 50721

Source Host : 192.168.1.91:3306

Source Schema : druid

Target Server Type : MySQL

Target Server Version : 50721

File Encoding : 65001

Date: 15/06/2020 15:34:26

*/

SET NAMES utf8mb4;

SET FOREIGN_KEY_CHECKS = 0;

-- ----------------------------

-- Table structure for druid_audit

-- ----------------------------

DROP TABLE IF EXISTS `druid_audit`;

CREATE TABLE `druid_audit` (

`id` bigint(20) NOT NULL AUTO_INCREMENT,

`audit_key` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,

`type` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,

`author` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,

`comment` varchar(2048) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,

`created_date` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,

`payload` longblob NOT NULL,

PRIMARY KEY (`id`) USING BTREE,

INDEX `idx_druid_audit_key_time`(`audit_key`, `created_date`) USING BTREE,

INDEX `idx_druid_audit_type_time`(`type`, `created_date`) USING BTREE,

INDEX `idx_druid_audit_audit_time`(`created_date`) USING BTREE

) ENGINE = InnoDB AUTO_INCREMENT = 51 CHARACTER SET = utf8 COLLATE = utf8_general_ci ROW_FORMAT = Dynamic;

-- ----------------------------

-- Table structure for druid_config

-- ----------------------------

DROP TABLE IF EXISTS `druid_config`;

CREATE TABLE `druid_config` (

`name` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,

`payload` longblob NOT NULL,

PRIMARY KEY (`name`) USING BTREE

) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci ROW_FORMAT = Dynamic;

-- ----------------------------

-- Table structure for druid_dataSource

-- ----------------------------

DROP TABLE IF EXISTS `druid_dataSource`;

CREATE TABLE `druid_dataSource` (

`dataSource` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL,

`created_date` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,

`commit_metadata_payload` longblob NOT NULL,

`commit_metadata_sha1` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,

PRIMARY KEY (`dataSource`) USING BTREE

) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci ROW_FORMAT = Dynamic;

-- ----------------------------

-- Table structure for druid_pendingSegments

-- ----------------------------

DROP TABLE IF EXISTS `druid_pendingSegments`;

CREATE TABLE `druid_pendingSegments` (

`id` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,

`dataSource` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL,

`created_date` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,

`start` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,

`end` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,

`sequence_name` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,

`sequence_prev_id` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,

`sequence_name_prev_id_sha1` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,

`payload` longblob NOT NULL,

PRIMARY KEY (`id`) USING BTREE,

UNIQUE INDEX `sequence_name_prev_id_sha1`(`sequence_name_prev_id_sha1`) USING BTREE,

INDEX `idx_druid_pendingSegments_datasource_end`(`dataSource`, `end`) USING BTREE,

INDEX `idx_druid_pendingSegments_datasource_sequence`(`dataSource`, `sequence_name`) USING BTREE

) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci ROW_FORMAT = Dynamic;

-- ----------------------------

-- Table structure for druid_rules

-- ----------------------------

DROP TABLE IF EXISTS `druid_rules`;

CREATE TABLE `druid_rules` (

`id` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,

`dataSource` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL,

`version` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,

`payload` longblob NOT NULL,

PRIMARY KEY (`id`) USING BTREE,

INDEX `idx_druid_rules_datasource`(`dataSource`) USING BTREE

) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci ROW_FORMAT = Dynamic;

-- ----------------------------

-- Table structure for druid_segments

-- ----------------------------

DROP TABLE IF EXISTS `druid_segments`;

CREATE TABLE `druid_segments` (

`id` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,

`dataSource` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL,

`created_date` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,

`start` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,

`end` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,

`partitioned` tinyint(1) NOT NULL,

`version` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,

`used` tinyint(1) NOT NULL,

`payload` longblob NOT NULL,

PRIMARY KEY (`id`) USING BTREE,

INDEX `idx_druid_segments_used`(`used`) USING BTREE,

INDEX `idx_druid_segments_datasource_used_end`(`dataSource`, `used`, `end`) USING BTREE

) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci ROW_FORMAT = Dynamic;

-- ----------------------------

-- Table structure for druid_supervisors

-- ----------------------------

DROP TABLE IF EXISTS `druid_supervisors`;

CREATE TABLE `druid_supervisors` (

`id` bigint(20) NOT NULL AUTO_INCREMENT,

`spec_id` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,

`created_date` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,

`payload` longblob NOT NULL,

PRIMARY KEY (`id`) USING BTREE,

INDEX `idx_druid_supervisors_spec_id`(`spec_id`) USING BTREE

) ENGINE = InnoDB AUTO_INCREMENT = 2 CHARACTER SET = utf8 COLLATE = utf8_general_ci ROW_FORMAT = Dynamic;

-- ----------------------------

-- Table structure for druid_tasklocks

-- ----------------------------

DROP TABLE IF EXISTS `druid_tasklocks`;

CREATE TABLE `druid_tasklocks` (

`id` bigint(20) NOT NULL AUTO_INCREMENT,

`task_id` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,

`lock_payload` longblob NULL,

PRIMARY KEY (`id`) USING BTREE,

INDEX `idx_druid_tasklocks_task_id`(`task_id`) USING BTREE

) ENGINE = InnoDB AUTO_INCREMENT = 55 CHARACTER SET = utf8 COLLATE = utf8_general_ci ROW_FORMAT = Dynamic;

-- ----------------------------

-- Table structure for druid_tasklogs

-- ----------------------------

DROP TABLE IF EXISTS `druid_tasklogs`;

CREATE TABLE `druid_tasklogs` (

`id` bigint(20) NOT NULL AUTO_INCREMENT,

`task_id` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,

`log_payload` longblob NULL,

PRIMARY KEY (`id`) USING BTREE,

INDEX `idx_druid_tasklogs_task_id`(`task_id`) USING BTREE

) ENGINE = InnoDB AUTO_INCREMENT = 1 CHARACTER SET = utf8 COLLATE = utf8_general_ci ROW_FORMAT = Dynamic;

-- ----------------------------

-- Table structure for druid_tasks

-- ----------------------------

DROP TABLE IF EXISTS `druid_tasks`;

CREATE TABLE `druid_tasks` (

`id` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,

`created_date` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,

`datasource` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL,

`payload` longblob NOT NULL,

`status_payload` longblob NOT NULL,

`active` tinyint(1) NOT NULL DEFAULT 0,

PRIMARY KEY (`id`) USING BTREE,

INDEX `idx_druid_tasks_active_created_date`(`active`, `created_date`) USING BTREE

) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci ROW_FORMAT = Dynamic;

SET FOREIGN_KEY_CHECKS = 1;
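Whichever way the tables were created, you can confirm that all of the druid_* tables now exist in the demo database before importing anything:

mysql> USE demo;
mysql> SHOW TABLES LIKE 'druid%';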

Import the files

mysql -u demo -p

mysql> LOAD DATA INFILE '/var/lib/mysql-files/csv/druid_segments.csv' INTO TABLE druid_segments FIELDS TERMINATED BY ',' OPTIONALLY ENCLOSED BY '\"' (id,dataSource,created_date,start,end,partitioned,version,used,payload); SHOW WARNINGS;

Query OK, 41 rows affected (0.01 sec)

Records: 41 Deleted: 0 Skipped: 0 Warnings: 0

Empty set (0.00 sec)

mysql> LOAD DATA INFILE '/var/lib/mysql-files/csv/druid_rules.csv' INTO TABLE druid_rules FIELDS TERMINATED BY ',' OPTIONALLY ENCLOSED BY '\"' (id,dataSource,version,payload); SHOW WARNINGS;

Query OK, 1 row affected (0.00 sec)

Records: 1 Deleted: 0 Skipped: 0 Warnings: 0

Empty set (0.00 sec)

mysql> LOAD DATA INFILE '/var/lib/mysql-files/csv/druid_config.csv' INTO TABLE druid_config FIELDS TERMINATED BY ',' OPTIONALLY ENCLOSED BY '\"' (name,payload); SHOW WARNINGS;

Query OK, 0 rows affected (0.00 sec)

Records: 0 Deleted: 0 Skipped: 0 Warnings: 0

Empty set (0.00 sec)

mysql> LOAD DATA INFILE '/var/lib/mysql-files/csv/druid_dataSource.csv' INTO TABLE druid_dataSource FIELDS TERMINATED BY ',' OPTIONALLY ENCLOSED BY '\"' (dataSource,created_date,commit_metadata_payload,commit_metadata_sha1); SHOW WARNINGS;

Query OK, 1 row affected (0.01 sec)

Records: 1 Deleted: 0 Skipped: 0 Warnings: 0

Empty set (0.00 sec)

mysql> LOAD DATA INFILE '/var/lib/mysql-files/csv/druid_supervisors.csv' INTO TABLE druid_supervisors FIELDS TERMINATED BY ',' OPTIONALLY ENCLOSED BY '\"' (id,spec_id,created_date,payload); SHOW WARNINGS;

Query OK, 2 rows affected (0.00 sec)

Records: 2 Deleted: 0 Skipped: 0 Warnings: 0

Empty set (0.00 sec)
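As a quick check on the import, compare the row counts with the "Records" figures reported by each LOAD DATA statement above, for example:

mysql> SELECT COUNT(*) FROM druid_segments;
mysql> SELECT COUNT(*) FROM druid_rules;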

Check the results

Open the web console (http://IP:8081). The Druid DataSource console shows the same datasources as before.


Query the data (the row count is correct as well).
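For example, a simple count over the migrated datasource should return the same 60,250,160 rows as before the migration (the datasource name below is a placeholder; run the query through whichever query tool you were using before, or via Druid SQL if it is enabled):

-- "my_datasource" is a placeholder for the actual datasource name
SELECT COUNT(*) FROM "my_datasource";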


At this point, switching Druid's metadata store from Derby to MySQL is complete.
