kettle连接达梦资源库-达梦资源库初始化SQL

kettle 连接达梦资源库时,经常出现连接成功但未自动初始化 SQL 的情况,因此这里特地提供一份可用的初始化 SQL。本人已在 DM8 中亲测成功,并已正常使用。

直接上正文(将下面的 SQL 语句复制后,直接在达梦数据库中运行即可):


-- R_CLUSTER: one row per Kettle cluster schema definition.
-- Port/buffer/interval columns are VARCHAR2 rather than numeric —
-- presumably because Kettle allows variable expressions (e.g. ${PORT})
-- in these fields; verify against the Spoon UI.
-- CHAR(1) columns throughout this script are boolean flags
-- (presumably 'Y'/'N' as Kettle stores booleans — confirm).
create table R_CLUSTER
(
  id_cluster             INTEGER not null,
  name                   VARCHAR2(255),
  base_port              VARCHAR2(255),
  sockets_buffer_size    VARCHAR2(255),
  sockets_flush_interval VARCHAR2(255),
  sockets_compressed     CHAR(1),
  dynamic_cluster        CHAR(1)
)
;
alter table R_CLUSTER
  add primary key (ID_CLUSTER);


-- R_CLUSTER_SLAVE: many-to-many link between R_CLUSTER and R_SLAVE.
-- No foreign-key constraints are declared anywhere in this script;
-- referential integrity is presumably enforced by Kettle itself.
create table R_CLUSTER_SLAVE
(
  id_cluster_slave INTEGER not null,
  id_cluster       INTEGER,
  id_slave         INTEGER
)
;
alter table R_CLUSTER_SLAVE
  add primary key (ID_CLUSTER_SLAVE);


-- R_CONDITION: condition tree used for row filtering.
-- ID_CONDITION_PARENT makes the table self-referencing (nested conditions);
-- ID_VALUE_RIGHT appears to reference R_VALUE (defined later in this script).
create table R_CONDITION
(
  id_condition        INTEGER not null,
  id_condition_parent INTEGER,
  negated             CHAR(1),
  operator            VARCHAR2(255),
  left_name           VARCHAR2(255),
  condition_function  VARCHAR2(255),
  right_name          VARCHAR2(255),
  id_value_right      INTEGER
)
;
alter table R_CONDITION
  add primary key (ID_CONDITION);

-- R_DATABASE: saved database connections. ID_DATABASE_TYPE and
-- ID_DATABASE_CONTYPE reference the lookup tables seeded at the bottom
-- of this script (R_DATABASE_TYPE / R_DATABASE_CONTYPE).
-- NOTE(review): PASSWORD is a plain VARCHAR2 column; Kettle presumably
-- stores it obfuscated ("Encrypted ..." prefix) — confirm before relying
-- on its contents.
create table R_DATABASE
(
  id_database         INTEGER not null,
  name                VARCHAR2(255),
  id_database_type    INTEGER,
  id_database_contype INTEGER,
  host_name           VARCHAR2(255),
  port                INTEGER,
  username            VARCHAR2(255),
  password            VARCHAR2(255),
  servername          VARCHAR2(255),
  data_tbs            VARCHAR2(255),
  index_tbs           VARCHAR2(255),
  database_name       VARCHAR2(255)
)
;
alter table R_DATABASE
  add primary key (ID_DATABASE);


-- R_DATABASE_ATTRIBUTE: extra key/value options per connection;
-- one value per (connection, attribute code), enforced by IDX_RDAT.
create table R_DATABASE_ATTRIBUTE
(
  id_database_attribute INTEGER not null,
  id_database           INTEGER,
  code                  VARCHAR2(255),
  value_str             VARCHAR2(255)
)
;
create unique index IDX_RDAT on R_DATABASE_ATTRIBUTE (ID_DATABASE, CODE);
alter table R_DATABASE_ATTRIBUTE
  add primary key (ID_DATABASE_ATTRIBUTE);

-- R_DATABASE_CONTYPE: lookup of connection access types (Native/ODBC/...),
-- seeded later in this script.
create table R_DATABASE_CONTYPE
(
  id_database_contype INTEGER not null,
  code                VARCHAR2(255),
  description         VARCHAR2(255)
)
;
alter table R_DATABASE_CONTYPE
  add primary key (ID_DATABASE_CONTYPE);

-- R_DATABASE_TYPE: lookup of supported database dialects, seeded later
-- in this script.
create table R_DATABASE_TYPE
(
  id_database_type INTEGER not null,
  code             VARCHAR2(255),
  description      VARCHAR2(255)
)
;
alter table R_DATABASE_TYPE
  add primary key (ID_DATABASE_TYPE);


-- R_DEPENDENCY: per-transformation dependency on a (database, table, field)
-- triple, used by Kettle's "max date" dependency mechanism — see the
-- *_maxdate columns on R_TRANSFORMATION below.
create table R_DEPENDENCY
(
  id_dependency     INTEGER not null,
  id_transformation INTEGER,
  id_database       INTEGER,
  table_name        VARCHAR2(255),
  field_name        VARCHAR2(255)
)
;
alter table R_DEPENDENCY
  add primary key (ID_DEPENDENCY);


-- R_DIRECTORY: repository folder tree; self-referencing via
-- ID_DIRECTORY_PARENT. IDX_RDIR guarantees sibling names are unique.
create table R_DIRECTORY
(
  id_directory        INTEGER not null,
  id_directory_parent INTEGER,
  directory_name      VARCHAR2(255)
)
;
create unique index IDX_RDIR on R_DIRECTORY (ID_DIRECTORY_PARENT, DIRECTORY_NAME);
alter table R_DIRECTORY
  add primary key (ID_DIRECTORY);


-- R_ELEMENT / R_ELEMENT_ATTRIBUTE / R_ELEMENT_TYPE: generic metastore-style
-- element storage. Names use VARCHAR2(1999) instead of 255, matching the
-- wider limit Kettle's generated DDL uses for these tables.
create table R_ELEMENT
(
  id_element      INTEGER not null,
  id_element_type INTEGER,
  name            VARCHAR2(1999)
)
;
alter table R_ELEMENT
  add primary key (ID_ELEMENT);


-- Attributes of an element; self-referencing via
-- ID_ELEMENT_ATTRIBUTE_PARENT to allow nested attribute groups.
create table R_ELEMENT_ATTRIBUTE
(
  id_element_attribute        INTEGER not null,
  id_element                  INTEGER,
  id_element_attribute_parent INTEGER,
  attr_key                    VARCHAR2(255),
  attr_value                  VARCHAR2(255)
)
;
alter table R_ELEMENT_ATTRIBUTE
  add primary key (ID_ELEMENT_ATTRIBUTE);


-- Element types, scoped to a namespace (see R_NAMESPACE below).
create table R_ELEMENT_TYPE
(
  id_element_type INTEGER not null,
  id_namespace    INTEGER,
  name            VARCHAR2(1999),
  description     VARCHAR2(255)
)
;
alter table R_ELEMENT_TYPE
  add primary key (ID_ELEMENT_TYPE);

-- R_JOB: one row per saved job (header metadata; the entries themselves
-- live in the R_JOBENTRY* tables below).
create table R_JOB
(
  id_job               INTEGER not null,
  id_directory         INTEGER,
  name                 VARCHAR2(255),
  job_version          VARCHAR2(255),
  job_status           INTEGER,
  id_database_log      INTEGER,
  table_name_log       VARCHAR2(255),
  created_user         VARCHAR2(255),
  created_date         DATE,
  modified_user        VARCHAR2(255),
  modified_date        DATE,
  use_batch_id         CHAR(1),
  pass_batch_id        CHAR(1),
  use_logfield         CHAR(1),
  shared_file          VARCHAR2(255),
  description          VARCHAR2(255),
  extended_description VARCHAR2(255)
)
;
alter table R_JOB
  add primary key (ID_JOB);


-- R_JOBENTRY: job entry definitions belonging to a job.
create table R_JOBENTRY
(
  id_jobentry      INTEGER not null,
  id_job           INTEGER,
  id_jobentry_type INTEGER,
  name             VARCHAR2(255),
  description      VARCHAR2(255)
)
;
alter table R_JOBENTRY
  add primary key (ID_JOBENTRY);


-- R_JOBENTRY_ATTRIBUTE: per-entry key/value attributes.
-- NOTE(review): IDX_RJEA is keyed on ID_JOBENTRY_ATTRIBUTE (already the PK),
-- unlike IDX_JATT/IDX_RSAT which key on the parent id. This mirrors the DDL
-- Kettle itself generates, so it is deliberately left as-is — verify against
-- your Kettle version before "fixing" it.
create table R_JOBENTRY_ATTRIBUTE
(
  id_jobentry_attribute INTEGER not null,
  id_job                INTEGER,
  id_jobentry           INTEGER,
  nr                    INTEGER,
  code                  VARCHAR2(255),
  value_num             NUMBER(13,2),
  value_str             VARCHAR2(255)
)
;
create unique index IDX_RJEA on R_JOBENTRY_ATTRIBUTE (ID_JOBENTRY_ATTRIBUTE, CODE, NR);
alter table R_JOBENTRY_ATTRIBUTE
  add primary key (ID_JOBENTRY_ATTRIBUTE);


-- R_JOBENTRY_COPY: placed copies of entries on the job canvas
-- (GUI_LOCATION_* hold canvas coordinates).
create table R_JOBENTRY_COPY
(
  id_jobentry_copy INTEGER not null,
  id_jobentry      INTEGER,
  id_job           INTEGER,
  id_jobentry_type INTEGER,
  nr               INTEGER,
  gui_location_x   INTEGER,
  gui_location_y   INTEGER,
  gui_draw         CHAR(1),
  parallel         CHAR(1)
)
;
alter table R_JOBENTRY_COPY
  add primary key (ID_JOBENTRY_COPY);


-- R_JOBENTRY_DATABASE: which database connections a job entry uses.
-- Pure link table: no primary key, only plain lookup indexes.
create table R_JOBENTRY_DATABASE
(
  id_job      INTEGER,
  id_jobentry INTEGER,
  id_database INTEGER
)
;
create index IDX_RJD1 on R_JOBENTRY_DATABASE (ID_JOB);
create index IDX_RJD2 on R_JOBENTRY_DATABASE (ID_DATABASE);


-- R_JOBENTRY_TYPE: lookup of job entry types, seeded near the end of
-- this script.
create table R_JOBENTRY_TYPE
(
  id_jobentry_type INTEGER not null,
  code             VARCHAR2(255),
  description      VARCHAR2(255)
)
;
alter table R_JOBENTRY_TYPE
  add primary key (ID_JOBENTRY_TYPE);


-- R_JOB_ATTRIBUTE: per-job key/value attributes; IDX_JATT enforces one
-- value per (job, code, nr).
create table R_JOB_ATTRIBUTE
(
  id_job_attribute INTEGER not null,
  id_job           INTEGER,
  nr               INTEGER,
  code             VARCHAR2(255),
  value_num        INTEGER,
  value_str        VARCHAR2(255)
)
;
create unique index IDX_JATT on R_JOB_ATTRIBUTE (ID_JOB, CODE, NR);
alter table R_JOB_ATTRIBUTE
  add primary key (ID_JOB_ATTRIBUTE);

-- R_JOB_HOP: directed links (hops) between two job entry copies.
create table R_JOB_HOP
(
  id_job_hop            INTEGER not null,
  id_job                INTEGER,
  id_jobentry_copy_from INTEGER,
  id_jobentry_copy_to   INTEGER,
  enabled               CHAR(1),
  evaluation            CHAR(1),
  unconditional         CHAR(1)
)
;
alter table R_JOB_HOP
  add primary key (ID_JOB_HOP);


-- R_JOB_LOCK: edit locks taken on jobs (who, when, why).
create table R_JOB_LOCK
(
  id_job_lock  INTEGER not null,
  id_job       INTEGER,
  id_user      INTEGER,
  lock_date    DATE,
  lock_message VARCHAR2(1000)
)
;
alter table R_JOB_LOCK
  add primary key (ID_JOB_LOCK);


-- R_JOB_NOTE: job-to-note link table (no PK, like the other *_NOTE tables).
create table R_JOB_NOTE
(
  id_job  INTEGER,
  id_note INTEGER
)
;


-- R_LOG: saved logging configurations.
-- NOTE(review): the column name "fileextention" is misspelled, but it
-- matches Kettle's own repository schema — keep it byte-identical or the
-- client will not find the column.
create table R_LOG
(
  id_log          INTEGER not null,
  name            VARCHAR2(255),
  id_loglevel     INTEGER,
  logtype         VARCHAR2(255),
  filename        VARCHAR2(255),
  fileextention   VARCHAR2(255),
  add_date        CHAR(1),
  add_time        CHAR(1),
  id_database_log INTEGER,
  table_name_log  VARCHAR2(255)
)
;
alter table R_LOG
  add primary key (ID_LOG);


-- R_LOGLEVEL: lookup of log levels, seeded near the end of this script.
create table R_LOGLEVEL
(
  id_loglevel INTEGER not null,
  code        VARCHAR2(255),
  description VARCHAR2(255)
)
;
alter table R_LOGLEVEL
  add primary key (ID_LOGLEVEL);

-- R_NAMESPACE: namespaces for the element/metastore tables above.
create table R_NAMESPACE
(
  id_namespace INTEGER not null,
  name         VARCHAR2(1999)
)
;
alter table R_NAMESPACE
  add primary key (ID_NAMESPACE);


-- R_NOTE: canvas sticky notes with full font/color/position styling;
-- linked to jobs and transformations via R_JOB_NOTE / R_TRANS_NOTE.
create table R_NOTE
(
  id_note                      INTEGER not null,
  gui_location_x               INTEGER,
  gui_location_y               INTEGER,
  gui_location_width           INTEGER,
  gui_location_height          INTEGER,
  font_size                    INTEGER,
  font_bold                    CHAR(1),
  font_italic                  CHAR(1),
  font_color_red               INTEGER,
  font_color_green             INTEGER,
  font_color_blue              INTEGER,
  font_back_ground_color_red   INTEGER,
  font_back_ground_color_green INTEGER,
  font_back_ground_color_blue  INTEGER,
  font_border_color_red        INTEGER,
  font_border_color_green      INTEGER,
  font_border_color_blue       INTEGER,
  draw_shadow                  CHAR(1),
  value_str                    VARCHAR2(1000),
  font_name                    VARCHAR2(1000)
)
;
alter table R_NOTE
  add primary key (ID_NOTE);


-- R_PARTITION: individual partition ids belonging to a partition schema.
create table R_PARTITION
(
  id_partition        INTEGER not null,
  id_partition_schema INTEGER,
  partition_id        VARCHAR2(255)
)
;
alter table R_PARTITION
  add primary key (ID_PARTITION);


-- R_PARTITION_SCHEMA: named partition schemas; PARTITIONS_PER_SLAVE is a
-- VARCHAR2, presumably to allow variable expressions — confirm.
create table R_PARTITION_SCHEMA
(
  id_partition_schema  INTEGER not null,
  name                 VARCHAR2(255),
  dynamic_definition   CHAR(1),
  partitions_per_slave VARCHAR2(255)
)
;
alter table R_PARTITION_SCHEMA
  add primary key (ID_PARTITION_SCHEMA);

-- R_REPOSITORY_LOG: audit trail of repository operations; one seed row is
-- inserted at the bottom of this script to record the creation.
create table R_REPOSITORY_LOG
(
  id_repository_log INTEGER not null,
  rep_version       VARCHAR2(255),
  log_date          DATE,
  log_user          VARCHAR2(255),
  operation_desc    VARCHAR2(1000)
)
;
alter table R_REPOSITORY_LOG
  add primary key (ID_REPOSITORY_LOG);


-- R_SLAVE: Carte slave server connection details. PORT and PROXY_PORT are
-- VARCHAR2, presumably to allow variable expressions — confirm.
create table R_SLAVE
(
  id_slave        INTEGER not null,
  name            VARCHAR2(255),
  host_name       VARCHAR2(255),
  port            VARCHAR2(255),
  web_app_name    VARCHAR2(255),
  username        VARCHAR2(255),
  password        VARCHAR2(255),
  proxy_host_name VARCHAR2(255),
  proxy_port      VARCHAR2(255),
  non_proxy_hosts VARCHAR2(255),
  master          CHAR(1)
)
;
alter table R_SLAVE
  add primary key (ID_SLAVE);


-- R_STEP: steps belonging to a transformation, including canvas position
-- (GUI_LOCATION_*) and copy count (COPIES / COPIES_STRING).
create table R_STEP
(
  id_step           INTEGER not null,
  id_transformation INTEGER,
  name              VARCHAR2(255),
  id_step_type      INTEGER,
  distribute        CHAR(1),
  copies            INTEGER,
  gui_location_x    INTEGER,
  gui_location_y    INTEGER,
  gui_draw          CHAR(1),
  copies_string     VARCHAR2(255),
  description       VARCHAR2(1000)
)
;
alter table R_STEP
  add primary key (ID_STEP);

-- R_STEP_ATTRIBUTE: per-step key/value attributes; IDX_RSAT enforces one
-- value per (step, code, nr).
create table R_STEP_ATTRIBUTE
(
  id_step_attribute INTEGER not null,
  id_transformation INTEGER,
  id_step           INTEGER,
  nr                INTEGER,
  code              VARCHAR2(255),
  value_num         INTEGER,
  value_str         VARCHAR2(1000)
)
;
create unique index IDX_RSAT on R_STEP_ATTRIBUTE (ID_STEP, CODE, NR);
alter table R_STEP_ATTRIBUTE
  add primary key (ID_STEP_ATTRIBUTE);


-- R_STEP_DATABASE: which database connections a step uses.
-- Pure link table: no primary key, only plain lookup indexes.
create table R_STEP_DATABASE
(
  id_transformation INTEGER,
  id_step           INTEGER,
  id_database       INTEGER
)
;
create index IDX_RSD1 on R_STEP_DATABASE (ID_TRANSFORMATION);
create index IDX_RSD2 on R_STEP_DATABASE (ID_DATABASE);


-- R_STEP_TYPE: lookup of step types; partially seeded at the end of
-- this script.
create table R_STEP_TYPE
(
  id_step_type INTEGER not null,
  code         VARCHAR2(255),
  description  VARCHAR2(255),
  helptext     VARCHAR2(255)
)
;
alter table R_STEP_TYPE
  add primary key (ID_STEP_TYPE);


-- R_TRANSFORMATION: one row per saved transformation. The ID_STEP_* columns
-- point at designated steps (read/write/input/output/update); the *_maxdate
-- columns support the "max date" dependency mechanism (see R_DEPENDENCY).
create table R_TRANSFORMATION
(
  id_transformation    INTEGER not null,
  id_directory         INTEGER,
  name                 VARCHAR2(255),
  trans_version        VARCHAR2(255),
  trans_status         INTEGER,
  id_step_read         INTEGER,
  id_step_write        INTEGER,
  id_step_input        INTEGER,
  id_step_output       INTEGER,
  id_step_update       INTEGER,
  id_database_log      INTEGER,
  table_name_log       VARCHAR2(255),
  use_batchid          CHAR(1),
  use_logfield         CHAR(1),
  id_database_maxdate  INTEGER,
  table_name_maxdate   VARCHAR2(255),
  field_name_maxdate   VARCHAR2(255),
  offset_maxdate       NUMBER(12,2),
  diff_maxdate         NUMBER(12,2),
  created_user         VARCHAR2(255),
  created_date         DATE,
  modified_user        VARCHAR2(255),
  modified_date        DATE,
  size_rowset          INTEGER,
  description          VARCHAR2(255),
  extended_description VARCHAR2(255)
)
;
alter table R_TRANSFORMATION
  add primary key (ID_TRANSFORMATION);


-- R_TRANS_ATTRIBUTE: per-transformation key/value attributes; IDX_TATT
-- enforces one value per (transformation, code, nr).
create table R_TRANS_ATTRIBUTE
(
  id_trans_attribute INTEGER not null,
  id_transformation  INTEGER,
  nr                 INTEGER,
  code               VARCHAR2(255),
  value_num          INTEGER,
  value_str          VARCHAR2(1000)
)
;
create unique index IDX_TATT on R_TRANS_ATTRIBUTE (ID_TRANSFORMATION, CODE, NR);
alter table R_TRANS_ATTRIBUTE
  add primary key (ID_TRANS_ATTRIBUTE);


-- R_TRANS_CLUSTER: transformation-to-cluster link table.
create table R_TRANS_CLUSTER
(
  id_trans_cluster  INTEGER not null,
  id_transformation INTEGER,
  id_cluster        INTEGER
)
;
alter table R_TRANS_CLUSTER
  add primary key (ID_TRANS_CLUSTER);


-- R_TRANS_HOP: directed links (hops) between two steps.
create table R_TRANS_HOP
(
  id_trans_hop      INTEGER not null,
  id_transformation INTEGER,
  id_step_from      INTEGER,
  id_step_to        INTEGER,
  enabled           CHAR(1)
)
;
alter table R_TRANS_HOP
  add primary key (ID_TRANS_HOP);


-- R_TRANS_LOCK: edit locks taken on transformations (cf. R_JOB_LOCK).
create table R_TRANS_LOCK
(
  id_trans_lock     INTEGER not null,
  id_transformation INTEGER,
  id_user           INTEGER,
  lock_date         DATE,
  lock_message      VARCHAR2(1000)
)
;
alter table R_TRANS_LOCK
  add primary key (ID_TRANS_LOCK);


-- R_TRANS_NOTE: transformation-to-note link table (no PK).
create table R_TRANS_NOTE
(
  id_transformation INTEGER,
  id_note           INTEGER
)
;


-- R_TRANS_PARTITION_SCHEMA: transformation-to-partition-schema link table.
create table R_TRANS_PARTITION_SCHEMA
(
  id_trans_partition_schema INTEGER not null,
  id_transformation         INTEGER,
  id_partition_schema       INTEGER
)
;
alter table R_TRANS_PARTITION_SCHEMA
  add primary key (ID_TRANS_PARTITION_SCHEMA);

-- R_TRANS_SLAVE: transformation-to-slave-server link table.
create table R_TRANS_SLAVE
(
  id_trans_slave    INTEGER not null,
  id_transformation INTEGER,
  id_slave          INTEGER
)
;
alter table R_TRANS_SLAVE
  add primary key (ID_TRANS_SLAVE);


-- R_TRANS_STEP_CONDITION: links a step in a transformation to a row in
-- R_CONDITION (no PK).
create table R_TRANS_STEP_CONDITION
(
  id_transformation INTEGER,
  id_step           INTEGER,
  id_condition      INTEGER
)
;

-- R_USER: repository user accounts (login credentials plus enabled flag).
-- NOTE(review): PASSWORD is plain VARCHAR2; Kettle presumably stores an
-- obfuscated value — confirm before relying on its contents.
CREATE TABLE R_USER (
    ID_USER     INTEGER NOT NULL,
    LOGIN       VARCHAR2(255),
    PASSWORD    VARCHAR2(255),
    NAME        VARCHAR2(255),
    DESCRIPTION VARCHAR2(255),
    ENABLED     CHAR(1)
);

ALTER TABLE R_USER ADD PRIMARY KEY (ID_USER);


-- R_VALUE: named/typed values, referenced by R_CONDITION.ID_VALUE_RIGHT.
CREATE TABLE R_VALUE (
    ID_VALUE   INTEGER NOT NULL,
    NAME       VARCHAR2(255),
    VALUE_TYPE VARCHAR2(255),
    VALUE_STR  VARCHAR2(255),
    IS_NULL    CHAR(1)
);

ALTER TABLE R_VALUE ADD PRIMARY KEY (ID_VALUE);


-- R_VERSION: repository schema version bookkeeping.
CREATE TABLE R_VERSION (
    ID_VERSION    INTEGER NOT NULL,
    MAJOR_VERSION INTEGER,
    MINOR_VERSION INTEGER,
    UPGRADE_DATE  DATE,
    IS_UPGRADE    CHAR(1)
);

ALTER TABLE R_VERSION ADD PRIMARY KEY (ID_VERSION);


-- Seed the connection-access-type lookup (R_DATABASE_CONTYPE).
-- NOTE(review): the code ',' for id 6 ("Custom") looks like a typo, but it
-- matches the trailing "," entry in Kettle's DatabaseMeta access-type code
-- array — verify against your Kettle version before "fixing" it.
insert into R_DATABASE_CONTYPE (id_database_contype, code, description)
values (1, 'Native', 'Native (JDBC)');
insert into R_DATABASE_CONTYPE (id_database_contype, code, description)
values (2, 'ODBC', 'ODBC');
insert into R_DATABASE_CONTYPE (id_database_contype, code, description)
values (3, 'OCI', 'OCI');
insert into R_DATABASE_CONTYPE (id_database_contype, code, description)
values (4, 'Plugin', 'Plugin specific access method');
insert into R_DATABASE_CONTYPE (id_database_contype, code, description)
values (5, 'JNDI', 'JNDI');
insert into R_DATABASE_CONTYPE (id_database_contype, code, description)
values (6, ',', 'Custom');
commit;

-- Seed the database-dialect lookup (R_DATABASE_TYPE): 54 dialect rows,
-- committed as one batch. Codes must match Kettle's internal dialect ids
-- exactly — do not edit the string values.
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (1, 'DERBY', 'Apache Derby');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (2, 'AS/400', 'AS/400');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (3, 'INTERBASE', 'Borland Interbase');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (4, 'INFINIDB', 'Calpont InfiniDB');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (5, 'IMPALASIMBA', 'Cloudera Impala');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (6, 'DBASE', 'dBase III, IV or 5');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (7, 'EXASOL4', 'Exasol 4');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (8, 'EXTENDB', 'ExtenDB');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (9, 'FIREBIRD', 'Firebird SQL');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (10, 'GENERIC', 'Generic database');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (11, 'GOOGLEBIGQUERY', 'Google BigQuery');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (12, 'GREENPLUM', 'Greenplum');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (13, 'SQLBASE', 'Gupta SQL Base');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (14, 'H2', 'H2');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (15, 'HIVE', 'Hadoop Hive');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (16, 'HIVE2', 'Hadoop Hive 2/3');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (17, 'HIVEWAREHOUSE', 'Hive Warehouse Connector');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (18, 'HYPERSONIC', 'Hypersonic');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (19, 'DB2', 'IBM DB2');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (20, 'IMPALA', 'Impala');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (21, 'INFOBRIGHT', 'Infobright');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (22, 'INFORMIX', 'Informix');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (23, 'INGRES', 'Ingres');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (24, 'VECTORWISE', 'Ingres VectorWise');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (25, 'CACHE', 'Intersystems Cache');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (26, 'KINGBASEES', 'KingbaseES');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (27, 'LucidDB', 'LucidDB');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (28, 'MARIADB', 'MariaDB');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (29, 'SAPDB', 'MaxDB (SAP DB)');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (30, 'MONETDB', 'MonetDB');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (31, 'MSACCESS', 'MS Access');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (32, 'MSSQL', 'MS SQL Server');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (33, 'MSSQLNATIVE', 'MS SQL Server (Native)');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (34, 'MYSQL', 'MySQL');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (35, 'MONDRIAN', 'Native Mondrian');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (36, 'NEOVIEW', 'Neoview');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (37, 'NETEZZA', 'Netezza');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (38, 'ORACLE', 'Oracle');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (39, 'ORACLERDB', 'Oracle RDB');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (40, 'PALO', 'Palo MOLAP Server');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (41, 'KettleThin', 'Pentaho Data Services');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (42, 'POSTGRESQL', 'PostgreSQL');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (43, 'REDSHIFT', 'Redshift');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (44, 'REMEDY-AR-SYSTEM', 'Remedy Action Request System');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (45, 'SAPR3', 'SAP ERP System');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (46, 'SNOWFLAKEHV', 'Snowflake');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (47, 'SPARKSIMBA', 'SparkSQL');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (48, 'SQLITE', 'SQLite');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (49, 'SYBASE', 'Sybase');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (50, 'SYBASEIQ', 'SybaseIQ');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (51, 'TERADATA', 'Teradata');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (52, 'UNIVERSE', 'UniVerse database');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (53, 'VERTICA', 'Vertica');
insert into R_DATABASE_TYPE (id_database_type, code, description)
values (54, 'VERTICA5', 'Vertica 5+');
commit;

-- Seed the job-entry-type lookup (R_JOBENTRY_TYPE): 89 entry types,
-- committed as one batch. The CODE values are Kettle's internal plugin ids
-- and must not be changed; DESCRIPTION values are display text (here
-- partially localized in Chinese) and are runtime data — do not translate
-- or edit them.
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (1, 'EMRJobExecutorPlugin', 'Amazon EMR job executor');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (2, 'HiveJobExecutorPlugin', 'Amazon Hive job executor');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (3, 'DataRefineryBuildModel', 'Build model');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (4, 'CHECK_DB_CONNECTIONS', 'Check DB connections');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (5, 'XML_WELL_FORMED', 'Check if XML file is well formed');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (6, 'DOS_UNIX_CONVERTER', 'DOS和UNIX之间的文本转换');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (7, 'DTD_VALIDATOR', 'DTD validator');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (8, 'DummyJob', 'Example job (deprecated)');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (9, 'FTP_PUT', 'FTP 上传');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (10, 'FTP', 'FTP 下载');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (11, 'FTP_DELETE', 'FTP 删除');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (12, 'FTPS_PUT', 'FTPS 上传');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (13, 'FTPS_GET', 'FTPS 下载');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (14, 'HadoopCopyFilesPlugin', 'Hadoop copy files');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (15, 'HadoopJobExecutorPlugin', 'Hadoop job executor ');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (16, 'HL7MLLPAcknowledge', 'HL7 MLLP acknowledge');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (17, 'HL7MLLPInput', 'HL7 MLLP input');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (18, 'HTTP', 'HTTP');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (19, 'EVAL', 'JavaScript');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (20, 'MS_ACCESS_BULK_LOAD', 'MS Access bulk load (deprecated)');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (21, 'MYSQL_BULK_LOAD', 'MySQL 批量加载');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (22, 'OozieJobExecutor', 'Oozie job executor');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (23, 'PALO_CUBE_CREATE', 'Palo cube create (deprecated)');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (24, 'PALO_CUBE_DELETE', 'Palo cube delete (deprecated)');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (25, 'HadoopTransJobExecutorPlugin', 'Pentaho MapReduce');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (26, 'HadoopPigScriptExecutorPlugin', 'Pig script executor');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (27, 'PING', 'Ping 一台主机');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (28, 'GET_POP', 'POP 收信');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (29, 'DATASOURCE_PUBLISH', 'Publish model');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (30, 'SFTPPUT', 'SFTP 上传');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (31, 'SFTP', 'SFTP 下载');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (32, 'SHELL', 'Shell');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (33, 'SparkSubmit', 'Spark submit');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (34, 'SQL', 'SQL');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (35, 'MSSQL_BULK_LOAD', 'SQLServer 批量加载');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (36, 'SqoopExport', 'Sqoop export');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (37, 'SqoopImport', 'Sqoop import');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (38, 'TALEND_JOB_EXEC', 'Talend 作业执行 (deprecated)');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (39, 'XSD_VALIDATOR', 'XSD validator');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (40, 'XSLT', 'XSL transformation');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (41, 'ZIP_FILE', 'Zip 压缩文件');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (42, 'ABORT', '中止作业');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (43, 'MYSQL_BULK_FILE', '从 MySQL 批量导出到文件');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (44, 'DELETE_RESULT_FILENAMES', '从结果文件中删除文件');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (45, 'JOB', '作业');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (46, 'WRITE_TO_FILE', '写入文件');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (47, 'WRITE_TO_LOG', '写日志');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (48, 'CREATE_FOLDER', '创建一个目录');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (49, 'CREATE_FILE', '创建文件');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (50, 'DELETE_FILE', '删除一个文件');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (51, 'DELETE_FILES', '删除多个文件');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (52, 'DELETE_FOLDERS', '删除目录');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (53, 'SNMP_TRAP', '发送 SNMP 自陷');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (54, 'SEND_NAGIOS_PASSIVE_CHECK', '发送Nagios 被动检查');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (55, 'MAIL', '发送邮件');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (56, 'COPY_MOVE_RESULT_FILENAMES', '复制/移动结果文件');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (57, 'COPY_FILES', '复制文件');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (58, 'EXPORT_REPOSITORY', '导出资源库到XML文件');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (59, 'SUCCESS', '成功');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (60, 'MSGBOX_INFO', '显示消息对话框');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (61, 'WEBSERVICE_AVAILABLE', '检查web服务是否可用');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (62, 'FILE_EXISTS', '检查一个文件是否存在');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (63, 'COLUMNS_EXIST', '检查列是否存在');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (64, 'FILES_EXIST', '检查多个文件是否存在');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (65, 'CHECK_FILES_LOCKED', '检查文件是否被锁');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (66, 'CONNECTED_TO_REPOSITORY', '检查是否连接到资源库');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (67, 'FOLDER_IS_EMPTY', '检查目录是否为空');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (68, 'TABLE_EXISTS', '检查表是否存在');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (69, 'SIMPLE_EVAL', '检验字段的值');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (70, 'FILE_COMPARE', '比较文件');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (71, 'FOLDERS_COMPARE', '比较目录');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (72, 'ADD_RESULT_FILENAMES', '添加文件到结果文件中');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (73, 'TRUNCATE_TABLES', '清空表');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (74, 'SPECIAL', '特殊作业项');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (75, 'SYSLOG', '用 syslog 发送信息');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (76, 'PGP_ENCRYPT_FILES', '用PGP加密文件');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (77, 'PGP_DECRYPT_FILES', '用PGP解密文件');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (78, 'PGP_VERIFY_FILES', '用PGP验证文件签名');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (79, 'MOVE_FILES', '移动文件');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (80, 'DELAY', '等待');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (81, 'WAIT_FOR_SQL', '等待SQL');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (82, 'WAIT_FOR_FILE', '等待文件');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (83, 'UNZIP', '解压缩文件');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (84, 'EVAL_FILES_METRICS', '计算文件大小或个数');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (85, 'EVAL_TABLE_CONTENT', '计算表中的记录数');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (86, 'SET_VARIABLES', '设置变量');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (87, 'TRANS', '转换');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (88, 'TELNET', '远程登录一台主机');
insert into R_JOBENTRY_TYPE (id_jobentry_type, code, description)
values (89, 'MAIL_VALIDATOR', '邮件验证');
commit;

-- Seed the six Kettle log levels. CODE values are Kettle's internal level
-- names; the Chinese descriptions are display data and are reproduced
-- verbatim.
INSERT INTO R_LOGLEVEL (ID_LOGLEVEL, CODE, DESCRIPTION) VALUES (1, 'Error', '错误日志');
INSERT INTO R_LOGLEVEL (ID_LOGLEVEL, CODE, DESCRIPTION) VALUES (2, 'Minimal', '最小日志');
INSERT INTO R_LOGLEVEL (ID_LOGLEVEL, CODE, DESCRIPTION) VALUES (3, 'Basic', '基本日志');
INSERT INTO R_LOGLEVEL (ID_LOGLEVEL, CODE, DESCRIPTION) VALUES (4, 'Detailed', '详细日志');
INSERT INTO R_LOGLEVEL (ID_LOGLEVEL, CODE, DESCRIPTION) VALUES (5, 'Debug', '调试');
INSERT INTO R_LOGLEVEL (ID_LOGLEVEL, CODE, DESCRIPTION) VALUES (6, 'Rowlevel', '行级日志(非常详细)');
COMMIT;

-- Record the repository creation event (schema version 5.0).
INSERT INTO R_REPOSITORY_LOG (ID_REPOSITORY_LOG, REP_VERSION, LOG_DATE, LOG_USER, OPERATION_DESC)
VALUES (1, '5.0', TO_DATE('27-03-2023 15:03:36', 'dd-mm-yyyy hh24:mi:ss'), 'admin', NULL);
COMMIT;

-- Seed R_STEP_TYPE: the dictionary of transformation step types (ids 82-181).
-- NOTE(review): codes, descriptions and helptext appear to mirror Kettle's
-- built-in step registry / i18n bundles (mixed Chinese + English, including
-- upstream typos such as 'RESTfull'); keep every literal byte-identical --
-- verify against the Kettle version in use before changing any of them.
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (82, 'PGPDecryptStream', 'PGP decrypt stream', 'Decrypt data stream with PGP');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (83, 'PGPEncryptStream', 'PGP encrypt stream', 'Encrypt data stream with PGP');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (84, 'PGBulkLoader', 'PostgreSQL 批量加载', 'PostgreSQL Bulk Loader');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (85, 'Rest', 'REST client', 'Consume RESTfull services.' || chr(10) || 'REpresentational State Transfer (REST) is a key design idiom that embraces a stateless client-server' || chr(10) || 'architecture in which the web services are viewed as resources and can be identified by their URLs');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (86, 'RssInput', 'RSS 输入', 'Read RSS feeds');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (87, 'RssOutput', 'RSS 输出', 'Read RSS stream.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (88, 'RuleAccumulator', 'Rules accumulator', 'Rules accumulator step');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (89, 'RuleExecutor', 'Rules executor', 'Rules executor step');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (90, 'S3CSVINPUT', 'S3 CSV input', 'Is capable of reading CSV data stored on Amazon S3 in parallel');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (91, 'S3FileOutputPlugin', 'S3 file output', 'Create files in an S3 location');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (92, 'SalesforceDelete', 'Salesforce delete', 'Delete records in Salesforce module.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (93, 'SalesforceInput', 'Salesforce input', 'Extract data from Salesforce');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (94, 'SalesforceInsert', 'Salesforce insert', 'Insert records in Salesforce module.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (95, 'SalesforceUpdate', 'Salesforce update', 'Update records in Salesforce module.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (96, 'SalesforceUpsert', 'Salesforce upsert', 'Insert or update records in Salesforce module.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (97, 'SAPINPUT', 'SAP input', 'Read data from SAP ERP, optionally with parameters');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (98, 'SASInput', 'SAS 输入', 'This step reads files in sas7bdat (SAS) native format');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (99, 'Script', 'Script', 'Calculate values by scripting in Ruby, Python, Groovy, JavaScript, ... (JSR-223)');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (100, 'SetSessionVariableStep', 'Set session variables', 'Set session variables in the current user session.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (101, 'SFTPPut', 'SFTP put', 'Upload a file or a stream file to remote host via SFTP');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (102, 'CreateSharedDimensions', 'Shared dimension', 'Create shared dimensions for use with Streamlined Data Refinery.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (103, 'SimpleMapping', 'Simple mapping (sub-transformation)', 'Run a mapping (sub-transformation), use MappingInput and MappingOutput to specify the fields interface.  This is the simplified version only allowing one input and one output data set.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (104, 'SingleThreader', 'Single threader', 'Executes a transformation snippet in a single thread.  You need a standard mapping or a transformation with an Injector step where data from the parent transformation will arive in blocks.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (105, 'SocketWriter', 'Socket 写', 'Socket writer.  A socket server that can send rows of data to a socket reader.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (106, 'SocketReader', 'Socket 读', 'Socket reader.  A socket client that connects to a server (Socket Writer step).');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (107, 'SQLFileOutput', 'SQL 文件输出', 'Output SQL INSERT statements to file');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (108, 'SSTableOutput', 'SSTable output', 'Writes to a filesystem directory as a Cassandra SSTable');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (109, 'SwitchCase', 'Switch / case', 'Switch a row to a certain target step based on the case value in a field.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (110, 'TableAgileMart', 'Table Agile Mart', 'Load data into a table for Agile BI use cases');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (111, 'TeraFast', 'Teradata Fastload 批量加载', 'The Teradata Fastload bulk loader');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (112, 'TeraDataBulkLoader', 'Teradata TPT bulk loader', 'Teradata TPT bulkloader, using tbuild command');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (113, 'OldTextFileInput', 'Text file input', '从一个文本文件(几种格式)里读取数据{0}这些数据可以被传递到下一个步骤里...');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (114, 'TextFileOutputLegacy', 'Text file output', '写记录到一个文本文件.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (115, 'TransExecutor', 'Transformation executor', 'This step executes a Pentaho Data Integration transformation, sets parameters and passes rows.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (116, 'VerticaBulkLoader', 'Vertica bulk loader', 'Bulk load data into a Vertica database table');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (117, 'WebServiceLookup', 'Web 服务查询', '使用 Web 服务查询信息');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (118, 'XBaseInput', 'XBase输入', '从一个XBase类型的文件(DBF)读取记录');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (119, 'XMLInputStream', 'XML input stream (StAX)', 'This step is capable of processing very large and complex XML files very fast.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (120, 'XMLJoin', 'XML join', 'Joins a stream of XML-Tags into a target XML string');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (121, 'XMLOutput', 'XML output', 'Write data to an XML file');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (122, 'XSDValidator', 'XSD validator', 'Validate XML source (files or streams) against XML Schema Definition.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (123, 'XSLT', 'XSL transformation', 'Make an XSL transformation');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (124, 'YamlInput', 'YAML 输入', 'Read YAML source (file or stream) parse them and convert them to rows and writes these to one or more output.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (125, 'ZipFile', 'Zip 文件', 'Zip a file.' || chr(10) || 'Filename will be extracted from incoming stream.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (126, 'Abort', '中止', 'Abort a transformation');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (127, 'FilesFromResult', '从结果获取文件', 'This step allows you to read filenames used or generated in a previous entry in a job.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (128, 'RowsFromResult', '从结果获取记录', '这个允许你从同一个任务的前一个条目里读取记录.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (129, 'ValueMapper', '值映射', 'Maps values of a certain field from one value to another');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (130, 'Formula', '公式', '使用 Pentaho 的公式库来计算公式');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (131, 'WriteToLog', '写日志', 'Write data to log');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (132, 'AnalyticQuery', '分析查询', 'Execute analytic queries over a sorted dataset (LEAD/LAG/FIRST/LAST)');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (133, 'GroupBy', '分组', '以分组的形式创建聚合.{0}这个仅仅在一个已经排好序的输入有效.{1}如果输入没有排序, 仅仅两个连续的记录行被正确处理.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (134, 'SplitFieldToRows3', '列拆分为多行', 'Splits a single string field by delimiter and creates a new row for each split term');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (135, 'Denormaliser', '列转行', 'Denormalises rows by looking up key-value pairs and by assigning them to new fields in the输出 rows.{0}This method aggregates and needs the输入 rows to be sorted on the grouping fields');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (136, 'Delete', '删除', '基于关键字删除记录');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (137, 'Janino', '利用Janino计算Java表达式', 'Calculate the result of a Java Expression using Janino');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (138, 'StringCut', '剪切字符串', 'Strings cut (substring).');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (139, 'UnivariateStats', '单变量统计', 'This step computes some simple stats based on a single input field');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (140, 'Unique', '去除重复记录', '去除重复的记录行,保持记录唯一{0}这个仅仅基于一个已经排好序的输入.{1}如果输入没有排序, 仅仅两个连续的记录行被正确处理.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (141, 'SyslogMessage', '发送信息至syslog', 'Send message to syslog server');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (142, 'Mail', '发送邮件', 'Send eMail.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (143, 'MergeRows', '合并记录', '合并两个数据流, 并根据某个关键字排序.  这两个数据流被比较,以标识相等的、变更的、删除的和新建的记录.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (144, 'ExecProcess', '启动一个进程', 'Execute a process and return the result');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (145, 'UniqueRowsByHashSet', '唯一行 (哈希值)', 'Remove double rows and leave only unique occurrences by using a HashSet.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (146, 'FixedInput', '固定宽度文件输入', 'Fixed file input');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (147, 'MemoryGroupBy', '在内存中分组', 'Builds aggregates in a group by fashion.' || chr(10) || 'This step doesn''t require sorted input.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (148, 'Constant', '增加常量', '给记录增加一到多个常量');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (149, 'Sequence', '增加序列', '从序列获取下一个值');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (150, 'ProcessFiles', '处理文件', 'Process one file per row (copy or move or delete).' || chr(10) || 'This step only accept filename in input.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (151, 'FilesToResult', '复制文件到结果', 'This step allows you to set filenames in the result of this transformation.' || chr(10) || 'Subsequent job entries can then use this information.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (152, 'RowsToResult', '复制记录到结果', '使用这个步骤把记录写到正在执行的任务.{0}信息将会被传递给同一个任务里的下一个条目.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (153, 'SelectValues', '字段选择', '选择或移除记录里的字。{0}此外,可以设置字段的元数据: 类型, 长度和精度.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (154, 'StringOperations', '字符串操作', 'Apply certain operations like trimming, padding and others to string value.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (155, 'ReplaceString', '字符串替换', 'Replace all occurences a word in a string with another word.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (156, 'SymmetricCryptoTrans', '对称加密', 'Encrypt or decrypt a string using symmetric encryption.' || chr(10) || 'Available algorithms are DES, AES, TripleDES.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (157, 'SetValueConstant', '将字段值设置为常量', 'Set value of a field to a constant');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (158, 'Delay', '延迟行', 'Output each input row after a delay');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (159, 'DynamicSQLRow', '执行Dynamic SQL', 'Execute dynamic SQL statement build in a previous field');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (160, 'ExecSQL', '执行SQL脚本', '执行一个SQL脚本, 另外,可以使用输入的记录作为参数');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (161, 'ExecSQLRow', '执行SQL脚本(字段流替换)', 'Execute SQL script extracted from a field' || chr(10) || 'created in a previous step.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (162, 'JobExecutor', '执行作业', 'This step executes a Pentaho Data Integration job, sets parameters and passes rows.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (163, 'FieldSplitter', '拆分字段', '当你想把一个字段拆分成多个时,使用这个类型.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (164, 'SortedMerge', '排序合并', 'Sorted merge');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (165, 'SortRows', '排序记录', '基于字段值把记录排序(升序或降序)');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (166, 'InsertUpdate', '插入 / 更新', '基于关键字更新或插入记录到数据库.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (167, 'NumberRange', '数值范围', 'Create ranges based on numeric field');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (168, 'SynchronizeAfterMerge', '数据同步', 'This step perform insert/update/delete in one go based on the value of a field.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (169, 'DBLookup', '数据库查询', '使用字段值在数据库里查询值');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (170, 'DBJoin', '数据库连接', '使用数据流里的值作为参数执行一个数据库查询');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (171, 'Validator', '数据检验', 'Validates passing data based on a set of rules');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (172, 'PrioritizeStreams', '数据流优先级排序', 'Prioritize streams in an order way.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (173, 'ReservoirSampling', '数据采样', '[Transform] Samples a fixed number of rows from the incoming stream');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (174, 'LoadFileInput', '文件内容加载至内存', 'Load file content in memory');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (175, 'TextFileInput', '文本文件输入', '从一个文本文件(几种格式)里读取数据{0}这些数据可以被传递到下一个步骤里...');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (176, 'TextFileOutput', '文本文件输出', '写记录到一个文本文件.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (177, 'Mapping', '映射 (子转换)', '运行一个映射 (子转换), 使用MappingInput和MappingOutput来指定接口的字段');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (178, 'MappingInput', '映射输入规范', '指定一个映射的字段输入');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (179, 'MappingOutput', '映射输出规范', '指定一个映射的字段输出');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (180, 'Update', '更新', '基于关键字更新记录到数据库');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (181, 'IfNull', '替换NULL值', 'Sets a field value to a constant if it is null.');
-- Intermediate commit; the remaining step types continue in the next batch.
commit;

-- Seed R_STEP_TYPE, continued: ids 182-233, then the sequence restarts at
-- ids 1-48 (the script inserts the alphabetically-later codes first; ids are
-- explicit, so ordering has no effect on the stored data).
-- NOTE(review): literals mirror Kettle's i18n bundles (mixed Chinese/English,
-- including upstream typos such as 'valide'); keep them byte-identical.
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (182, 'SampleRows', '样本行', 'Filter rows based on the line number.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (183, 'JavaFilter', '根据Java代码过滤记录', 'Filter rows using java code');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (184, 'FieldsChangeSequence', '根据字段值来改变序列', 'Add sequence depending of fields value change.' || chr(10) || 'Each time value of at least one field change, PDI will reset sequence.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (185, 'WebServiceAvailable', '检查web服务是否可用', 'Check if a webservice is available');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (186, 'FileExists', '检查文件是否存在', 'Check if a file exists');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (187, 'FileLocked', '检查文件是否已被锁定', 'Check if a file is locked by another process');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (188, 'TableExists', '检查表是否存在', 'Check if a table exists on a specified connection');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (189, 'DetectEmptyStream', '检测空流', 'This step will output one empty row if input stream is empty' || chr(10) || '(ie when input stream does not contain any row)');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (190, 'CreditCardValidator', '检验信用卡号码是否有效', 'The Credit card validator step will help you tell:' || chr(10) || '(1) if a credit card number is valid (uses LUHN10 (MOD-10) algorithm)' || chr(10) || '(2) which credit card vendor handles that number' || chr(10) || '(VISA, MasterCard, Diners Club, EnRoute, American Express (AMEX),...)');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (191, 'MailValidator', '检验邮件地址', 'Check if an email address is valid.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (192, 'FuzzyMatch', '模糊匹配', 'Finding approximate matches to a string using matching algorithms.' || chr(10) || 'Read a field from a main stream and output approximative value from lookup stream.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (193, 'RegexEval', '正则表达式', 'Regular expression Evaluation' || chr(10) || 'This step uses a regular expression to evaluate a field. It can also extract new fields out of an existing field with capturing groups.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (194, 'TableCompare', '比较表', 'Compares 2 tables and gives back a list of differences');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (195, 'StreamLookup', '流查询', '从转换中的其它流里查询值.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (196, 'StepMetastructure', '流的元数据', 'This is a step to read the metadata of the incoming stream.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (197, 'SecretKeyGenerator', '生成密钥', 'Generate secret key for algorithms such as DES, AES, TripleDES.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (198, 'RowGenerator', '生成记录', '产生一些空记录或相等的行.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (199, 'RandomValue', '生成随机数', 'Generate random value');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (200, 'RandomCCNumberGenerator', '生成随机的信用卡号', 'Generate random valide (luhn check) credit card numbers');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (201, 'Dummy', '空操作 (什么也不做)', '这个步骤类型什么都不作.{0} 当你想测试或拆分数据流的时候有用.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (202, 'DimensionLookup', '维度查询/更新', '在一个数据仓库里更新一个渐变维 {0} 或者在这个维里查询信息.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (203, 'CombinationLookup', '联合查询/更新', '更新数据仓库里的一个junk维 {0} 可选的, 科研查询维里的信息.{1}junk维的主键是所有的字段.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (204, 'AutoDoc', '自动文档输出', 'This step automatically generates documentation based on input in the form of a list of transformations and jobs');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (205, 'DataGrid', '自定义常量数据', 'Enter rows of static data in a grid, usually for testing, reference or demo purpose');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (206, 'GetVariable', '获取变量', 'Determine the values of certain (environment or Kettle) variables and put them in field values.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (207, 'GetSubFolders', '获取子目录名', 'Read a parent folder and return all subfolders');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (208, 'GetFileNames', '获取文件名', 'Get file names from the operating system and send them to the next step.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (209, 'GetFilesRowsCount', '获取文件行数', 'Returns rows count for text files.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (210, 'SystemInfo', '获取系统信息', '获取系统信息,例如时间、日期.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (211, 'GetTableNames', '获取表名', 'Get table names from database connection and send them to the next step');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (212, 'GetRepositoryNames', '获取资源库配置', 'Lists detailed information about transformations and/or jobs in a repository');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (213, 'Flattener', '行扁平化', 'Flattens consequetive rows based on the order in which they appear in the输入 stream');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (214, 'Normaliser', '行转列', 'De-normalised information can be normalised using this step type.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (215, 'TableInput', '表输入', '从数据库表里读取信息.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (216, 'TableOutput', '表输出', '写信息到一个数据库表');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (217, 'Calculator', '计算器', '通过执行简单的计算创建一个新字段');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (218, 'JoinRows', '记录关联 (笛卡尔输出)', '这个步骤的输出是输入流的笛卡尔的结果.{0} 输出结果的记录数是输入流记录之间的乘积.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (219, 'Injector', '记录注射', 'Injector step to allow to inject rows into the transformation through the java API');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (220, 'MergeJoin', '记录集连接', 'Joins two streams on a given key and outputs a joined set. The input streams must be sorted on the join key');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (221, 'NullIf', '设置值为NULL', '如果一个字段值等于某个固定值,那么把这个字段值设置成null');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (222, 'SetVariable', '设置变量', 'Set environment variables based on a single input row.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (223, 'SetValueField', '设置字段值', 'Set value of a field with another value field');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (224, 'DetectLastRow', '识别流的最后一行', 'Last row will be marked');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (225, 'DBProc', '调用DB存储过程', '通过调用数据库存储过程获得返回值.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (226, 'StepsMetrics', '转换步骤信息统计', 'Return metrics for one or several steps');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (227, 'FilterRows', '过滤记录', '使用简单的相等来过滤记录');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (228, 'SSH', '运行SSH命令', 'Run SSH commands and returns result.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (229, 'Append', '追加流', 'Append 2 streams in an ordered way');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (230, 'MailInput', '邮件信息输入', 'Read POP3/IMAP server and retrieve messages');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (231, 'PropertyInput', '配置文件输入', 'Read data (key, value) from properties files.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (232, 'PropertyOutput', '配置文件输出', 'Write data to properties file');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (233, 'BlockUntilStepsFinish', '阻塞数据直到步骤都完成', 'Block this step until selected steps finish.');
-- Id sequence restarts at 1 below; these are distinct rows, not duplicates.
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (1, 'AccessInput', 'Access 输入', 'Read data from a Microsoft Access file');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (2, 'AccessOutput', 'Access 输出', 'Stores records into an MS-Access database table.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (3, 'CheckSum', 'Add a checksum', 'Add a checksum column for each input row');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (4, 'AddXML', 'Add XML', 'Encode several fields into an XML fragment');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (5, 'FieldMetadataAnnotation', 'Annotate stream', 'Add more details to describe data for published models used by the Streamlined Data Refinery.');
-- Ids 6 and 7 share the description 'Avro input' but have distinct codes
-- (legacy vs. newer Avro plugin); presumably intentional -- verify upstream.
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (6, 'AvroInput', 'Avro input', 'Reads data from an Avro file');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (7, 'AvroInputNew', 'Avro input', 'Reads data from Avro file');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (8, 'AvroOutput', 'Avro output', 'Writes data to an Avro file according to a mapping');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (9, 'BlockingStep', 'Blocking step', 'The Blocking step blocks all output until the very last row is received from the previous step.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (10, 'CallEndpointStep', 'Call endpoint', 'Call an endpoint of the Pentaho Server.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (11, 'CassandraInput', 'Cassandra input', 'Reads data from a Cassandra table');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (12, 'CassandraOutput', 'Cassandra output', 'Writes to a Cassandra table');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (13, 'ChangeFileEncoding', 'Change file encoding', 'Change file encoding and create a new file');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (14, 'CloneRow', 'Clone row', 'Clone a row as many times as needed');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (15, 'ClosureGenerator', 'Closure generator', 'This step allows you to generates a closure table using parent-child relationships.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (16, 'ColumnExists', 'Column exists', 'Check if a column exists');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (17, 'ConcatFields', 'Concat fields', 'Concat fields together into a new field (similar to the Text File Output step)');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (18, 'CouchDbInput', 'CouchDB input', 'Reads from a Couch DB view');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (19, 'CsvInput', 'CSV文件输入', 'Simple CSV file input');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (20, 'CubeInput', 'Cube 文件输入', '从一个cube读取记录.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (21, 'CubeOutput', 'Cube输出', '把数据写入一个cube');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (22, 'TypeExitEdi2XmlStep', 'EDI to XML', 'Converts Edi text to generic XML');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (23, 'ElasticSearchBulk', 'Elasticsearch bulk insert', 'Performs bulk inserts into ElasticSearch');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (24, 'ShapeFileReader', 'ESRI shapefile reader', 'Reads shape file data from an ESRI shape file and linked DBF file');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (25, 'MetaInject', 'ETL metadata injection', 'ETL元数据注入');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (26, 'DummyStep', 'Example step', 'This is a plugin example step');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (27, 'ExcelInput', 'Excel输入', '从一个微软的Excel文件里读取数据. 兼容Excel 95, 97 and 2000.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (28, 'ExcelOutput', 'Excel输出', 'Stores records into an Excel (XLS) document with formatting information.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (29, 'getXMLData', 'Get data from XML', 'Get data from XML file by using XPath.' || chr(10) || ' This step also allows you to parse XML defined in a previous field.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (30, 'GetSlaveSequence', 'Get ID from slave server', 'Retrieves unique IDs in blocks from a slave server.  The referenced sequence needs to be configured on the slave server in the XML configuration file.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (31, 'RecordsFromStream', 'Get records from stream', 'This step allows you to read records from a streaming step.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (32, 'GetSessionVariableStep', 'Get session variables', 'Get session variables from the current user session.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (33, 'TypeExitGoogleAnalyticsInputStep', 'Google Analytics', 'Fetches data from google analytics account');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (34, 'GPBulkLoader', 'Greenplum bulk loader', 'Greenplum bulk loader');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (35, 'GPLoad', 'Greenplum load', 'Greenplum load');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (36, 'ParallelGzipCsvInput', 'GZIP CSV input', 'Parallel GZIP CSV file input reader');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (37, 'HadoopFileInputPlugin', 'Hadoop file input', 'Process files from an HDFS location');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (38, 'HadoopFileOutputPlugin', 'Hadoop file output', 'Create files in an HDFS location ');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (39, 'HBaseInput', 'HBase input', 'Reads data from a HBase table according to a mapping ');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (40, 'HBaseOutput', 'HBase output', 'Writes data to an HBase table according to a mapping');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (41, 'HBaseRowDecoder', 'HBase row decoder', 'Decodes an incoming key and HBase result object according to a mapping ');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (42, 'HL7Input', 'HL7 input', 'Reads and parses HL7 messages and outputs a series of values from the messages');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (43, 'HTTP', 'HTTP client', 'Call a web service over HTTP by supplying a base URL by allowing parameters to be set dynamically');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (44, 'HTTPPOST', 'HTTP post', 'Call a web service request over HTTP by supplying a base URL by allowing parameters to be set dynamically');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (45, 'InfobrightOutput', 'Infobright 批量加载', 'Load data to an Infobright database table');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (46, 'VectorWiseBulkLoader', 'Ingres VectorWise 批量加载', 'This step interfaces with the Ingres VectorWise Bulk Loader "COPY TABLE" command.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (47, 'UserDefinedJavaClass', 'Java 代码', 'This step allows you to program a step using Java code');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (48, 'ScriptValueMod', 'JavaScript代码', 'This is a modified plugin for the Scripting Values with improved interface and performance.' || chr(10) || 'Written & donated to open source by Martin Lange, Proconis : http://www.proconis.de');
-- Intermediate commit; the remaining step types continue in the next batch.
commit;

-- Seed rows 49-81 of R_STEP_TYPE (continuation of the step-type catalog).
-- Same column semantics as the previous batch: code must match the plugin
-- registration id; description/helptext are UI strings (some localized to
-- Chinese in this distribution -- they are data, not comments).
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (49, 'Jms2Consumer', 'JMS consumer', 'Consumes JMS streams');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (50, 'Jms2Producer', 'JMS producer', 'Produces JMS streams');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (51, 'JsonInput', 'JSON input', 'Extract relevant portions out of JSON structures (file or incoming field) and output rows');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (52, 'JsonOutput', 'JSON output', 'Create JSON block and output it in a field or a file.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (53, 'KafkaConsumerInput', 'Kafka consumer', 'Consume messages from a Kafka topic');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (54, 'KafkaProducerOutput', 'Kafka producer', 'Produce messages to a Kafka topic');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (55, 'LDAPInput', 'LDAP 输入', 'Read data from LDAP host');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (56, 'LDAPOutput', 'LDAP 输出', 'Perform Insert, upsert, update, add or delete operations on records based on their DN (Distinguished  Name).');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (57, 'LDIFInput', 'LDIF 输入', 'Read data from LDIF files');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (58, 'LucidDBStreamingLoader', 'LucidDB streaming loader', 'Load data into LucidDB by using Remote Rows UDX.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (59, 'HadoopEnterPlugin', 'MapReduce input', 'Enter a Hadoop Mapper or Reducer transformation');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (60, 'HadoopExitPlugin', 'MapReduce output', 'Exit a Hadoop Mapper or Reducer transformation ');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (61, 'TypeExitExcelWriterStep', 'Microsoft Excel 输出', 'Writes or appends data to an Excel file');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (62, 'MondrianInput', 'Mondrian 输入', 'Execute and retrieve data using an MDX query against a Pentaho Analyses OLAP server (Mondrian)');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (63, 'MonetDBAgileMart', 'MonetDB Agile Mart', 'Load data into MonetDB for Agile BI use cases');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (64, 'MonetDBBulkLoader', 'MonetDB 批量加载', 'Load data into MonetDB by using their bulk load command in streaming mode.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (65, 'MongoDbInput', 'MongoDB input', 'Reads from a Mongo DB collection');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (66, 'MongoDbOutput', 'MongoDB output', 'Writes to a Mongo DB collection');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (67, 'MQTTConsumer', 'MQTT consumer', 'Subscribes and streams an MQTT Topic');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (68, 'MQTTProducer', 'MQTT producer', 'Produce messages to a MQTT Topic');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (69, 'MultiwayMergeJoin', 'Multiway merge join', 'Multiway merge join');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (70, 'MySQLBulkLoader', 'MySQL 批量加载', 'MySQL bulk loader step, loading data over a named pipe (not available on MS Windows)');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (71, 'OlapInput', 'OLAP 输入', 'Execute and retrieve data using an MDX query against any XML/A OLAP datasource using olap4j');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (72, 'OraBulkLoader', 'Oracle 批量加载', 'Use Oracle bulk loader to load data');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (73, 'OrcInput', 'ORC input', 'Reads data from ORC file');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (74, 'OrcOutput', 'ORC output', 'Writes data to an Orc file according to a mapping');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (75, 'PaloCellInput', 'Palo cell input', 'Reads data from a defined Palo Cube ');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (76, 'PaloCellOutput', 'Palo cell output', 'Writes data to a defined Palo Cube');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (77, 'PaloDimInput', 'Palo dim input', 'Reads data from a defined Palo Dimension');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (78, 'PaloDimOutput', 'Palo dim output', 'Writes data to defined Palo Dimension');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (79, 'ParquetInput', 'Parquet input', 'Reads data from a Parquet file.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (80, 'ParquetOutput', 'Parquet output', 'Writes data to a Parquet file according to a mapping.');
insert into R_STEP_TYPE (id_step_type, code, description, helptext)
values (81, 'PentahoReportingOutput', 'Pentaho 报表输出', 'Executes an existing report (PRPT)');
-- Commit this batch so a mid-script failure leaves earlier rows persisted.
commit;

-- Default repository accounts. The password column holds Kettle-obfuscated
-- strings, not plaintext -- NOTE(review): these look like the stock Kettle
-- defaults ('admin'/'admin', 'guest'/'guest'); change them after first login.
-- enabled = '1' marks the account as active.
insert into R_USER (id_user, login, password, name, description, enabled)
values (1, 'admin', '2be98afc86aa7f2e4cb79ce71da9fa6d4', 'Administrator', 'User manager', '1');
insert into R_USER (id_user, login, password, name, description, enabled)
values (2, 'guest', '2be98afc86aa7f2e4cb79ce77cb97bcce', 'Guest account', 'Read-only guest account', '1');
commit;

-- Repository schema version marker (major 5, minor 0). Kettle reads this row
-- on connect to decide whether the repository needs initialization/upgrade;
-- upgrade_date records when this schema was created (to_date is Oracle-style,
-- supported by DM8's Oracle-compatible mode). is_upgrade = '0' = fresh install.
insert into R_VERSION (id_version, major_version, minor_version, upgrade_date, is_upgrade)
values (1, 5, 0, to_date('27-03-2023 15:03:37', 'dd-mm-yyyy hh24:mi:ss'), '0');
commit;

-- End of Kettle (PDI) repository initialization script for DM8.
-- (Removed non-SQL text accidentally captured from the source web page:
-- comment counters and payment-widget boilerplate. It was not part of the
-- script and would cause syntax errors if executed.)