dmagent.ini

#general

id_policy = 0 #agent id generation policy, 0: mac & ip, 1: ip

center_url = http://192.168.2.10:8080/dem #center URL location, eg: http://192.168.2.10:8080/dem

ip_list = [] #custom local ip list, auto-collected if not configured, example: [192.168.1.100, 192.168.2.100]

dm_bin_path = #DM bin path for JNI calls

collect_data_by_command = false #whether to collect system info data via operating system commands, only available on Linux

deploy_umask = 0077 #file mode creation mask used by the deployment function, only available on Linux (Unix)
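
#Illustrative sketch (not part of the defaults): a filled-in #general section reusing the
#example values from the comments above; the URL and IP addresses are placeholders for your
#own environment.
#
#id_policy = 0
#center_url = http://192.168.2.10:8080/dem
#ip_list = [192.168.1.100, 192.168.2.100]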

#ap

ap_enable = true #whether to enable the AP plugin

ap_port = 6363 #AP TCP/IP listening port, range [1~65535]

#service

service_enable = true #whether to enable the service plugin

service_port = 6364 #service RMI listening port, range [1~65535]

#gather

gather_enable = true #whether to enable the gather plugin

gather_center_servlet = /dem/dma_agent #servlet for gather

gather_offline_save_local = false #whether to save gathered data to a local file when the connection to the center fails

gather_offline_data_directory = gather #local directory for saving data when the agent is offline

gather_offline_flush_freq = 60 #(s) data flush frequency when the agent is offline

gather_offline_send_freq = 300 #(s) frequency of resending data to the center when the agent is offline

gather_mf_info_freq = [60] #mainframe information gather frequency, format: [freq(s), year, month, day, hour, minute, second]

gather_mf_stat_freq = [60] #mainframe statistics gather frequency, format: [freq(s), year, month, day, hour, minute, second]

gather_mf_disk_freq = [600] #mainframe disk gather frequency, format: [freq(s), year, month, day, hour, minute, second]

gather_mf_process_freq = [60] #mainframe process gather frequency, format: [freq(s), year, month, day, hour, minute, second]

gather_mf_core_freq = [60] #mainframe core gather frequency, format: [freq(s), year, month, day, hour, minute, second]

gather_db_info_freq = [60] #database information gather frequency, format: [freq(s), year, month, day, hour, minute, second]

gather_db_stat_freq = [60] #database statistics gather frequency, format: [freq(s), year, month, day, hour, minute, second]

gather_db_deadlock_freq = [60] #database deadlock gather frequency, format: [freq(s), year, month, day, hour, minute, second]

gather_db_event_freq = [60] #database event gather frequency, format: [freq(s), year, month, day, hour, minute, second]

gather_db_session_freq = [60] #database session gather frequency, format: [freq(s), year, month, day, hour, minute, second]

gather_db_session_sql_freq = [60] #database session SQL gather frequency, format: [freq(s), year, month, day, hour, minute, second]

gather_db_tablespace_freq = [600] #database tablespace gather frequency, format: [freq(s), year, month, day, hour, minute, second]

gather_db_user_freq = [600] #database user gather frequency, format: [freq(s), year, month, day, hour, minute, second]

gather_db_table_freq = [60] #database table gather frequency, format: [freq(s), year, month, day, hour, minute, second]

gather_db_audit_record_freq = [0] #database audit record gather frequency, format: [freq(s), year, month, day, hour, minute, second]

gather_db_serverlog_freq = [60] #database running log gather frequency, format: [freq(s), year, month, day, hour, minute, second]

gather_db_serverlog_distinct_enable = true #whether to gather only distinct database running log entries

gather_db_serverlog_infolevel_enable = false #whether to gather info-level database running log entries

gather_db_rlog_freq = [60] #database redo log gather frequency, format: [freq(s), year, month, day, hour, minute, second]

gather_db_backup_freq = [60] #database backup check frequency, format: [freq(s), year, month, day, hour, minute, second]

gather_db_arch_freq = [60] #database archive gather frequency, format: [freq(s), year, month, day, hour, minute, second]

gather_db_backupset_freq = [60] #database backupset gather frequency, format: [freq(s), year, month, day, hour, minute, second]

gather_db_ini_file_freq = [180] #database config file (e.g. dm.ini, dmarch.ini ...) gather frequency, format: [freq(s), year, month, day, hour, minute, second]

gather_db_sqllog_file_freq = [60] #database SQL log (from file) gather frequency, format: [freq(s), year, month, day, hour, minute, second]

gather_db_sqllog_file_exectime = 1000 #(ms) database SQL execution time

gather_db_sqllog_file_buffer = 100 #database SQL data buffer size

gather_db_sqllog_view_freq = [60] #database SQL log (from view) gather frequency, format: [freq(s), year, month, day, hour, minute, second]

gather_db_sqllog_view_exectime = 1000 #(ms) database SQL execution time

gather_db_group_dpc_stat_freq = [0] #database group dpc statistics gather frequency, format: [freq(s), year, month, day, hour, minute, second]

gather_db_group_dsc_stat_freq = [0] #database group dsc statistics gather frequency, format: [freq(s), year, month, day, hour, minute, second]

gather_db_group_mpp_stat_freq = [0] #database group mpp statistics gather frequency, format: [freq(s), year, month, day, hour, minute, second]

gather_db_group_rw_stat_freq = [0] #database group rw statistics gather frequency, format: [freq(s), year, month, day, hour, minute, second]

gather_db_group_dw_stat_freq = [0] #database group dw statistics gather frequency, format: [freq(s), year, month, day, hour, minute, second]

gather_db_group_dpc_sql_freq = [0] #database group dpc SQL gather frequency, format: [freq(s), year, month, day, hour, minute, second]

gather_db_group_dpc_sql_plan_freq = [0] #database group dpc SQL plan gather frequency, format: [freq(s), year, month, day, hour, minute, second]

gather_db_group_dpc_session_freq = [0] #database group dpc session gather frequency, format: [freq(s), year, month, day, hour, minute, second]

gather_db_group_dsc_session_freq = [0] #database group dsc session gather frequency, format: [freq(s), year, month, day, hour, minute, second]

gather_db_group_mpp_session_freq = [0] #database group mpp session gather frequency, format: [freq(s), year, month, day, hour, minute, second]

gather_db_group_rw_session_freq = [0] #database group rw session gather frequency, format: [freq(s), year, month, day, hour, minute, second]

gather_db_group_dw_session_freq = [0] #database group dw session gather frequency, format: [freq(s), year, month, day, hour, minute, second]

gather_db_group_mcopy_stat_freq = [0] #database group mcopy statistics gather frequency, format: [freq(s), year, month, day, hour, minute, second]

gather_db_group_mcopy_session_freq = [0] #database group mcopy session gather frequency, format: [freq(s), year, month, day, hour, minute, second]

gather_db_lock_freq = [60] #database lock gather frequency, format: [freq(s), year, month, day, hour, minute, second]
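
#Illustrative sketch (not part of the defaults): adjusting gather frequencies. The first line
#sets only the interval in seconds; the second fills in the full documented format
#[freq(s), year, month, day, hour, minute, second], with the date/time fields shown purely as
#placeholders following that format string.
#
#gather_mf_disk_freq = [300]
#gather_db_tablespace_freq = [600, 2024, 1, 1, 0, 0, 0]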

#upgrade

upgrade_enable = true #whether to enable the upgrade plugin

upgrade_center_servlet = /dem/dma_agent #servlet for version-check and upgrade

upgrade_version_check_freq = 600 #(s) agent version check frequency, 0 means never check

#isql

isql_auto_commit = true #enable auto-commit

isql_rs_batch_show = true #enable batched display of result sets

isql_rs_batch_show_size = 50 #result set batch display size

isql_col_max_width = 50 #maximum column display width

isql_col_auto_wrap = true #auto-wrap when the maximum column display width is exceeded

isql_col_escape = true #escape column values, replacing special characters such as \r \n

isql_highlight_sql = true #whether to enable SQL highlighting

isql_file_encoding = #SQL file encoding; if empty, the system default encoding is used

#inspect

inspect_output_directory = . #the directory where the inspection report output is saved

inspect_databases #inspect database list

{

{

url = jdbc:dm://localhost:5236 #database connection JDBC URL

user = SYSDBA #database connection user

password = SYSDBA #database connection password

backup_directory = #the directories to search for backupsets, separated by ';', eg /home/bak1;/home/bak2

agent_service_url = rpc://localhost:6364 #agent RPC service URL

archive_days = 30 #number of days of archive files to get, can't be negative

backup_days = 30 #number of days of backupsets to get, can't be negative

backup_check_days = 3 #number of days of backupsets to check, can't be negative. We suggest keeping it small since checking backupsets is time-consuming.

dead_lock_days = 30 #number of days of deadlocks to get, can't be negative

instance_log_days = 30 #number of days of run logs to get, can't be negative

tasks #inspect task definitions; tasks are executed in sequence. All pre-defined tasks are executed when the task list is empty. You can define customized tasks, eg. MyCustomizedTask\n{\nselect 1,2,3 from dual;\n}\n. Pre-defined tasks: [PerformanceTask, TablespaceTask, RedoLogTask, ArchiveTask, BackupTask, UserTask, DeadlockTask, RunLogTask, DmIniTask]. A commented example follows this block.

{

}

}

}
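
#Illustrative sketch (not part of the defaults): a commented inspect_databases entry with one
#customized task, written in the MyCustomizedTask syntax from the tasks comment above;
#connection values reuse the placeholders shown above.
#
#inspect_databases
#{
#    {
#        url = jdbc:dm://localhost:5236
#        user = SYSDBA
#        password = SYSDBA
#        agent_service_url = rpc://localhost:6364
#        tasks
#        {
#            MyCustomizedTask
#            {
#                select 1,2,3 from dual;
#            }
#        }
#    }
#}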

#command

command_url = #URL for echoing messages

command_id = #running command id

command_execute_id = #running execute id

command_item_configs #command item configs

{

{

id = 0 #current item id

dependency_id = [] #current item dependency id array, format: [id1, ..., idn]

user = test #agent connection user

password = test #agent connection password

agent_service_url = rpc://localhost:6364 #agent connection URL

working_directory = #working directory

timeout = 0 #command execution timeout

commands #run commands

{

}

}

}
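
#Illustrative sketch (not part of the defaults): a commented command item. The commands block
#is assumed here to hold one operating system command per line, by analogy with the customized
#inspect task syntax; the credentials, working directory, and command are placeholders only.
#
#command_item_configs
#{
#    {
#        id = 0
#        dependency_id = []
#        user = test
#        password = test
#        agent_service_url = rpc://localhost:6364
#        working_directory = /tmp
#        timeout = 0
#        commands
#        {
#            echo hello
#        }
#    }
#}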

#sql

sql_url = #URL for echoing messages

sql_id = #running sql id

sql_execute_id = #running execute id

sql_max_fetch_rows = 100 #maximum number of rows to fetch for query statements

sql_col_max_width = 50 #maximum column display width

sql_col_auto_wrap = true #auto-wrap when the maximum column display width is exceeded

sql_col_escape = true #escape column values, replacing special characters such as \r \n

sql_file_encoding = #SQL file encoding; if empty, the system default encoding is used

sql_item_configs #sql item configs

{

{

id = 0 #current item id

dependency_id = [] #current item dependency id array, format: [id1, ..., idn]

user = SYSDBA #database connection user

password = SYSDBA #database connection password

url = jdbc:dm://localhost:5236 #database connection URL

continue_when_error = false #whether to continue when an SQL statement fails

timeout = 0 #(s) SQL execution timeout

sqls #execute sqls

{

}

}

}
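
#Illustrative sketch (not part of the defaults): a commented sql item. SQL statements are
#assumed here to be written directly inside the sqls block, as in the customized inspect task
#example; the connection values reuse the placeholders shown above.
#
#sql_item_configs
#{
#    {
#        id = 0
#        dependency_id = []
#        user = SYSDBA
#        password = SYSDBA
#        url = jdbc:dm://localhost:5236
#        continue_when_error = false
#        timeout = 0
#        sqls
#        {
#            select 1,2,3 from dual;
#        }
#    }
#}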

#replace

replace_url = #URL for echoing progress messages

replace_id = #running replace id

replace_execute_id = #running execute id

replace_show_detail = false #show all details about the replacement

replace_item_configs #replace items config list

{

{

id = 0 #current item id

dependency_id = [] #current item dependency id array, format: [id1, ..., idn]

agent_service_url = rpc://localhost:6364 #agent connection URL

source_file = source_file_full_path #source file (zip) used to replace files at the remote site

dest_directory = dest_directory_full_path #destination directory

backup_enable = true #whether to back up the replaced files

owner = #owner of the replaced files, only available on Linux (Unix)

group = #group of the replaced files, only available on Linux (Unix)

permission = #permissions of the replaced files, only available on Linux (Unix)

retain_exist_file_props = true #retain properties (owner, group, permission) of existing files, only available on Linux (Unix)

replace_readonly_file_enable = false #whether to replace read-only files, only available on Linux (Unix)

user = #script execution user, only available on Linux (Unix)

password = #password of the script execution user, only available on Linux (Unix)

pre_script_path = #path of the script to run before replacing; ignored if not set. Must return 0 on success.

post_script_path = #path of the script to run after replacing; ignored if not set. Must return 0 on success.

}

}
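
#Illustrative sketch (not part of the defaults): a commented replace item; the zip path and
#destination directory are hypothetical placeholders for your environment.
#
#replace_item_configs
#{
#    {
#        id = 0
#        dependency_id = []
#        agent_service_url = rpc://localhost:6364
#        source_file = /home/dmdba/patch.zip
#        dest_directory = /home/dmdba/app
#        backup_enable = true
#        retain_exist_file_props = true
#    }
#}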
