Oracle-REDO日志

Oracle-REDO日志

1、基本概念

在线日志文件即REDO LOG 文件,“在线”这两个字是用于和归档日志区分的。在线日志是数据库中十分重要的文件。主要用于记录数据库的变更信息。Oracle 使用在线日志文件记录数据库变更信息的目的是,当数据库实例宕掉的时候,可以通过在线日志文件中记录的信息进行恢复。

在线日志的存在,解决了数据库实例突然宕掉或者服务器宕机后的系统恢复问题。有了在线日志文件,就不用害怕Oracle数据库突然宕掉后数据库实例无法自动修复了,因为它的固有机制可以确保数据库完整恢复。

2、查看日志切换频率

-- SQL*Plus environment: widen the line and raise the page size so the
-- wide hourly matrices below are not wrapped or re-headed mid-output.
set line 400 pagesize 50000
-- Show full timestamps for DATE columns in this session.
alter session set nls_date_format = 'yyyy-mm-dd hh24:mi:ss';

--查询0点-8点
select '18-'as "18-",trunc(first_time) "Date",to_char(first_time, 'Dy') "Day", count(1) "Total", sum(decode(to_char(first_time, 'hh24'), '00', 1, 0)) "h0", sum(decode(to_char(first_time, 'hh24'), '01', 1, 0)) "h1", sum(decode(to_char(first_time, 'hh24'), '02', 1, 0)) "h2", sum(decode(to_char(first_time, 'hh24'), '03', 1, 0)) "h3", sum(decode(to_char(first_time, 'hh24'), '04', 1, 0)) "h4", sum(decode(to_char(first_time, 'hh24'), '05', 1, 0)) "h5", sum(decode(to_char(first_time, 'hh24'), '06', 1, 0)) "h6", sum(decode(to_char(first_time, 'hh24'), '07', 1, 0)) "h7", sum(decode(to_char(first_time, 'hh24'), '08', 1, 0)) "h8" from v$log_history where first_time > (sysdate - 7) group by trunc(first_time), to_char(first_time, 'Dy') order by 2;

--查询9点-16点
select '18-'as "18-",trunc(first_time) "Date",to_char(first_time, 'Dy') "Day", count(1) "Total", sum(decode(to_char(first_time, 'hh24'), '09', 1, 0)) "h9", sum(decode(to_char(first_time, 'hh24'), '10', 1, 0)) "h10", sum(decode(to_char(first_time, 'hh24'), '11', 1, 0)) "h11", sum(decode(to_char(first_time, 'hh24'), '12', 1, 0)) "h12", sum(decode(to_char(first_time, 'hh24'), '13', 1, 0)) "h13", sum(decode(to_char(first_time, 'hh24'), '14', 1, 0)) "h14", sum(decode(to_char(first_time, 'hh24'), '15', 1, 0)) "h15", sum(decode(to_char(first_time, 'hh24'), '16', 1, 0)) "h16" from v$log_history where first_time > (sysdate - 7) group by trunc(first_time), to_char(first_time, 'Dy') order by 2;

--查询17点-23点
select '18-'as "18-",trunc(first_time) "Date",to_char(first_time, 'Dy') "Day", count(1) "Total", sum(decode(to_char(first_time, 'hh24'), '17', 1, 0)) "h17", sum(decode(to_char(first_time, 'hh24'), '18', 1, 0)) "h18", sum(decode(to_char(first_time, 'hh24'), '19', 1, 0)) "h19", sum(decode(to_char(first_time, 'hh24'), '20', 1, 0)) "h20", sum(decode(to_char(first_time, 'hh24'), '21', 1, 0)) "h21", sum(decode(to_char(first_time, 'hh24'), '22', 1, 0)) "h22", sum(decode(to_char(first_time, 'hh24'), '23', 1, 0)) "h23" from v$log_history where first_time > (sysdate - 7) group by trunc(first_time), to_char(first_time, 'Dy') order by 2;

--查询全天24小时
--select trunc(first_time) "Date",to_char(first_time, 'Dy') "Day", count(1) "Total", sum(decode(to_char(first_time, 'hh24'), '00', 1, 0)) "h0", sum(decode(to_char(first_time, 'hh24'), '01', 1, 0)) "h1", sum(decode(to_char(first_time, 'hh24'), '02', 1, 0)) "h2", sum(decode(to_char(first_time, 'hh24'), '03', 1, 0)) "h3", sum(decode(to_char(first_time, 'hh24'), '04', 1, 0)) "h4", sum(decode(to_char(first_time, 'hh24'), '05', 1, 0)) "h5", sum(decode(to_char(first_time, 'hh24'), '06', 1, 0)) "h6", sum(decode(to_char(first_time, 'hh24'), '07', 1, 0)) "h7", sum(decode(to_char(first_time, 'hh24'), '08', 1, 0)) "h8", sum(decode(to_char(first_time, 'hh24'), '09', 1, 0)) "h9", sum(decode(to_char(first_time, 'hh24'), '10', 1, 0)) "h10", sum(decode(to_char(first_time, 'hh24'), '11', 1, 0)) "h11", sum(decode(to_char(first_time, 'hh24'), '12', 1, 0)) "h12", sum(decode(to_char(first_time, 'hh24'), '13', 1, 0)) "h13", sum(decode(to_char(first_time, 'hh24'), '14', 1, 0)) "h14", sum(decode(to_char(first_time, 'hh24'), '15', 1, 0)) "h15", sum(decode(to_char(first_time, 'hh24'), '16', 1, 0)) "h16", sum(decode(to_char(first_time, 'hh24'), '17', 1, 0)) "h17", sum(decode(to_char(first_time, 'hh24'), '18', 1, 0)) "h18", sum(decode(to_char(first_time, 'hh24'), '19', 1, 0)) "h19", sum(decode(to_char(first_time, 'hh24'), '20', 1, 0)) "h20", sum(decode(to_char(first_time, 'hh24'), '21', 1, 0)) "h21", sum(decode(to_char(first_time, 'hh24'), '22', 1, 0)) "h22", sum(decode(to_char(first_time, 'hh24'), '23', 1, 0)) "h23" from v$log_history where first_time > (sysdate - 7) group by trunc(first_time), to_char(first_time, 'Dy') order by 1;
-- SQL*Plus display setup for the 14-day, 24-hour switch matrix below.
-- Each COLUMN command pairs a d_N select-list alias with a 3-character
-- column headed by its hour ('00'..'23'); the aliases in the query must
-- match these names exactly or the formatting is silently ignored.
set linesize 200
set pagesize 100
column  day     format a20            heading 'Day'
column  d_0     format a3               heading '00'
column  d_1     format a3               heading '01'
column  d_2     format a3               heading '02'
column  d_3     format a3               heading '03'
column  d_4     format a3               heading '04'
column  d_5     format a3               heading '05'
column  d_6     format a3               heading '06'
column  d_7     format a3               heading '07'
column  d_8     format a3               heading '08'
column  d_9     format a3               heading '09'
column  d_10    format a3               heading '10'
column  d_11    format a3               heading '11'
column  d_12    format a3               heading '12'
column  d_13    format a3               heading '13'
column  d_14    format a3               heading '14'
column  d_15    format a3               heading '15'
column  d_16    format a3               heading '16'
column  d_17    format a3               heading '17'
column  d_18    format a3               heading '18'
column  d_19    format a3               heading '19'
column  d_20    format a3               heading '20'
column  d_21    format a3               heading '21'
column  d_22    format a3               heading '22'
column  d_23    format a3               heading '23'
-- 14-day switch matrix: one row per day, one d_N column per hour.
-- Each cell is the count of log switches whose FIRST_TIME fell in that hour;
-- the outer DECODE renders a 0 count as '-' so busy hours stand out.
-- Left byte-identical: 24 near-identical expressions whose d_N aliases must
-- match the COLUMN commands above exactly.
select
        substr(to_char(FIRST_TIME,'YYYY/MM/DD,DY'),1,15) day, count(1) "Total",
        decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'00',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),
'00',1,0))) d_0,
        decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'01',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),
'01',1,0))) d_1,
        decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'02',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),
'02',1,0))) d_2,
        decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'03',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),
'03',1,0))) d_3,
        decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'04',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),
'04',1,0))) d_4,
        decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'05',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),
'05',1,0))) d_5,
        decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'06',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),
'06',1,0))) d_6,
        decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'07',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),
'07',1,0))) d_7,
        decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'08',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),
'08',1,0))) d_8,
        decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'09',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),
'09',1,0))) d_9,
        decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'10',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),
'10',1,0))) d_10,
        decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'11',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),
'11',1,0))) d_11,
        decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'12',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),
'12',1,0))) d_12,
        decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'13',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),
'13',1,0))) d_13,
        decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'14',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),
'14',1,0))) d_14,
        decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'15',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),
'15',1,0))) d_15,
        decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'16',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),
'16',1,0))) d_16,
        decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'17',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),
'17',1,0))) d_17,
        decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'18',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),
'18',1,0))) d_18,
        decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'19',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),
'19',1,0))) d_19,
        decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'20',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),
'20',1,0))) d_20,
        decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'21',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),
'21',1,0))) d_21,
        decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'22',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),
'22',1,0))) d_22,
        decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'23',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),
'23',1,0))) d_23
from
        v$log_history where first_time> sysdate-14
group by
        substr(to_char(FIRST_TIME,'YYYY/MM/DD,DY'),1,15)
order by
        substr(to_char(FIRST_TIME,'YYYY/MM/DD,DY'),1,15) asc;

-- Restore SQL*Plus defaults and discard the COLUMN formats defined above.
set linesize 80
set pagesize 14
clear columns
Day                 Total  00  01  02  03  04  05  06  07  08  09  10  11  12  13  14  15  16  17  18  19  20  21  22  23
--------------- ---------- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- ---
2020/11/14,SAT		 1 -   -   -   -   -   -   -   -   -   1   -   -   -   -   -   -   -   -   -   -   -   -   -   -
2020/11/15,SUN		 1 -   -   -   -   -   -   -   -   -   -   1   -   -   -   -   -   -   -   -   -   -   -   -   -
2020/11/16,MON		 1 -   -   -   -   -   -   -   -   -   -   -   -   -   -   1   -   -   -   -   -   -   -   -   -
2020/11/17,TUE		 2 -   -   -   -   -   -   -   -   -   -   -   -   -   -   1   -   1   -   -   -   -   -   -   -
2020/11/18,WED		 3 -   1   -   -   -   -   -   -   -   -   1   -   -   -   -   -   -   1   -   -   -   -   -   -
2020/11/19,THU		 1 -   -   -   -   -   -   -   -   -   -   -   -   -   -   -   -   -   -   -   -   1   -   -   -
2020/11/21,SAT		 4 -   -   1   -   -   -   -   -   -   -   2   -   -   1   -   -   -   -   -   -   -   -   -   -
2020/11/22,SUN		 1 -   -   -   -   -   -   -   -   -   -   -   -   -   -   -   -   1   -   -   -   -   -   -   -
2020/11/23,MON		 1 -   -   -   -   -   -   -   -   1   -   -   -   -   -   -   -   -   -   -   -   -   -   -   -
2020/11/24,TUE		 1 -   -   -   -   -   -   -   -   -   -   -   -   -   -   1   -   -   -   -   -   -   -   -   -
2020/11/25,WED		 2 -   -   -   -   -   -   -   -   -   -   -   1   -   -   -   -   -   -   -   -   1   -   -   -
2020/11/27,FRI		 5 -   -   1   -   -   -   -   -   -   -   -   -   -   -   3   1   -   -   -   -   -   -   -   -

12 rows selected.

3、查看日志组状态

set line 400 pagesize 500
col member for a50
-- One row per redo group member: group, thread, current sequence, file path,
-- size, group status (CURRENT/ACTIVE/INACTIVE) and when its sequence began.
-- FIX: replaced the implicit comma join with an explicit ANSI join and
-- qualified every column, one column per line for readability.
select a.group#,
       a.thread#,
       a.sequence#,
       b.member,
       a.bytes/1024/1024                             "Size(M)",
       a.status,
       a.archived,
       to_char(a.first_time,'yyyy/mm/dd hh24:mi:ss') "First_Time"
  from v$log a
  join v$logfile b
    on a.group# = b.group#
 order by a.group#, a.thread#;

    GROUP#    THREAD#  SEQUENCE# MEMBER 			  Size(M) 		STATUS		ARC First_Time
---------- ---------- ---------- ------------------	  -------- ---------------- --- -------------------
	 1	    1	       7 /oradata/orcl/redo01.log	  500 		INACTIVE 		NO  2020/10/08 03:00:10
	 2	    1	       8 /oradata/orcl/redo02.log	  500 		CURRENT			NO  2020/10/12 19:00:01
	 3	    1	       6 /oradata/orcl/redo03.log	  500 		INACTIVE 		NO  2020/10/03 20:00:30

set line 400 pagesize 500
-- Redo group detail from V$LOG alone, including the SCN and time
-- boundaries (FIRST_CHANGE#/NEXT_CHANGE#) covered by each group.
select group#,
       thread#,
       sequence#,
       bytes/1024/1024 "Size(M)",
       blocksize,
       members,
       archived,
       status,
       first_change#,
       first_time,
       next_change#,
       next_time
  from v$log;

    GROUP#    THREAD#  SEQUENCE#    Size(M)  BLOCKSIZE	  MEMBERS ARC STATUS	       FIRST_CHANGE# FIRST_TIME   NEXT_CHANGE# NEXT_TIME
---------- ---------- ---------- ---------- ---------- ---------- --- ---------------- ------------- ------------ ---
	 1	    1	       7	500	   512		1 NO  INACTIVE		     1138794 08-OCT-20	       1329662 12-OCT-20
	 2	    1	       8	500	   512		1 NO  CURRENT		     1329662 12-OCT-20	    2.8147E+14
	 3	    1	       6	500	   512		1 NO  INACTIVE		      962725 03-OCT-20	       1138794 08-OCT-20

set line 400 pagesize 500
col member for a50
-- Redo log member files. FIX: explicit column list instead of SELECT *
-- so the output stays stable if the view gains columns across versions.
-- NOTE(review): IS_RECOVERY_DEST_FILE requires 10g+; matches the IS_
-- column shown in the sample output below.
select group#, status, type, member, is_recovery_dest_file from v$logfile;

    GROUP# STATUS  TYPE    MEMBER					      IS_
---------- ------- ------- -------------------------------------------------- ---
	 1	   ONLINE  /oradata/orcl/redo01.log			      NO
	 2	   ONLINE  /oradata/orcl/redo02.log			      NO
	 3	   ONLINE  /oradata/orcl/redo03.log			      NO

4、日志操作

4、1、单实例添加REDO

-- Single instance: add two new 2 GB redo log groups.
-- BUG FIX: the original named the SAME file (redo04a.log) for both groups;
-- each group must have its own member file, named here after its group.
ALTER DATABASE ADD LOGFILE GROUP 6('/usr/oracle/app/oradata/orcl/redo06a.log') SIZE 2048M;
ALTER DATABASE ADD LOGFILE GROUP 7('/usr/oracle/app/oradata/orcl/redo07a.log') SIZE 2048M;

4、2、RAC添加REDO

--Add redo groups sequentially (Oracle assigns the group numbers),
--one per RAC thread, stored in the +DATA ASM disk group.
alter database add logfile thread 1('+DATA') size 200M;
alter database add logfile thread 2('+DATA') size 200M;

--Add redo groups with explicitly chosen group numbers.
alter database add logfile thread 1 GROUP 5('+DATA') size 2G;
alter database add logfile thread 2 GROUP 6('+DATA') size 2G;

4、3、切换当前日志到新的日志组

--Force a log switch so the CURRENT group moves on (becomes ACTIVE/INACTIVE).
alter system switch logfile;

--or: a checkpoint flushes dirty buffers so ACTIVE groups can become
--INACTIVE (note it does not itself advance the log).
alter system checkpoint;

4、4、删除旧的日志组

--Drop the old log groups one at a time (each must be INACTIVE first).
alter database drop logfile group 1;
alter database drop logfile group 2;
alter database drop logfile group 3;

--Delete the files of groups 1/2/3 at the OS level afterwards;
--each DROP only removes the group from the control file, never the
--physical file. Double-check the redo paths before deleting anything.

4、5、Oracle-REDO日志修改大小

4、5、1、查询redo当前状态
--Check the current redo status before resizing.
set line 400 pagesize 500
col member for a50
select a.group#,thread#,sequence#,b.member,bytes/1024/1024 "Size(M)",a.status,archived,to_char(first_time,'yyyy/mm/dd hh24:mi:ss') "First_Time" from v$log a , v$logfile b where a.group#=b.group# order by group#,thread#;
--Only groups whose STATUS is INACTIVE may be dropped.
    GROUP#    THREAD#  SEQUENCE# MEMBER 					       Size(M) STATUS		ARC First_Time
---------- ---------- ---------- -------------------------------------------------- ---------- ----------------
	 1	    1	       7 /oradata/orcl/redo01.log				   500 INACTIVE 	NO  2020/10/08 03:00:10
	 2	    1	       8 /oradata/orcl/redo02.log				   500 CURRENT		NO  2020/10/12 19:00:01
	 3	    1	       6 /oradata/orcl/redo03.log				   500 INACTIVE 	NO  2020/10/03 20:00:30

4、5、2、删除并重建redo日志组
--Drop the group being resized (it must be INACTIVE).
alter database drop logfile group 1;
--The DROP only removes the group from the control file; the physical
--file must then be deleted at the OS level by hand.
--Double-check the redo file path -- do not delete the wrong file.


--Remove the old redo file from the file system (SQL*Plus "!" host command).
!rm -rf /oradata/orcl/redo01.log


--Re-create group 1 at the new, larger size (500M -> 2048M).
--BUG FIX: original read "GROUP1"; the keyword and number must be
--separated: "GROUP 1".
ALTER DATABASE ADD LOGFILE GROUP 1('/oradata/orcl/redo01.log') SIZE 2048M;
4、5、3、切换日志组方便后续操作其他redo组
--Switch the current log so the remaining groups become INACTIVE in turn
--and can be dropped/rebuilt the same way.
alter system switch logfile;

--or: checkpoint so ACTIVE groups can go INACTIVE (does not itself switch).
alter system checkpoint;
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值