Log switch info / archive log info / redo history information

http://anuj-singh.blogspot.com/2014/11/oracle-archive-generation-report.html
http://anuj-singh.blogspot.com/2023/

-- Oracle flashback Hourly info from v$archived_log !!!

set linesize 300 pagesize 1000

column day   format a3
column total format 999999
column h00 format 99999
column h01 format 99999
column h02 format 99999
column h03 format 99999
column h04 format 99999
column h05 format 99999
column h06 format 99999
column h07 format 99999
column h08 format 99999
column h09 format 99999
column h10 format 99999
column h11 format 99999
column h12 format 99999
column h13 format 99999
column h14 format 99999
column h15 format 99999
column h16 format 99999
column h17 format 99999
column h18 format 99999
column h19 format 99999
column h20 format 99999
column h21 format 99999
column h22 format 99999
column h23 format 99999
column h24 format 99999

break on report
compute max of "total" "h00" "h01" "h02" "h03" "h04" "h05" "h06" "h07" "h08" "h09" "h10" "h11" -
               "h12" "h13" "h14" "h15" "h16" "h17" "h18" "h19" "h20" "h21" "h22" "h23" on report
compute sum of NUM GB MB KB on report

REM Script to Report the Redo Log Switch History

alter session set nls_date_format='DD MON YYYY';

select thread#, trunc(completion_time) as "date", to_char(completion_time,'Dy') as "Day", count(1) as "total",
sum(decode(to_char(completion_time,'HH24'),'00',1,0)) as "h00",
sum(decode(to_char(completion_time,'HH24'),'01',1,0)) as "h01",
sum(decode(to_char(completion_time,'HH24'),'02',1,0)) as "h02",
sum(decode(to_char(completion_time,'HH24'),'03',1,0)) as "h03",
sum(decode(to_char(completion_time,'HH24'),'04',1,0)) as "h04",
sum(decode(to_char(completion_time,'HH24'),'05',1,0)) as "h05",
sum(decode(to_char(completion_time,'HH24'),'06',1,0)) as "h06",
sum(decode(to_char(completion_time,'HH24'),'07',1,0)) as "h07",
sum(decode(to_char(completion_time,'HH24'),'08',1,0)) as "h08",
sum(decode(to_char(completion_time,'HH24'),'09',1,0)) as "h09",
sum(decode(to_char(completion_time,'HH24'),'10',1,0)) as "h10",
sum(decode(to_char(completion_time,'HH24'),'11',1,0)) as "h11",
sum(decode(to_char(completion_time,'HH24'),'12',1,0)) as "h12",
sum(decode(to_char(completion_time,'HH24'),'13',1,0)) as "h13",
sum(decode(to_char(completion_time,'HH24'),'14',1,0)) as "h14",
sum(decode(to_char(completion_time,'HH24'),'15',1,0)) as "h15",
sum(decode(to_char(completion_time,'HH24'),'16',1,0)) as "h16",
sum(decode(to_char(completion_time,'HH24'),'17',1,0)) as "h17",
sum(decode(to_char(completion_time,'HH24'),'18',1,0)) as "h18",
sum(decode(to_char(completion_time,'HH24'),'19',1,0)) as "h19",
sum(decode(to_char(completion_time,'HH24'),'20',1,0)) as "h20",
sum(decode(to_char(completion_time,'HH24'),'21',1,0)) as "h21",
sum(decode(to_char(completion_time,'HH24'),'22',1,0)) as "h22",
sum(decode(to_char(completion_time,'HH24'),'23',1,0)) as "h23"
from v$archived_log
where first_time > trunc(sysdate-10)
and dest_id = (select dest_id from V$ARCHIVE_DEST_STATUS where status='VALID' and type='LOCAL')   ----<<<<<<
group by thread#, trunc(completion_time), to_char(completion_time, 'Dy')
order by 2,1;

set term off term on
DEFINE size_label=Gb
DEFINE size_divider="1024/1024/1024"

set linesize 200 pagesize 1000

column day   format a3
column total format 9999
column h00 format 999
column h01 format 999
column h02 format 999
column h03 format 999
column h04 format 999
column h05 format 999
column h06 format 999
column h07 format 999
column h08 format 999
column h09 format 999
column h10 format 999
column h11 format 999
column h12 format 999
column h13 format 999
column h14 format 999
column h15 format 999
column h16 format 999
column h17 format 999
column h18 format 999
column h19 format 999
column h20 format 999
column h21 format 999
column h22 format 999
column h23 format 999
column h24 format 999

break on report
compute max of "total" "h00" "h01" "h02" "h03" "h04" "h05" "h06" "h07" "h08" "h09" "h10" "h11" -
               "h12" "h13" "h14" "h15" "h16" "h17" "h18" "h19" "h20" "h21" "h22" "h23" on report
compute sum of NUM GB MB KB on report

col "date" for a25

REM Script to Report the Redo Log Switch History

alter session set nls_date_format='DD MON YYYY';

select thread#, trunc(completion_time) as "date", to_char(completion_time,'Dy') as "Day", count(1) as "total",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'00',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) as "h00",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'01',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) as "h01",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'02',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) as "h02",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'03',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) as "h03",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'04',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) as "h04",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'05',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) as "h05",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'06',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) as "h06",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'07',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) as "h07",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'08',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) as "h08",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'09',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) as "h09",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'10',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) as "h10",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'11',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) as "h11",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'12',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) as "h12",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'13',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) as "h13",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'14',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) as "h14",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'15',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) as "h15",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'16',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) as "h16",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'17',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) as "h17",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'18',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) as "h18",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'19',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) as "h19",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'20',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) as "h20",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'21',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) as "h21",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'22',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) as "h22",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'23',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) as "h23"
from v$archived_log
where first_time > trunc(sysdate-10)
and dest_id = (select dest_id from V$ARCHIVE_DEST_STATUS where status='VALID' and type='LOCAL')
group by thread#, trunc(completion_time), to_char(completion_time, 'Dy')
order by 2,1;

   THREAD# date        Day total h00 h01 h02 h03 h04 h05 h06 h07 h08 h09 h10 h11 h12 h13 h14 h15 h16 h17 h18 h19 h20 h21 h22 h23
---------- ----------- --- ----- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- ---
         1 01 JUL 2023 Sat     7   1   2   1   1   1   0   0   0   0   1   0   0   0   0   0   0   0   0   0   0   0   0   0   0
         1 02 JUL 2023 Sun     1   0   0   0   0   0   0   0   0   0   0   0   0   1   0   0   0   0   0   0   0   0   0   0   0
         1 03 JUL 2023 Mon    13   0   0   0   0   0   0   0   0   0   0   0   1   0   1   0   3   1   0   0   1   2   1   1   2

--- Gb

set term off term on
DEFINE size_label=Gb
DEFINE size_divider="1024/1024/1024"
DEFINE round_precision=2

SET head on FEED off ECHO OFF LINES 1000 TRIMSPOOL ON TRIM on PAGES 1000

COLUMN separator HEADING "!|!|!" FORMAT A1
COLUMN "Date"  HEADING "Date" FORMAT A9
COLUMN "Total" HEADING "Day|Total|(&size_label)" FORMAT 9999
COLUMN "Day"   HEADING "Day" FORMAT A3
COLUMN h0  HEADING "h00|(&size_label)" FORMAT 999
COLUMN h1  HEADING "h01|(&size_label)" FORMAT 999
COLUMN h2  HEADING "h02|(&size_label)" FORMAT 999
COLUMN h3  HEADING "h03|(&size_label)" FORMAT 999
COLUMN h4  HEADING "h04|(&size_label)" FORMAT 999
COLUMN h5  HEADING "h05|(&size_label)" FORMAT 999
COLUMN h6  HEADING "h06|(&size_label)" FORMAT 999
COLUMN h7  HEADING "h07|(&size_label)" FORMAT 999
COLUMN h8  HEADING "h08|(&size_label)" FORMAT 999
COLUMN h9  HEADING "h09|(&size_label)" FORMAT 999
COLUMN h10 HEADING "h10|(&size_label)" FORMAT 999
COLUMN h11 HEADING "h11|(&size_label)" FORMAT 999
COLUMN h12 HEADING "h12|(&size_label)" FORMAT 999
COLUMN h13 HEADING "h13|(&size_label)" FORMAT 999
COLUMN h14 HEADING "h14|(&size_label)" FORMAT 999
COLUMN h15 HEADING "h15|(&size_label)" FORMAT 999
COLUMN h16 HEADING "h16|(&size_label)" FORMAT 999
COLUMN h17 HEADING "h17|(&size_label)" FORMAT 999
COLUMN h18 HEADING "h18|(&size_label)" FORMAT 999
COLUMN h19 HEADING "h19|(&size_label)" FORMAT 999
COLUMN h20 HEADING "h20|(&size_label)" FORMAT 999
COLUMN h21 HEADING "h21|(&size_label)" FORMAT 999
COLUMN h22 HEADING "h22|(&size_label)" FORMAT 999
COLUMN h23 HEADING "h23|(&size_label)" FORMAT 999

set linesize 300 pagesize 1000

column day   format a3
column total format 9999
column h00 format 999
column h01 format 999
column h02 format 999
column h03 format 999
column h04 format 999
column h05 format 999
column h06 format 999
column h07 format 999
column h08 format 999
column h09 format 999
column h10 format 999
column h11 format 999
column h12 format 999
column h13 format 999
column h14 format 999
column h15 format 999
column h16 format 999
column h17 format 999
column h18 format 999
column h19 format 999
column h20 format 999
column h21 format 999
column h22 format 999
column h23 format 999
column h24 format 999

break on report
compute max of "total" "h00" "h01" "h02" "h03" "h04" "h05" "h06" "h07" "h08" "h09" "h10" "h11" -
               "h12" "h13" "h14" "h15" "h16" "h17" "h18" "h19" "h20" "h21" "h22" "h23" on report
compute sum of NUM GB MB KB on report

PROMPT
PROMPT *******************************************************************************************************************************************
PROMPT * A R C H I V E L O G S W I T C H S U M M A R Y (By Size)
PROMPT * (Hourly and Daily figures in &&size_label)
PROMPT *******************************************************************************************************************************************
PROMPT

SELECT to_char(trunc(COMPLETION_TIME),'DD-Mon-YY') "Date",
to_char(COMPLETION_TIME, 'Dy') "Day",
'|' separator,
ROUND(SUM(((BLOCKS * BLOCK_SIZE)/&size_divider))) "Total",
'|' separator,
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'00',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h00",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'01',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h01",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'02',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h02",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'03',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h03",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'04',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h04",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'05',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h05",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'06',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h06",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'07',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h07",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'08',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h08",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'09',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h09",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'10',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h10",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'11',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h11",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'12',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h12",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'13',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h13",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'14',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h14",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'15',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h15",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'16',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h16",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'17',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h17",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'18',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h18",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'19',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h19",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'20',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h20",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'21',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h21",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'22',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h22",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'23',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h23"
from v$archived_log
where 1=1
and dest_id = (select dest_id from V$ARCHIVE_DEST_STATUS where status='VALID' and type='LOCAL')
-- and standby_dest = 'NO'
-- and CREATOR IN ('ARCH' , 'FGRD','LGWR','RFS')
and first_time > trunc(sysdate-30)
group by trunc(COMPLETION_TIME), to_char(COMPLETION_TIME, 'Dy')
order by trunc(COMPLETION_TIME)
/

=== SRDC - Collect Data Guard Diagnostic Information (Doc ID 2219763.1)

set linesize 300
select time "Day"
      ,round(max(value/1024),2)        "Max_Redo_KB/s"
      ,round(avg(value)/1024,2)        "Avg_Redo(top30_mins)_KB/s"
      ,round(avg(value)*60*20/1024,0)  "Recommended_ORL_size_KB"
from (
  with agg as (
    select to_char(BEGIN_TIME,'DD/MON/YYYY') time,
           value,
           dense_rank() over (partition by to_char(BEGIN_TIME,'DD/MON/YYYY') order by value desc) as rank
    from dba_hist_sysmetric_history
    where metric_name = 'Redo Generated Per Sec'
    and BEGIN_TIME > sysdate - 7   -- fixed: the original compared to_char(BEGIN_TIME,...) with a DATE
  )
  select time, value from agg where agg.rank < 31
)
group by time
order by 1 desc
/
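The last column applies the common rule of thumb of sizing the online redo logs so that one log holds roughly 20 minutes of redo at the observed rate: average of the day's top 30 one-minute samples (bytes/s) * 60 s * 20 min, expressed in KB. As an illustrative calculation with made-up numbers: an average of 2,048 KB/s over those samples gives 2,048 * 60 * 20 = 2,457,600 KB, i.e. about 2.3 GB per online redo log group.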
-- Script to calculate the archive log size generated per day for each instance

select THREAD#, trunc(completion_time) as "DATE"
, count(1) num
, trunc(sum(blocks*block_size)/1024/1024/1024) as GB
, trunc(sum(blocks*block_size)/1024/1024) as MB
, sum(blocks*block_size)/1024 as KB
from v$archived_log
where first_time > trunc(sysdate-10)
and dest_id = (select dest_id from V$ARCHIVE_DEST_STATUS where status='VALID' and type='LOCAL')
group by thread#, trunc(completion_time)
order by 2,1
;

set term off term on
DEFINE size_label=Gb
DEFINE size_divider="1024/1024/1024"
DEFINE round_precision=2

SET head on FEED off ECHO OFF LINES 1000 TRIMSPOOL ON TRIM on PAGES 1000

COLUMN separator HEADING "!|!|!" FORMAT A1
COLUMN "Date"  HEADING "Date" FORMAT A9
COLUMN "Total" HEADING "Day|Total|(&size_label)" FORMAT 9999
COLUMN "Day"   HEADING "Day" FORMAT A3
COLUMN h0  HEADING "h0|(&size_label)" FORMAT 999
COLUMN h1  HEADING "h1|(&size_label)" FORMAT 999
COLUMN h2  HEADING "h2|(&size_label)" FORMAT 999
COLUMN h3  HEADING "h3|(&size_label)" FORMAT 999
COLUMN h4  HEADING "h4|(&size_label)" FORMAT 999
COLUMN h5  HEADING "h5|(&size_label)" FORMAT 999
COLUMN h6  HEADING "h6|(&size_label)" FORMAT 999
COLUMN h7  HEADING "h7|(&size_label)" FORMAT 999
COLUMN h8  HEADING "h8|(&size_label)" FORMAT 999
COLUMN h9  HEADING "h9|(&size_label)" FORMAT 999
COLUMN h10 HEADING "h10|(&size_label)" FORMAT 999
COLUMN h11 HEADING "h11|(&size_label)" FORMAT 999
COLUMN h12 HEADING "h12|(&size_label)" FORMAT 999
COLUMN h13 HEADING "h13|(&size_label)" FORMAT 999
COLUMN h14 HEADING "h14|(&size_label)" FORMAT 999
COLUMN h15 HEADING "h15|(&size_label)" FORMAT 999
COLUMN h16 HEADING "h16|(&size_label)" FORMAT 999
COLUMN h17 HEADING "h17|(&size_label)" FORMAT 999
COLUMN h18 HEADING "h18|(&size_label)" FORMAT 999
COLUMN h19 HEADING "h19|(&size_label)" FORMAT 999
COLUMN h20 HEADING "h20|(&size_label)" FORMAT 999
COLUMN h21 HEADING "h21|(&size_label)" FORMAT 999
COLUMN h22 HEADING "h22|(&size_label)" FORMAT 999
COLUMN h23 HEADING "h23|(&size_label)" FORMAT 999

PROMPT
PROMPT *******************************************************************************************************************************************
PROMPT * A R C H I V E L O G S W I T C H S U M M A R Y (By Size)
PROMPT * (Hourly and Daily figures in &&size_label)
PROMPT *******************************************************************************************************************************************
PROMPT

SELECT to_char(trunc(COMPLETION_TIME),'DD-Mon-YY') "Date",
to_char(COMPLETION_TIME, 'Dy') "Day",
'|' separator,
ROUND(SUM(((BLOCKS * BLOCK_SIZE)/&size_divider))) "Total",
'|' separator,
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'00',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h0",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'01',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h1",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'02',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h2",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'03',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h3",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'04',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h4",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'05',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h5",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'06',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h6",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'07',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h7",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'08',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h8",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'09',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h9",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'10',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h10",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'11',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h11",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'12',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h12",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'13',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h13",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'14',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h14",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'15',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h15",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'16',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h16",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'17',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h17",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'18',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h18",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'19',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h19",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'20',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h20",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'21',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h21",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'22',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h22",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'23',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h23"
from v$archived_log
where 1=1
and standby_dest = 'NO'
-- and CREATOR IN ('ARCH' , 'FGRD','LGWR','RFS')
group by trunc(COMPLETION_TIME), to_char(COMPLETION_TIME, 'Dy')
order by trunc(COMPLETION_TIME)
/
! Day !
! Total ! h0 h1 h2 h3 h4 h5 h6 h7 h8 h9 h10 h11 h12 h13 h14 h15 h16 h17 h18 h19 h20 h21 h22 h23
Date Day ! (Gb) ! (Gb) (Gb) (Gb) (Gb) (Gb) (Gb) (Gb) (Gb) (Gb) (Gb) (Gb) (Gb) (Gb) (Gb) (Gb) (Gb) (Gb) (Gb) (Gb) (Gb) (Gb) (Gb) (Gb) (Gb)
--------- --- - ----- - ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ----
27-Jan-23 Fri | 110 | 0 10 5 5 25 0 3 3 5 3 5 3 5 5 3 5 5 5 3 5 3 3 3 3
28-Jan-23 Sat | 71 | 3 3 3 0 3 0 3 3 3 3 3 5 3 5 3 5 5 3 5 3 2 5 3 3
29-Jan-23 Sun | 73 | 3 3 0 3 2 0 3 3 3 3 3 5 3 5 3 5 3 5 3 3 5 5 5 3
30-Jan-23 Mon | 150 | 15 38 8 28 1 0 3 3 3 3 5 3 5 3 5 3 5 3 5 3 2 5 3 3
31-Jan-23 Tue | 186 | 8 41 5 18 49 3 3 3 3 3 5 3 5 5 3 5 3 5 5 3 3 5 3 3
set term off term on
SET head on FEED off ECHO OFF LINES 1000 TRIMSPOOL ON TRIM on PAGES 1000
COLUMN separator HEADING "!|!|!" FORMAT A1
COLUMN "Date" HEADING "Date" FORMAT A9
COLUMN "Total" HEADING "Total|(#)" FORMAT 9999
COLUMN "Day" HEADING "Day" FORMAT A3
COLUMN h0 HEADING "h0|(#)" FORMAT 999
COLUMN h1 HEADING "h1|(#)" FORMAT 999
COLUMN h2 HEADING "h2|(#)" FORMAT 999
COLUMN h3 HEADING "h3|(#)" FORMAT 999
COLUMN h4 HEADING "h4|(#)" FORMAT 999
COLUMN h5 HEADING "h5|(#)" FORMAT 999
COLUMN h6 HEADING "h6|(#)" FORMAT 999
COLUMN h7 HEADING "h7|(#)" FORMAT 999
COLUMN h8 HEADING "h8|(#)" FORMAT 999
COLUMN h9 HEADING "h9|(#)" FORMAT 999
COLUMN h10 HEADING "h10|(#)" FORMAT 999
COLUMN h11 HEADING "h11|(#)" FORMAT 999
COLUMN h12 HEADING "h12|(#)" FORMAT 999
COLUMN h13 HEADING "h13|(#)" FORMAT 999
COLUMN h14 HEADING "h14|(#)" FORMAT 999
COLUMN h15 HEADING "h15|(#)" FORMAT 999
COLUMN h16 HEADING "h16|(#)" FORMAT 999
COLUMN h17 HEADING "h17|(#)" FORMAT 999
COLUMN h18 HEADING "h18|(#)" FORMAT 999
COLUMN h19 HEADING "h19|(#)" FORMAT 999
COLUMN h20 HEADING "h20|(#)" FORMAT 999
COLUMN h21 HEADING "h21|(#)" FORMAT 999
COLUMN h22 HEADING "h22|(#)" FORMAT 999
COLUMN h23 HEADING "h23|(#)" FORMAT 999
PROMPT
PROMPT *******************************************************************************************************************************************
PROMPT * A R C H I V E L O G S W I T C H S U M M A R Y (By Frequency)
PROMPT * (Hourly and Daily figures in number of archivelogs)
PROMPT *******************************************************************************************************************************************
PROMPT
SELECT to_char(trunc(first_time),'DD-Mon-YY') "Date",
to_char(first_time, 'Dy') "Day",
'|' separator,
count(1) Total,
'|' separator,
SUM(decode(to_char(first_time, 'hh24'),'00',1,0)) "h0",
SUM(decode(to_char(first_time, 'hh24'),'01',1,0)) "h1",
SUM(decode(to_char(first_time, 'hh24'),'02',1,0)) "h2",
SUM(decode(to_char(first_time, 'hh24'),'03',1,0)) "h3",
SUM(decode(to_char(first_time, 'hh24'),'04',1,0)) "h4",
SUM(decode(to_char(first_time, 'hh24'),'05',1,0)) "h5",
SUM(decode(to_char(first_time, 'hh24'),'06',1,0)) "h6",
SUM(decode(to_char(first_time, 'hh24'),'07',1,0)) "h7",
SUM(decode(to_char(first_time, 'hh24'),'08',1,0)) "h8",
SUM(decode(to_char(first_time, 'hh24'),'09',1,0)) "h9",
SUM(decode(to_char(first_time, 'hh24'),'10',1,0)) "h10",
SUM(decode(to_char(first_time, 'hh24'),'11',1,0)) "h11",
SUM(decode(to_char(first_time, 'hh24'),'12',1,0)) "h12",
SUM(decode(to_char(first_time, 'hh24'),'13',1,0)) "h13",
SUM(decode(to_char(first_time, 'hh24'),'14',1,0)) "h14",
SUM(decode(to_char(first_time, 'hh24'),'15',1,0)) "h15",
SUM(decode(to_char(first_time, 'hh24'),'16',1,0)) "h16",
SUM(decode(to_char(first_time, 'hh24'),'17',1,0)) "h17",
SUM(decode(to_char(first_time, 'hh24'),'18',1,0)) "h18",
SUM(decode(to_char(first_time, 'hh24'),'19',1,0)) "h19",
SUM(decode(to_char(first_time, 'hh24'),'20',1,0)) "h20",
SUM(decode(to_char(first_time, 'hh24'),'21',1,0)) "h21",
SUM(decode(to_char(first_time, 'hh24'),'22',1,0)) "h22",
SUM(decode(to_char(first_time, 'hh24'),'23',1,0)) "h23"
from v$archived_log
where 1=1
and standby_dest = 'NO'
group by trunc(first_time), to_char(first_time, 'Dy')
order by trunc(first_time)
/
! !
! Total ! h0 h1 h2 h3 h4 h5 h6 h7 h8 h9 h10 h11 h12 h13 h14 h15 h16 h17 h18 h19 h20 h21 h22 h23
Date Day ! (#) ! (#) (#) (#) (#) (#) (#) (#) (#) (#) (#) (#) (#) (#) (#) (#) (#) (#) (#) (#) (#) (#) (#) (#) (#)
--------- --- - ----- - ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ----
27-Jan-23 Fri | 46 | 0 5 2 2 11 0 1 1 2 1 2 1 2 2 1 2 2 2 1 2 1 1 1 1
28-Jan-23 Sat | 30 | 1 1 1 0 3 0 1 1 1 1 1 2 1 2 1 2 2 1 2 1 1 2 1 1
29-Jan-23 Sun | 30 | 1 1 0 1 2 0 1 1 1 1 1 2 1 2 1 2 1 2 1 1 2 2 2 1
30-Jan-23 Mon | 61 | 6 15 3 11 2 0 1 1 1 1 2 1 2 1 2 1 2 1 2 1 1 2 1 1
31-Jan-23 Tue | 75 | 3 16 2 7 21 1 1 1 1 1 2 1 2 2 1 2 1 2 2 1 1 2 1 1
01-Feb-23 Wed | 70 | 4 12 12 4 11 0 1 2 1 1 2 1 2 3 2 1 2 1 2 1 2 1 1 1
02-Feb-23 Thu | 56 | 4 16 4 7 2 0 1 1 1 1 1 2 1 2 1 2 1 1 2 1 1 2 1 1
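On 11g and later the same hourly matrix can also be written with the PIVOT clause instead of 24 DECODE expressions. A minimal sketch of that variant (my own addition, not part of the original scripts; it counts non-standby archive log rows per hour):

SELECT *
FROM  (SELECT TRUNC(first_time)           dt,   -- one row per archived log: its day and hour
              TO_CHAR(first_time, 'HH24') hh
       FROM   v$archived_log
       WHERE  standby_dest = 'NO')
PIVOT (COUNT(*) FOR hh IN ('00' h0,'01' h1,'02' h2,'03' h3,'04' h4,'05' h5,
                           '06' h6,'07' h7,'08' h8,'09' h9,'10' h10,'11' h11,
                           '12' h12,'13' h13,'14' h14,'15' h15,'16' h16,'17' h17,
                           '18' h18,'19' h19,'20' h20,'21' h21,'22' h22,'23' h23))
ORDER BY dt;

The inner query reduces each archived log to its day and hour, and PIVOT turns the 24 hour values into the columns h0..h23.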
==================================================================================================================================================================
set term off term on
SET head on FEED off ECHO OFF LINES 1000 TRIMSPOOL ON TRIM on PAGES 1000
COLUMN separator HEADING "!|!|!" FORMAT A1
COLUMN "Date" HEADING "Date" FORMAT A9
COLUMN "Total" HEADING "Total|(#)" FORMAT 99999
COLUMN "Day" HEADING "Day" FORMAT A3
COLUMN h0 HEADING "h0|(#)" FORMAT 9999
COLUMN h1 HEADING "h1|(#)" FORMAT 9999
COLUMN h2 HEADING "h2|(#)" FORMAT 9999
COLUMN h3 HEADING "h3|(#)" FORMAT 9999
COLUMN h4 HEADING "h4|(#)" FORMAT 9999
COLUMN h5 HEADING "h5|(#)" FORMAT 9999
COLUMN h6 HEADING "h6|(#)" FORMAT 9999
COLUMN h7 HEADING "h7|(#)" FORMAT 9999
COLUMN h8 HEADING "h8|(#)" FORMAT 9999
COLUMN h9 HEADING "h9|(#)" FORMAT 9999
COLUMN h10 HEADING "h10|(#)" FORMAT 9999
COLUMN h11 HEADING "h11|(#)" FORMAT 9999
COLUMN h12 HEADING "h12|(#)" FORMAT 9999
COLUMN h13 HEADING "h13|(#)" FORMAT 9999
COLUMN h14 HEADING "h14|(#)" FORMAT 9999
COLUMN h15 HEADING "h15|(#)" FORMAT 9999
COLUMN h16 HEADING "h16|(#)" FORMAT 9999
COLUMN h17 HEADING "h17|(#)" FORMAT 9999
COLUMN h18 HEADING "h18|(#)" FORMAT 9999
COLUMN h19 HEADING "h19|(#)" FORMAT 9999
COLUMN h20 HEADING "h20|(#)" FORMAT 9999
COLUMN h21 HEADING "h21|(#)" FORMAT 9999
COLUMN h22 HEADING "h22|(#)" FORMAT 9999
COLUMN h23 HEADING "h23|(#)" FORMAT 9999
PROMPT
PROMPT *******************************************************************************************************************************************
PROMPT * A R C H I V E L O G S W I T C H S U M M A R Y (By Frequency)
PROMPT * (Hourly and Daily figures in number of archivelogs)
PROMPT *******************************************************************************************************************************************
PROMPT
SELECT to_char(trunc(first_time),'DD-Mon-YY') "Date",
to_char(first_time, 'Dy') "Day",
'|' separator,
count(1) Total,
'|' separator,
SUM(decode(to_char(first_time, 'hh24'),'00',1,0)) "h0",
SUM(decode(to_char(first_time, 'hh24'),'01',1,0)) "h1",
SUM(decode(to_char(first_time, 'hh24'),'02',1,0)) "h2",
SUM(decode(to_char(first_time, 'hh24'),'03',1,0)) "h3",
SUM(decode(to_char(first_time, 'hh24'),'04',1,0)) "h4",
SUM(decode(to_char(first_time, 'hh24'),'05',1,0)) "h5",
SUM(decode(to_char(first_time, 'hh24'),'06',1,0)) "h6",
SUM(decode(to_char(first_time, 'hh24'),'07',1,0)) "h7",
SUM(decode(to_char(first_time, 'hh24'),'08',1,0)) "h8",
SUM(decode(to_char(first_time, 'hh24'),'09',1,0)) "h9",
SUM(decode(to_char(first_time, 'hh24'),'10',1,0)) "h10",
SUM(decode(to_char(first_time, 'hh24'),'11',1,0)) "h11",
SUM(decode(to_char(first_time, 'hh24'),'12',1,0)) "h12",
SUM(decode(to_char(first_time, 'hh24'),'13',1,0)) "h13",
SUM(decode(to_char(first_time, 'hh24'),'14',1,0)) "h14",
SUM(decode(to_char(first_time, 'hh24'),'15',1,0)) "h15",
SUM(decode(to_char(first_time, 'hh24'),'16',1,0)) "h16",
SUM(decode(to_char(first_time, 'hh24'),'17',1,0)) "h17",
SUM(decode(to_char(first_time, 'hh24'),'18',1,0)) "h18",
SUM(decode(to_char(first_time, 'hh24'),'19',1,0)) "h19",
SUM(decode(to_char(first_time, 'hh24'),'20',1,0)) "h20",
SUM(decode(to_char(first_time, 'hh24'),'21',1,0)) "h21",
SUM(decode(to_char(first_time, 'hh24'),'22',1,0)) "h22",
SUM(decode(to_char(first_time, 'hh24'),'23',1,0)) "h23"
from v$archived_log
where 1=1
and standby_dest = 'NO'
group by trunc(first_time), to_char(first_time, 'Dy')
order by trunc(first_time)
/
==================
SET LINESIZE 250 PAGESIZE 9999 TRIMSPOOL ON VERIFY off
COLUMN H00 FORMAT 99,999 HEADING '00'
COLUMN H01 FORMAT 99,999 HEADING '01'
COLUMN H02 FORMAT 99,999 HEADING '02'
COLUMN H03 FORMAT 99,999 HEADING '03'
COLUMN H04 FORMAT 99,999 HEADING '04'
COLUMN H05 FORMAT 99,999 HEADING '05'
COLUMN H06 FORMAT 99,999 HEADING '06'
COLUMN H07 FORMAT 99,999 HEADING '07'
COLUMN H08 FORMAT 99,999 HEADING '08'
COLUMN H09 FORMAT 99,999 HEADING '09'
COLUMN H10 FORMAT 99,999 HEADING '10'
COLUMN H11 FORMAT 99,999 HEADING '11'
COLUMN H12 FORMAT 99,999 HEADING '12'
COLUMN H13 FORMAT 99,999 HEADING '13'
COLUMN H14 FORMAT 99,999 HEADING '14'
COLUMN H15 FORMAT 99,999 HEADING '15'
COLUMN H16 FORMAT 99,999 HEADING '16'
COLUMN H17 FORMAT 99,999 HEADING '17'
COLUMN H18 FORMAT 99,999 HEADING '18'
COLUMN H19 FORMAT 99,999 HEADING '19'
COLUMN H20 FORMAT 99,999 HEADING '20'
COLUMN H21 FORMAT 99,999 HEADING '21'
COLUMN H22 FORMAT 99,999 HEADING '22'
COLUMN H23 FORMAT 99,999 HEADING '23'
COLUMN TOTAL FORMAT 999,999 HEADING 'Total'
SELECT
SUBSTR(TO_CHAR(first_time, 'MM/DD/RR HH:MI:SS'),1,5) DAY
, SUM(DECODE(SUBSTR(TO_CHAR(first_time, 'MM/DD/RR HH24:MI:SS'),10,2),'00',ROUND(((blocks*block_size)/1024/1024)),0)) H00
, SUM(DECODE(SUBSTR(TO_CHAR(first_time, 'MM/DD/RR HH24:MI:SS'),10,2),'01',ROUND(((blocks*block_size)/1024/1024)),0)) H01
, SUM(DECODE(SUBSTR(TO_CHAR(first_time, 'MM/DD/RR HH24:MI:SS'),10,2),'02',ROUND(((blocks*block_size)/1024/1024)),0)) H02
, SUM(DECODE(SUBSTR(TO_CHAR(first_time, 'MM/DD/RR HH24:MI:SS'),10,2),'03',ROUND(((blocks*block_size)/1024/1024)),0)) H03
, SUM(DECODE(SUBSTR(TO_CHAR(first_time, 'MM/DD/RR HH24:MI:SS'),10,2),'04',ROUND(((blocks*block_size)/1024/1024)),0)) H04
, SUM(DECODE(SUBSTR(TO_CHAR(first_time, 'MM/DD/RR HH24:MI:SS'),10,2),'05',ROUND(((blocks*block_size)/1024/1024)),0)) H05
, SUM(DECODE(SUBSTR(TO_CHAR(first_time, 'MM/DD/RR HH24:MI:SS'),10,2),'06',ROUND(((blocks*block_size)/1024/1024)),0)) H06
, SUM(DECODE(SUBSTR(TO_CHAR(first_time, 'MM/DD/RR HH24:MI:SS'),10,2),'07',ROUND(((blocks*block_size)/1024/1024)),0)) H07
, SUM(DECODE(SUBSTR(TO_CHAR(first_time, 'MM/DD/RR HH24:MI:SS'),10,2),'08',ROUND(((blocks*block_size)/1024/1024)),0)) H08
, SUM(DECODE(SUBSTR(TO_CHAR(first_time, 'MM/DD/RR HH24:MI:SS'),10,2),'09',ROUND(((blocks*block_size)/1024/1024)),0)) H09
, SUM(DECODE(SUBSTR(TO_CHAR(first_time, 'MM/DD/RR HH24:MI:SS'),10,2),'10',ROUND(((blocks*block_size)/1024/1024)),0)) H10
, SUM(DECODE(SUBSTR(TO_CHAR(first_time, 'MM/DD/RR HH24:MI:SS'),10,2),'11',ROUND(((blocks*block_size)/1024/1024)),0)) H11
, SUM(DECODE(SUBSTR(TO_CHAR(first_time, 'MM/DD/RR HH24:MI:SS'),10,2),'12',ROUND(((blocks*block_size)/1024/1024)),0)) H12
, SUM(DECODE(SUBSTR(TO_CHAR(first_time, 'MM/DD/RR HH24:MI:SS'),10,2),'13',ROUND(((blocks*block_size)/1024/1024)),0)) H13
, SUM(DECODE(SUBSTR(TO_CHAR(first_time, 'MM/DD/RR HH24:MI:SS'),10,2),'14',ROUND(((blocks*block_size)/1024/1024)),0)) H14
, SUM(DECODE(SUBSTR(TO_CHAR(first_time, 'MM/DD/RR HH24:MI:SS'),10,2),'15',ROUND(((blocks*block_size)/1024/1024)),0)) H15
, SUM(DECODE(SUBSTR(TO_CHAR(first_time, 'MM/DD/RR HH24:MI:SS'),10,2),'16',ROUND(((blocks*block_size)/1024/1024)),0)) H16
, SUM(DECODE(SUBSTR(TO_CHAR(first_time, 'MM/DD/RR HH24:MI:SS'),10,2),'17',ROUND(((blocks*block_size)/1024/1024)),0)) H17
, SUM(DECODE(SUBSTR(TO_CHAR(first_time, 'MM/DD/RR HH24:MI:SS'),10,2),'18',ROUND(((blocks*block_size)/1024/1024)),0)) H18
, SUM(DECODE(SUBSTR(TO_CHAR(first_time, 'MM/DD/RR HH24:MI:SS'),10,2),'19',ROUND(((blocks*block_size)/1024/1024)),0)) H19
, SUM(DECODE(SUBSTR(TO_CHAR(first_time, 'MM/DD/RR HH24:MI:SS'),10,2),'20',ROUND(((blocks*block_size)/1024/1024)),0)) H20
, SUM(DECODE(SUBSTR(TO_CHAR(first_time, 'MM/DD/RR HH24:MI:SS'),10,2),'21',ROUND(((blocks*block_size)/1024/1024)),0)) H21
, SUM(DECODE(SUBSTR(TO_CHAR(first_time, 'MM/DD/RR HH24:MI:SS'),10,2),'22',ROUND(((blocks*block_size)/1024/1024)),0)) H22
, SUM(DECODE(SUBSTR(TO_CHAR(first_time, 'MM/DD/RR HH24:MI:SS'),10,2),'23',ROUND(((blocks*block_size)/1024/1024)),0)) H23
, ROUND(SUM((blocks*block_size)/1024/1024)) TOTAL
FROM
v$archived_log a
GROUP BY SUBSTR(TO_CHAR(first_time, 'MM/DD/RR HH:MI:SS'),1,5)
ORDER BY SUBSTR(TO_CHAR(first_time, 'MM/DD/RR HH:MI:SS'),1,5)
/
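Because the report above groups only on month/day and scans everything still listed in v$archived_log, a day-level cross-check of the same MB arithmetic can be useful. This small companion query is mine (not from the original post) and assumes a 7-day window, which you can widen as needed:

SELECT TRUNC(first_time)                              day,
       COUNT(*)                                       logs,
       ROUND(SUM(blocks * block_size) / 1024 / 1024)  total_mb
FROM   v$archived_log
WHERE  first_time > TRUNC(SYSDATE) - 7     -- assumed window
GROUP  BY TRUNC(first_time)
ORDER  BY 1
/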
============
-- | FILE : perf_log_switch_history_count_daily_7.sql |
-- | CLASS : Tuning |
-- | PURPOSE : Reports on how often log switches occur in your database on a |
-- | daily basis. This script is to be used with an Oracle 7 |
-- | database. |
-- | NOTE : As with any code, ensure to test this script in a development |
-- | environment before attempting to run it in production. |
-- +----------------------------------------------------------------------------+
SET LINESIZE 145
SET PAGESIZE 9999
SET VERIFY off
ACCEPT startDate PROMPT 'Enter start date (DD-MON-YYYY): '
ACCEPT endDate PROMPT 'Enter end date (DD-MON-YYYY): '
COLUMN H00 FORMAT 999 HEADING '00'
COLUMN H01 FORMAT 999 HEADING '01'
COLUMN H02 FORMAT 999 HEADING '02'
COLUMN H03 FORMAT 999 HEADING '03'
COLUMN H04 FORMAT 999 HEADING '04'
COLUMN H05 FORMAT 999 HEADING '05'
COLUMN H06 FORMAT 999 HEADING '06'
COLUMN H07 FORMAT 999 HEADING '07'
COLUMN H08 FORMAT 999 HEADING '08'
COLUMN H09 FORMAT 999 HEADING '09'
COLUMN H10 FORMAT 999 HEADING '10'
COLUMN H11 FORMAT 999 HEADING '11'
COLUMN H12 FORMAT 999 HEADING '12'
COLUMN H13 FORMAT 999 HEADING '13'
COLUMN H14 FORMAT 999 HEADING '14'
COLUMN H15 FORMAT 999 HEADING '15'
COLUMN H16 FORMAT 999 HEADING '16'
COLUMN H17 FORMAT 999 HEADING '17'
COLUMN H18 FORMAT 999 HEADING '18'
COLUMN H19 FORMAT 999 HEADING '19'
COLUMN H20 FORMAT 999 HEADING '20'
COLUMN H21 FORMAT 999 HEADING '21'
COLUMN H22 FORMAT 999 HEADING '22'
COLUMN H23 FORMAT 999 HEADING '23'
COLUMN TOTAL FORMAT 999,999 HEADING 'Total'
SELECT
SUBSTR(time,1,5) DAY
, SUM(DECODE(SUBSTR(time,10,2),'00',1,0)) H00
, SUM(DECODE(SUBSTR(time,10,2),'01',1,0)) H01
, SUM(DECODE(SUBSTR(time,10,2),'02',1,0)) H02
, SUM(DECODE(SUBSTR(time,10,2),'03',1,0)) H03
, SUM(DECODE(SUBSTR(time,10,2),'04',1,0)) H04
, SUM(DECODE(SUBSTR(time,10,2),'05',1,0)) H05
, SUM(DECODE(SUBSTR(time,10,2),'06',1,0)) H06
, SUM(DECODE(SUBSTR(time,10,2),'07',1,0)) H07
, SUM(DECODE(SUBSTR(time,10,2),'08',1,0)) H08
, SUM(DECODE(SUBSTR(time,10,2),'09',1,0)) H09
, SUM(DECODE(SUBSTR(time,10,2),'10',1,0)) H10
, SUM(DECODE(SUBSTR(time,10,2),'11',1,0)) H11
, SUM(DECODE(SUBSTR(time,10,2),'12',1,0)) H12
, SUM(DECODE(SUBSTR(time,10,2),'13',1,0)) H13
, SUM(DECODE(SUBSTR(time,10,2),'14',1,0)) H14
, SUM(DECODE(SUBSTR(time,10,2),'15',1,0)) H15
, SUM(DECODE(SUBSTR(time,10,2),'16',1,0)) H16
, SUM(DECODE(SUBSTR(time,10,2),'17',1,0)) H17
, SUM(DECODE(SUBSTR(time,10,2),'18',1,0)) H18
, SUM(DECODE(SUBSTR(time,10,2),'19',1,0)) H19
, SUM(DECODE(SUBSTR(time,10,2),'20',1,0)) H20
, SUM(DECODE(SUBSTR(time,10,2),'21',1,0)) H21
, SUM(DECODE(SUBSTR(time,10,2),'22',1,0)) H22
, SUM(DECODE(SUBSTR(time,10,2),'23',1,0)) H23
, COUNT(*) TOTAL
FROM
v$log_history a
WHERE
(TO_DATE(SUBSTR(time, 1,8), 'MM/DD/RR')
>=
TO_DATE('&startDate', 'DD-MON-YYYY')
)
AND
(TO_DATE(SUBSTR(time, 1,8), 'MM/DD/RR')
<=
TO_DATE('&endDate', 'DD-MON-YYYY')
)
GROUP BY SUBSTR(time,1,5)
/
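To run the prompted report, save it under the file name shown in the header and execute it from SQL*Plus; the two ACCEPT prompts supply the date window (the dates below are only placeholders):

SQL> @perf_log_switch_history_count_daily_7.sql
Enter start date (DD-MON-YYYY): 01-JAN-2023
Enter end date (DD-MON-YYYY): 07-JAN-2023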
=============
-- | FILE : perf_file_io_efficiency.sql |
-- | CLASS : Tuning |
-- | NOTE : As with any code, ensure to test this script in a development |
-- | environment before attempting to run it in production. |
-- +----------------------------------------------------------------------------+
SET LINESIZE 145
SET PAGESIZE 9999
COLUMN ts FORMAT a15 HEADING 'Tablespace'
COLUMN fn FORMAT a38 HEADING 'Filename'
COLUMN rds HEADING 'Reads'
COLUMN blk_rds HEADING 'Block Reads'
COLUMN wrts HEADING 'Writes'
COLUMN blk_wrts HEADING 'Block Writes'
COLUMN rw HEADING 'Reads+Writes'
COLUMN blk_rw HEADING 'Block Reads+Writes'
COLUMN eff FORMAT a10 HEADING 'Efficiency'
SELECT
f.tablespace_name ts
, f.file_name fn
, v.phyrds rds
, v.phyblkrd blk_rds
, v.phywrts wrts
, v.phyblkwrt blk_wrts
, v.phyrds + v.phywrts rw
, v.phyblkrd + v.phyblkwrt blk_rw
, DECODE(v.phyblkrd, 0, null, ROUND(100*(v.phyrds + v.phywrts)/(v.phyblkrd + v.phyblkwrt), 2)) eff
FROM
dba_data_files f
, v$filestat v
WHERE
f.file_id = v.file#
ORDER BY
rds
/
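The Efficiency column is 100 * (reads + writes) / (block reads + block writes), so a low percentage means most calls transferred many blocks at once (multiblock I/O) and a value near 100 means mostly single-block I/O. With made-up numbers: 10,000 reads and 2,000 writes that moved 80,000 and 2,000 blocks give ROUND(100 * 12,000 / 82,000, 2) = 14.63.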
set term off term on
DEFINE size_label=Gb
DEFINE size_divider="1024/1024/1024"
DEFINE round_precision=2
SET head on FEED off ECHO OFF LINES 1000 TRIMSPOOL ON TRIM on PAGES 1000
COLUMN separator HEADING "!|!|!" FORMAT A1
COLUMN "Date" HEADING "Date" FORMAT A9
COLUMN "Total" HEADING "Day|Total|(&size_label)" FORMAT 9999
COLUMN "Day" HEADING "Day" FORMAT A3
COLUMN h0 HEADING "h0|(&size_label)" FORMAT 999
COLUMN h1 HEADING "h1|(&size_label)" FORMAT 999
COLUMN h2 HEADING "h2|(&size_label)" FORMAT 999
COLUMN h3 HEADING "h3|(&size_label)" FORMAT 999
COLUMN h4 HEADING "h4|(&size_label)" FORMAT 999
COLUMN h5 HEADING "h5|(&size_label)" FORMAT 999
COLUMN h6 HEADING "h6|(&size_label)" FORMAT 999
COLUMN h7 HEADING "h7|(&size_label)" FORMAT 999
COLUMN h8 HEADING "h8|(&size_label)" FORMAT 999
COLUMN h9 HEADING "h9|(&size_label)" FORMAT 999
COLUMN h10 HEADING "h10|(&size_label)" FORMAT 999
COLUMN h11 HEADING "h11|(&size_label)" FORMAT 999
COLUMN h12 HEADING "h12|(&size_label)" FORMAT 999
COLUMN h13 HEADING "h13|(&size_label)" FORMAT 999
COLUMN h14 HEADING "h14|(&size_label)" FORMAT 999
COLUMN h15 HEADING "h15|(&size_label)" FORMAT 999
COLUMN h16 HEADING "h16|(&size_label)" FORMAT 999
COLUMN h17 HEADING "h17|(&size_label)" FORMAT 999
COLUMN h18 HEADING "h18|(&size_label)" FORMAT 999
COLUMN h19 HEADING "h19|(&size_label)" FORMAT 999
COLUMN h20 HEADING "h20|(&size_label)" FORMAT 999
COLUMN h21 HEADING "h21|(&size_label)" FORMAT 999
COLUMN h22 HEADING "h22|(&size_label)" FORMAT 999
COLUMN h23 HEADING "h23|(&size_label)" FORMAT 999
PROMPT
PROMPT *******************************************************************************************************************************************
PROMPT * A R C H I V E L O G S W I T C H S U M M A R Y (By Size)
PROMPT * (Hourly and Daily figures in &&size_label)
PROMPT *******************************************************************************************************************************************
PROMPT
SELECT to_char(trunc(COMPLETION_TIME),'DD-Mon-YY') "Date",
to_char(COMPLETION_TIME, 'Dy') "Day",
'|' separator,
ROUND(SUM(((BLOCKS * BLOCK_SIZE)/&size_divider))) "Total",
'|' separator,
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'00',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h0",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'01',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h1",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'02',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h2",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'03',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h3",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'04',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h4",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'05',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h5",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'06',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h6",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'07',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h7",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'08',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h8",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'09',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h9",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'10',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h10",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'11',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h11",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'12',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h12",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'13',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h13",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'14',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h14",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'15',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h15",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'16',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h16",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'17',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h17",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'18',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h18",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'19',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h19",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'20',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h20",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'21',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h21",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'22',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h22",
ROUND(SUM(decode(to_char(COMPLETION_TIME, 'hh24'),'23',(BLOCKS * BLOCK_SIZE)/&size_divider,0))) "h23"
from v$archived_log
where 1=1
and standby_dest = 'NO'
-- and CREATOR IN ('ARCH' , 'FGRD','LGWR','RFS')
group by trunc(COMPLETION_TIME), to_char(COMPLETION_TIME, 'Dy')
order by trunc(COMPLETION_TIME)
/
! Day !
! Total ! h0 h1 h2 h3 h4 h5 h6 h7 h8 h9 h10 h11 h12 h13 h14 h15 h16 h17 h18 h19 h20 h21 h22 h23
Date Day ! (GB) ! (GB) (GB) (GB) (GB) (GB) (GB) (GB) (GB) (GB) (GB) (GB) (GB) (GB) (GB) (GB) (GB) (GB) (GB) (GB) (GB) (GB) (GB) (GB) (GB)
--------- --- - ----- - ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ----
17-Oct-14 Fri | 0 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
18-Oct-14 Sat | 0 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
19-Oct-14 Sun | 0 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
25-Oct-14 Sat | 0 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
02-Nov-14 Sun | 0 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
08-Nov-14 Sat | 0 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
set term off term on
SET head on FEED off ECHO OFF LINES 1000 TRIMSPOOL ON TRIM on PAGES 1000
COLUMN separator HEADING "!|!|!" FORMAT A1
COLUMN "Date" HEADING "Date" FORMAT A9
COLUMN "Total" HEADING "Total|(#)" FORMAT 9999
COLUMN "Day" HEADING "Day" FORMAT A3
COLUMN h0 HEADING "h0|(#)" FORMAT 999
COLUMN h1 HEADING "h1|(#)" FORMAT 999
COLUMN h2 HEADING "h2|(#)" FORMAT 999
COLUMN h3 HEADING "h3|(#)" FORMAT 999
COLUMN h4 HEADING "h4|(#)" FORMAT 999
COLUMN h5 HEADING "h5|(#)" FORMAT 999
COLUMN h6 HEADING "h6|(#)" FORMAT 999
COLUMN h7 HEADING "h7|(#)" FORMAT 999
COLUMN h8 HEADING "h8|(#)" FORMAT 999
COLUMN h9 HEADING "h9|(#)" FORMAT 999
COLUMN h10 HEADING "h10|(#)" FORMAT 999
COLUMN h11 HEADING "h11|(#)" FORMAT 999
COLUMN h12 HEADING "h12|(#)" FORMAT 999
COLUMN h13 HEADING "h13|(#)" FORMAT 999
COLUMN h14 HEADING "h14|(#)" FORMAT 999
COLUMN h15 HEADING "h15|(#)" FORMAT 999
COLUMN h16 HEADING "h16|(#)" FORMAT 999
COLUMN h17 HEADING "h17|(#)" FORMAT 999
COLUMN h18 HEADING "h18|(#)" FORMAT 999
COLUMN h19 HEADING "h19|(#)" FORMAT 999
COLUMN h20 HEADING "h20|(#)" FORMAT 999
COLUMN h21 HEADING "h21|(#)" FORMAT 999
COLUMN h22 HEADING "h22|(#)" FORMAT 999
COLUMN h23 HEADING "h23|(#)" FORMAT 999
PROMPT
PROMPT *******************************************************************************************************************************************
PROMPT * A R C H I V E L O G S W I T C H S U M M A R Y (By Frequency)
PROMPT * (Hourly and Daily figures in number of archivelogs)
PROMPT *******************************************************************************************************************************************
PROMPT
PROMPT -
SELECT to_char(trunc(first_time),'DD-Mon-YY') "Date",
to_char(first_time, 'Dy') "Day",
'|' separator,
count(1) Total,
'|' separator,
SUM(decode(to_char(first_time, 'hh24'),'00',1,0)) "h0",
SUM(decode(to_char(first_time, 'hh24'),'01',1,0)) "h1",
SUM(decode(to_char(first_time, 'hh24'),'02',1,0)) "h2",
SUM(decode(to_char(first_time, 'hh24'),'03',1,0)) "h3",
SUM(decode(to_char(first_time, 'hh24'),'04',1,0)) "h4",
SUM(decode(to_char(first_time, 'hh24'),'05',1,0)) "h5",
SUM(decode(to_char(first_time, 'hh24'),'06',1,0)) "h6",
SUM(decode(to_char(first_time, 'hh24'),'07',1,0)) "h7",
SUM(decode(to_char(first_time, 'hh24'),'08',1,0)) "h8",
SUM(decode(to_char(first_time, 'hh24'),'09',1,0)) "h9",
SUM(decode(to_char(first_time, 'hh24'),'10',1,0)) "h10",
SUM(decode(to_char(first_time, 'hh24'),'11',1,0)) "h11",
SUM(decode(to_char(first_time, 'hh24'),'12',1,0)) "h12",
SUM(decode(to_char(first_time, 'hh24'),'13',1,0)) "h13",
SUM(decode(to_char(first_time, 'hh24'),'14',1,0)) "h14",
SUM(decode(to_char(first_time, 'hh24'),'15',1,0)) "h15",
SUM(decode(to_char(first_time, 'hh24'),'16',1,0)) "h16",
SUM(decode(to_char(first_time, 'hh24'),'17',1,0)) "h17",
SUM(decode(to_char(first_time, 'hh24'),'18',1,0)) "h18",
SUM(decode(to_char(first_time, 'hh24'),'19',1,0)) "h19",
SUM(decode(to_char(first_time, 'hh24'),'20',1,0)) "h20",
SUM(decode(to_char(first_time, 'hh24'),'21',1,0)) "h21",
SUM(decode(to_char(first_time, 'hh24'),'22',1,0)) "h22",
SUM(decode(to_char(first_time, 'hh24'),'23',1,0)) "h23"
from v$log_history
where 1=1
-- and standby_dest = 'NO'
group by trunc(first_time), to_char(first_time, 'Dy')
order by trunc(first_time)
/
! !
! Total ! h0 h1 h2 h3 h4 h5 h6 h7 h8 h9 h10 h11 h12 h13 h14 h15 h16 h17 h18 h19 h20 h21 h22 h23
Date Day ! (#) ! (#) (#) (#) (#) (#) (#) (#) (#) (#) (#) (#) (#) (#) (#) (#) (#) (#) (#) (#) (#) (#) (#) (#) (#)
--------- --- - ----- - ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ----
25-Sep-14 Thu | 4 | 0 0 0 0 0 0 0 0 0 0 0 0 0 4 0 0 0 0 0 0 0 0 0 0
26-Sep-14 Fri | 6 | 0 0 0 0 0 0 0 0 2 1 0 2 1 0 0 0 0 0 0 0 0 0 0 0
27-Sep-14 Sat | 4 | 0 0 0 0 0 0 0 0 0 3 0 1 0 0 0 0 0 0 0 0 0 0 0 0
05-Oct-14 Sun | 2 | 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 0 0 0 0 0 0 0
17-Oct-14 Fri | 1 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0
18-Oct-14 Sat | 10 | 0 0 0 0 0 0 0 0 0 0 0 1 5 4 0 0 0 0 0 0 0 0 0 0
19-Oct-14 Sun | 8 | 0 0 0 0 0 0 0 0 2 3 2 1 0 0 0 0 0 0 0 0 0 0 0 0
25-Oct-14 Sat | 1 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0
02-Nov-14 Sun | 4 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 3 0 0 0 0 0
--- instance-wise log switch history (RAC)
set pages 500 lines 500
col h0 format 9999
col h1 format 9999
col h2 format 9999
col h3 format 9999
col h4 format 9999
col h5 format 9999
col h6 format 9999
col h7 format 9999
col h8 format 9999
col h9 format 9999
col h10 format 9999
col h11 format 9999
col h12 format 9999
col h13 format 9999
col h14 format 9999
col h15 format 9999
col h16 format 9999
col h17 format 9999
col h18 format 9999
col h19 format 9999
col h20 format 9999
col h21 format 9999
col h22 format 9999
col h23 format 9999
SELECT TRUNC (first_time) "Date", inst_id, TO_CHAR (first_time, 'Dy') "Day",
COUNT (1) "Total",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '00', 1, 0)) "h0",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '01', 1, 0)) "h1",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '02', 1, 0)) "h2",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '03', 1, 0)) "h3",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '04', 1, 0)) "h4",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '05', 1, 0)) "h5",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '06', 1, 0)) "h6",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '07', 1, 0)) "h7",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '08', 1, 0)) "h8",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '09', 1, 0)) "h9",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '10', 1, 0)) "h10",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '11', 1, 0)) "h11",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '12', 1, 0)) "h12",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '13', 1, 0)) "h13",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '14', 1, 0)) "h14",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '15', 1, 0)) "h15",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '16', 1, 0)) "h16",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '17', 1, 0)) "h17",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '18', 1, 0)) "h18",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '19', 1, 0)) "h19",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '20', 1, 0)) "h20",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '21', 1, 0)) "h21",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '22', 1, 0)) "h22",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '23', 1, 0)) "h23",
ROUND (COUNT (1) / 24, 2) "Avg"
FROM gv$log_history
WHERE thread# = inst_id
AND first_time > sysdate -14
GROUP BY TRUNC (first_time), inst_id, TO_CHAR (first_time, 'Dy')
ORDER BY 1,2;
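The thread# = inst_id predicate assumes redo thread numbers match instance numbers, which is the usual mapping but not guaranteed. A variant of mine that avoids the assumption reads v$log_history once (the controlfile-based view already holds every thread's history) and groups by thread#:

SELECT thread#, TRUNC(first_time) "Date", TO_CHAR(first_time, 'Dy') "Day", COUNT(*) total
FROM   v$log_history
WHERE  first_time > SYSDATE - 14
GROUP  BY thread#, TRUNC(first_time), TO_CHAR(first_time, 'Dy')
ORDER  BY 2, 1;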
=====
http://anuj-singh.blogspot.com/2011_10_31_archive.html
set heading on
set echo off
set linesize 150
set pagesize 500
column day format a16 heading 'Day'
column d_0 format a3 heading '00'
column d_1 format a3 heading '01'
column d_2 format a3 heading '02'
column d_3 format a3 heading '03'
column d_4 format a3 heading '04'
column d_5 format a3 heading '05'
column d_6 format a3 heading '06'
column d_7 format a3 heading '07'
column d_8 format a3 heading '08'
column d_9 format a3 heading '09'
column d_10 format a3 heading '10'
column d_11 format a3 heading '11'
column d_12 format a3 heading '12'
column d_13 format a3 heading '13'
column d_14 format a3 heading '14'
column d_15 format a3 heading '15'
column d_16 format a3 heading '16'
column d_17 format a3 heading '17'
column d_18 format a3 heading '18'
column d_19 format a3 heading '19'
column d_20 format a3 heading '20'
column d_21 format a3 heading '21'
column d_22 format a3 heading '22'
column d_23 format a3 heading '23'
column Total format 99999999
column status format a8
column member format a80
column archived heading 'Archived' format a8
column bytes heading 'Bytes|(MB)' format 99999999
col group# format 999
Ttitle 'Log Info' skip 2
select l.group#,f.member,l.archived,l.bytes/1048576 bytes,l.status,f.type
from v$log l, v$logfile f
where l.group# = f.group#
/
Ttitle off
prompt =========================================================================================================================
Ttitle 'Log Switch on hour basis' skip 2
select to_char(FIRST_TIME,'DY, DD-MON-YYYY') day,
decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'00',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'00',1,0))) d_0,
decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'01',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'01',1,0))) d_1,
decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'02',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'02',1,0))) d_2,
decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'03',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'03',1,0))) d_3,
decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'04',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'04',1,0))) d_4,
decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'05',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'05',1,0))) d_5,
decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'06',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'06',1,0))) d_6,
decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'07',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'07',1,0))) d_7,
decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'08',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'08',1,0))) d_8,
decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'09',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'09',1,0))) d_9,
decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'10',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'10',1,0))) d_10,
decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'11',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'11',1,0))) d_11,
decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'12',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'12',1,0))) d_12,
decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'13',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'13',1,0))) d_13,
decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'14',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'14',1,0))) d_14,
decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'15',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'15',1,0))) d_15,
decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'16',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'16',1,0))) d_16,
decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'17',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'17',1,0))) d_17,
decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'18',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'18',1,0))) d_18,
decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'19',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'19',1,0))) d_19,
decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'20',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'20',1,0))) d_20,
decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'21',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'21',1,0))) d_21,
decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'22',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'22',1,0))) d_22,
decode(sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'23',1,0)),0,'-',sum(decode(substr(to_char(FIRST_TIME,'HH24'),1,2),'23',1,0))) d_23,
count(trunc(FIRST_TIME)) Total
from v$log_history
group by to_char(FIRST_TIME,'DY, DD-MON-YYYY')
order by to_date(substr(to_char(FIRST_TIME,'DY, DD-MON-YYYY'),5,15) )
/
set linesize 200 pagesize 1000
column day format a3
column total format 9999
column h00 format 999
column h01 format 999
column h02 format 999
column h03 format 999
column h04 format 999
column h04 format 999
column h05 format 999
column h06 format 999
column h07 format 999
column h08 format 999
column h09 format 999
column h10 format 999
column h11 format 999
column h12 format 999
column h13 format 999
column h14 format 999
column h15 format 999
column h16 format 999
column h17 format 999
column h18 format 999
column h19 format 999
column h20 format 999
column h21 format 999
column h22 format 999
column h23 format 999
column h24 format 999
break on report
compute max of "total" on report
compute max of "h00" on report
compute max of "h01" on report
compute max of "h02" on report
compute max of "h03" on report
compute max of "h04" on report
compute max of "h05" on report
compute max of "h06" on report
compute max of "h07" on report
compute max of "h08" on report
compute max of "h09" on report
compute max of "h10" on report
compute max of "h11" on report
compute max of "h12" on report
compute max of "h13" on report
compute max of "h14" on report
compute max of "h15" on report
compute max of "h16" on report
compute max of "h17" on report
compute max of "h18" on report
compute max of "h19" on report
compute max of "h20" on report
compute max of "h21" on report
compute max of "h22" on report
compute max of "h23" on report
compute sum of NUM on report
compute sum of GB on report
compute sum of MB on report
compute sum of KB on report
REM Script to Report the Redo Log Switch History
alter session set nls_date_format='DD MON YYYY';
select thread#, trunc(completion_time) as "date", to_char(completion_time,'Dy') as "Day", count(1) as "total",
sum(decode(to_char(completion_time,'HH24'),'00',1,0)) as "h00",
sum(decode(to_char(completion_time,'HH24'),'01',1,0)) as "h01",
sum(decode(to_char(completion_time,'HH24'),'02',1,0)) as "h02",
sum(decode(to_char(completion_time,'HH24'),'03',1,0)) as "h03",
sum(decode(to_char(completion_time,'HH24'),'04',1,0)) as "h04",
sum(decode(to_char(completion_time,'HH24'),'05',1,0)) as "h05",
sum(decode(to_char(completion_time,'HH24'),'06',1,0)) as "h06",
sum(decode(to_char(completion_time,'HH24'),'07',1,0)) as "h07",
sum(decode(to_char(completion_time,'HH24'),'08',1,0)) as "h08",
sum(decode(to_char(completion_time,'HH24'),'09',1,0)) as "h09",
sum(decode(to_char(completion_time,'HH24'),'10',1,0)) as "h10",
sum(decode(to_char(completion_time,'HH24'),'11',1,0)) as "h11",
sum(decode(to_char(completion_time,'HH24'),'12',1,0)) as "h12",
sum(decode(to_char(completion_time,'HH24'),'13',1,0)) as "h13",
sum(decode(to_char(completion_time,'HH24'),'14',1,0)) as "h14",
sum(decode(to_char(completion_time,'HH24'),'15',1,0)) as "h15",
sum(decode(to_char(completion_time,'HH24'),'16',1,0)) as "h16",
sum(decode(to_char(completion_time,'HH24'),'17',1,0)) as "h17",
sum(decode(to_char(completion_time,'HH24'),'18',1,0)) as "h18",
sum(decode(to_char(completion_time,'HH24'),'19',1,0)) as "h19",
sum(decode(to_char(completion_time,'HH24'),'20',1,0)) as "h20",
sum(decode(to_char(completion_time,'HH24'),'21',1,0)) as "h21",
sum(decode(to_char(completion_time,'HH24'),'22',1,0)) as "h22",
sum(decode(to_char(completion_time,'HH24'),'23',1,0)) as "h23"
from
v$archived_log
where first_time > trunc(sysdate-10)
and dest_id = (select dest_id from V$ARCHIVE_DEST_STATUS where status='VALID' and type='LOCAL')
group by thread#, trunc(completion_time), to_char(completion_time, 'Dy') order by 2,1;
REM Script to calculate the archive log size generated per day for each Instances.
select THREAD#, trunc(completion_time) as "DATE"
, count(1) num
, trunc(sum(blocks*block_size)/1024/1024/1024) as GB
, trunc(sum(blocks*block_size)/1024/1024) as MB
, sum(blocks*block_size)/1024 as KB
from v$archived_log
where first_time > trunc(sysdate-10)
and dest_id = (select dest_id from V$ARCHIVE_DEST_STATUS where status='VALID' and type='LOCAL')
group by thread#, trunc(completion_time)
order by 2,1
;
THREAD# DATE NUM GB MB KB
---------- ----------- ---------- ---------- ---------- ----------
1 10 MAY 2023 6 1 1924 1970919
2 10 MAY 2023 8 2 2994 3065932.5
1 11 MAY 2023 9 2 2972 3043817.5
2 11 MAY 2023 7 2 2200 2253288
1 12 MAY 2023 8 2 2571 2633673
2 12 MAY 2023 8 2 2762 2828996.5
1 13 MAY 2023 6 1 1683 1724133
2 13 MAY 2023 8 2 2760 2826771.5
1 14 MAY 2023 9 3 3376 3457436.5
2 14 MAY 2023 8 2 2448 2507321
1 15 MAY 2023 6 1 1673 1713856.5
2 15 MAY 2023 8 2 2551 2612875.5
1 16 MAY 2023 7 2 2344 2401019
2 16 MAY 2023 6 1 1854 1899008
1 17 MAY 2023 6 1 1804 1847339
2 17 MAY 2023 6 1 1900 1946536.5
1 18 MAY 2023 7 2 2188 2240819.5
2 18 MAY 2023 6 1 1986 2033785
1 19 MAY 2023 8 2 2774 2841225
2 19 MAY 2023 9 3 3106 3181476
1 20 MAY 2023 4 1 1090 1116758.5
2 20 MAY 2023 5 1 1442 1477387
---------- ---------- ---------- ----------
sum 155 37 50402 51624374
22 rows selected.
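If a single combined figure per day (all threads together) is easier to read, a minimal variant of the query above simply drops thread# from the grouping (same view, same 10-day window):
select trunc(completion_time) as "DATE"
, count(1) num
, round(sum(blocks*block_size)/1024/1024/1024,1) as GB
from v$archived_log
where first_time > trunc(sysdate-10)
and dest_id = (select dest_id from V$ARCHIVE_DEST_STATUS where status='VALID' and type='LOCAL')
group by trunc(completion_time)
order by 1;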
====================================
--- Backup info
set linesize 500 pagesize 300
col BACKUP_SIZE for a20
col START_TIME for a25
col END_TIME for a25
SELECT
INPUT_TYPE "BACKUP_TYPE",
--NVL(INPUT_BYTES/(1024*1024),0)"INPUT_BYTES(MB)",
--NVL(OUTPUT_BYTES/(1024*1024),0) "OUTPUT_BYTES(MB)",
STATUS,
TO_CHAR(START_TIME,'dd/MM/YYYY:hh24:mi:ss') as START_TIME,
TO_CHAR(END_TIME,'dd/MM/YYYY:hh24:mi:ss') as END_TIME,
TRUNC((ELAPSED_SECONDS/60),2) "ELAPSED_TIME(Min)",
--ROUND(COMPRESSION_RATIO,3)"COMPRESSION_RATIO",
--ROUND(INPUT_BYTES_PER_SEC/(1024*1024),2) "INPUT_BYTES_PER_SEC(MB)",
--ROUND(OUTPUT_BYTES_PER_SEC/(1024*1024),2) "OUTPUT_BYTES_PER_SEC(MB)",
--INPUT_BYTES_DISPLAY "INPUT_BYTES_DISPLAY",
OUTPUT_BYTES_DISPLAY "BACKUP_SIZE",
OUTPUT_DEVICE_TYPE "OUTPUT_DEVICE"
--INPUT_BYTES_PER_SEC_DISPLAY "INPUT_BYTES_PER_SEC_DIS",
--OUTPUT_BYTES_PER_SEC_DISPLAY "OUTPUT_BYTES_PER_SEC_DIS"
FROM V$RMAN_BACKUP_JOB_DETAILS
where start_time > SYSDATE -10
and INPUT_TYPE != 'ARCHIVELOG'
ORDER BY END_TIME DESC
/
set linesize 500 pagesize 300
col BACKUP_SIZE for a20
col START_TIME for a25
col END_TIME for a25
SELECT
INPUT_TYPE "BACKUP_TYPE",
--NVL(INPUT_BYTES/(1024*1024),0)"INPUT_BYTES(MB)",
--NVL(OUTPUT_BYTES/(1024*1024),0) "OUTPUT_BYTES(MB)",
STATUS,
TO_CHAR(START_TIME,'dd/MM/YYYY:hh24:mi:ss') as START_TIME,
TO_CHAR(END_TIME,'dd/MM/YYYY:hh24:mi:ss') as END_TIME,
TRUNC((ELAPSED_SECONDS/60),2) "ELAPSED_TIME(Min)",
--ROUND(COMPRESSION_RATIO,3)"COMPRESSION_RATIO",
--ROUND(INPUT_BYTES_PER_SEC/(1024*1024),2) "INPUT_BYTES_PER_SEC(MB)",
--ROUND(OUTPUT_BYTES_PER_SEC/(1024*1024),2) "OUTPUT_BYTES_PER_SEC(MB)",
--INPUT_BYTES_DISPLAY "INPUT_BYTES_DISPLAY",
OUTPUT_BYTES_DISPLAY "BACKUP_SIZE",
OUTPUT_DEVICE_TYPE "OUTPUT_DEVICE"
--INPUT_BYTES_PER_SEC_DISPLAY "INPUT_BYTES_PER_SEC_DIS",
--OUTPUT_BYTES_PER_SEC_DISPLAY "OUTPUT_BYTES_PER_SEC_DIS"
FROM V$RMAN_BACKUP_JOB_DETAILS
where start_time > SYSDATE -10
and INPUT_TYPE = 'ARCHIVELOG'
ORDER BY END_TIME DESC
/
set pagesize 300
select trunc(l.first_time) arch_date
, round(sum(decode(l.status,'D',blocks * block_size,0)/(1024 * 1024 * 1024))) deleted_gb
, round(sum(decode(l.status,'A',blocks * block_size,0)/(1024 * 1024 * 1024))) available_gb
, round(sum(decode(l.status,'U',blocks * block_size,0)/(1024 * 1024 * 1024))) unavailable_gb
, round(sum(decode(l.status,'X',blocks * block_size,0)/(1024 * 1024 * 1024))) expired_gb
, round(sum(blocks * block_size) /(1024 * 1024 * 1024)) total_size_gb
from gv$archived_log l
, gv$archive_dest n
where l.inst_id = n.inst_id
and l.dest_id = n.dest_id
and n.destination = 'USE_DB_RECOVERY_FILE_DEST'
group by trunc(l.first_time)
order by trunc(l.first_time)
/
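The recovery-area side of the same picture can be cross-checked from the FRA views; a minimal sketch, assuming db_recovery_file_dest is actually configured on this database:
set linesize 200
col name for a40
select name,
       round(space_limit/1024/1024/1024,1)       limit_gb,
       round(space_used/1024/1024/1024,1)        used_gb,
       round(space_reclaimable/1024/1024/1024,1) reclaimable_gb,
       number_of_files
from v$recovery_file_dest;
select file_type, percent_space_used, percent_space_reclaimable, number_of_files
from v$recovery_area_usage;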
set pagesize 3000 linesize 300
col name format a78
col thread# format 999999
col archived format a8
col applied format a7
col deleted format a7
col status format a11
alter session set nls_date_format='dd/mm/yyyy hh24:mi:ss';
select name,
sequence#,
thread#,
first_time,
next_time,
completion_time,
blocks,
archived,
applied,
deleted,
decode(status, 'A', 'Available', 'D', 'Deleted', 'U', 'Unavailable', 'X', 'Expired') status
from v$archived_log
where 1=1
-- name is not null
and creator='ARCH'
and completion_time> sysdate -2
order by 2;
set pagesize 3000 linesize 300
col name format a78
col thread# format 999999
col archived format a8
col applied format a7
col deleted format a7
col status format a11
col FIRST_CHANGE# for 9999999999999999
col NEXT_CHANGE# for 9999999999999999
alter session set nls_date_format='dd/mm/yyyy hh24:mi:ss';
select name,
sequence#,
thread#,
FIRST_CHANGE#,
NEXT_CHANGE#,
first_time,
next_time,
completion_time,
blocks,
archived,
applied,
deleted,
decode(status, 'A', 'Available', 'D', 'Deleted', 'U', 'Unavailable', 'X', 'Expired') status
from v$archived_log
where 1=1
-- name is not null
and creator='ARCH'
--and completion_time> sysdate -2
AND completion_time BETWEEN to_date('30-06-23 23:45','DD-MM-YY HH24:MI') AND to_date('01-07-23 00:15','DD-MM-YY HH24:MI')
order by 2;
set pages 1000
alter session set nls_date_format = 'DD-MON-YYYY HH24:MI:SS';
Prompt Daily Archive Log Generation :
select trunc(COMPLETION_TIME,'DD') Day, thread#, round(sum(BLOCKS*BLOCK_SIZE)/1024/1024/1024) GB,count(*) Archives_Generated from v$archived_log
group by trunc(COMPLETION_TIME,'DD'),thread#
order by 1;
Prompt Hr Archive Log Generation :
select trunc(COMPLETION_TIME,'HH') Hour,thread# , round(sum(BLOCKS*BLOCK_SIZE)/1024/1024/1024) GB,count(*) Archives from v$archived_log
group by trunc(COMPLETION_TIME,'HH'),thread#
order by 1 ;
delete archivelog until time "to_date('02.01.2020 22:00:00', 'dd.mm.yyyy hh24:mi:ss')";
alter session set nls_date_format = 'DD-MON-YYYY HH24:MI:SS' ;
set linesize 300
set numf 9999999999999999
col INPUT_BYTES_DISPLAY for a10
col OUTPUT_BYTES_DISPLAY for a10
col MIN_FIRST_TIME for a25
col MAX_NEXT_TIME for a25
select * from v$backup_archivelog_summary;
COLUMN thread# FORMAT 999 HEADING 'Thrd#'
COLUMN sequence# FORMAT 99999 HEADING 'Seq#'
COLUMN first_change# FORMAT 99999999999999 HEADING 'SCN Low#'
COLUMN next_change# HEADING 'SCN High#'
COLUMN archive_name FORMAT a50 HEADING 'Log File'
COLUMN first_time FORMAT a20 HEADING 'Switch Time'
COLUMN name FORMAT a30 HEADING 'Archive Log'
col SWITCH_CHANGE# FORMAT 99999999999999
SELECT
X.recid,
a.thread#,
a.sequence#,
a.first_change#,
a.switch_change#,
TO_CHAR(a.first_time,'DD-MON-YYYY HH24:MI:SS') first_time,x.name
FROM v$loghist a, v$archived_log x
WHERE 1=1
and a.first_time>(SELECT b.first_time-1 FROM v$loghist b WHERE b.switch_change# = (SELECT MAX(c.switch_change#) FROM v$loghist c)
)
AND x.recid(+)=a.sequence#;
CLEAR COLUMNS
TTITLE OFF
alter session set nls_date_format='dd-mm-yyyy hh24:mi:ss';
set linesize 300 pagesize 500 numf 99999999999999999999
col NAME for a80
select name,
sequence#,
thread#,
first_time,
next_time,
NEXT_CHANGE#,
completion_time,
blocks,
archived,
applied,
deleted,
decode(status, 'A', 'Available', 'D', 'Deleted', 'U', 'Unavailable', 'X', 'Expired') status
from v$archived_log
where 1=1
-- name is not null
and creator='ARCH'
and completion_time> sysdate -1
--and FIRST_CHANGE# >(select MAX_NEXT_CHANGE# from v$backup_archivelog_summary)
--and sequence#> 20670
--and first_time>'18-JAN-23'
order by 2;
list backup of datafile 1; -- Ckp SCN + 1 is the SCN until which you should recover.
list archivelog from scn SCN_bkupstart until scn SCN_bkupend+1; ---RESTORE PREVIEW
To find the archive log that contains a given SCN:
define SCN_bkupstart=4079881
alter session set nls_date_format='DD-MON-YYYY HH24:MI:SS';
col NAME for a60
select name, thread#, sequence#, status, first_time, next_time, first_change#, next_change# from v$archived_log
where &SCN_bkupstart between first_change# and next_change#;
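If the backup checkpoint SCN is not handy from the LIST BACKUP output, it can also be read from the control file. A minimal sketch against v$backup_datafile; datafile 1 and the "latest backup first" ordering are only illustrative:
col checkpoint_change# for 9999999999999999
select file#, checkpoint_change#,
       to_char(completion_time,'DD-MON-YYYY HH24:MI:SS') backup_completed
from v$backup_datafile
where file# = 1
order by completion_time desc;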
delete archivelog all;
delete archivelog until time 'SYSDATE-10';
delete archivelog from time 'SYSDATE-10' ;
delete archivelog from time 'SYSDATE-10' until time 'SYSDATE-2';
delete archivelog from sequence 100;
delete archivelog until sequence 100;
delete archivelog from sequence 100 until sequence 150;
DELETE NOPROMPT ARCHIVELOG UNTIL SEQUENCE = 3790;
delete noprompt force archivelog all backed up 1 times to sbt_tape
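Before running any of the DELETE variants above, it can help to see what is still on disk and how many times each log has been backed up. A minimal sanity-check sketch (BACKUP_COUNT in v$archived_log is 0 for logs never backed up):
set linesize 200
select trunc(completion_time) day,
       backup_count,
       count(*) logs,
       round(sum(blocks*block_size)/1024/1024/1024,1) gb
from v$archived_log
where deleted = 'NO'
group by trunc(completion_time), backup_count
order by 1, 2;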
define num_days=1
set linesize 300
col REDO_GRAPH for a40
col READ_GRAPH for a40
col WRITE_GRAPH for a40
SELECT redo_hist.snap_id AS SnapshotID
, TO_CHAR(redo_hist.snaptime, 'DD-MON HH24:MI:SS') as SnapshotTime
, ROUND(redo_hist.statval/elapsed_time/1048576,2) AS Redo_MBsec
, SUBSTR(RPAD('*', 20 * ROUND ((redo_hist.statval/elapsed_time) / MAX (redo_hist.statval/elapsed_time) OVER (), 2), '*'), 1, 20) AS Redo_Graph
, ROUND(physical_read_hist.statval/elapsed_time/1048576,2) AS Read_MBsec
, SUBSTR(RPAD('*', 20 * ROUND ((physical_read_hist.statval/elapsed_time) / MAX (physical_read_hist.statval/elapsed_time) OVER (), 2), '*'), 1, 20) AS Read_Graph
, ROUND(physical_write_hist.statval/elapsed_time/1048576,2) AS Write_MBsec
, SUBSTR(RPAD('*', 20 * ROUND ((physical_write_hist.statval/elapsed_time) / MAX (physical_write_hist.statval/elapsed_time) OVER (), 2), '*'), 1, 20) AS Write_Graph
FROM (SELECT s.snap_id
,g.value AS stattot
,s.end_interval_time AS snaptime
,NVL(DECODE(GREATEST(VALUE, NVL(lag (VALUE) OVER (PARTITION BY s.dbid, s.instance_number, g.stat_name
ORDER BY s.snap_id), 0)), VALUE, VALUE - LAG (VALUE) OVER (PARTITION BY s.dbid, s.instance_number, g.stat_name
ORDER BY s.snap_id), VALUE), 0) AS statval
,(EXTRACT(day FROM s.end_interval_time)-EXTRACT(day FROM s.begin_interval_time))*86400 +
(EXTRACT(hour FROM s.end_interval_time)-EXTRACT(hour FROM s.begin_interval_time))*3600 +
(EXTRACT(minute FROM s.end_interval_time)-EXTRACT(minute FROM s.begin_interval_time))*60 +
(EXTRACT(second FROM s.end_interval_time)-EXTRACT(second FROM s.begin_interval_time)) as elapsed_time
FROM dba_hist_snapshot s,
dba_hist_sysstat g,
v$instance i
WHERE s.snap_id = g.snap_id
AND s.begin_interval_time >= sysdate-NVL('&num_days', 0.5)
AND s.instance_number = i.instance_number
AND s.instance_number = g.instance_number
AND g.stat_name = 'redo size') redo_hist,
(SELECT s.snap_id
,g.value AS stattot
,NVL(DECODE(GREATEST(VALUE, NVL(lag (VALUE) OVER (PARTITION BY s.dbid, s.instance_number, g.stat_name
ORDER BY s.snap_id), 0)), VALUE, VALUE - LAG (VALUE) OVER (PARTITION BY s.dbid, s.instance_number, g.stat_name
ORDER BY s.snap_id), VALUE), 0) AS statval
FROM dba_hist_snapshot s,
dba_hist_sysstat g,
v$instance i
WHERE s.snap_id = g.snap_id
AND s.begin_interval_time >= sysdate-NVL('&num_days', 0.5)
AND s.instance_number = i.instance_number
AND s.instance_number = g.instance_number
AND g.stat_name = 'physical read total bytes') physical_read_hist,
(SELECT s.snap_id
,g.value AS stattot
,NVL(DECODE(GREATEST(VALUE, NVL(lag (VALUE) OVER (PARTITION BY s.dbid, s.instance_number, g.stat_name
ORDER BY s.snap_id), 0)), VALUE, VALUE - LAG (VALUE) OVER (PARTITION BY s.dbid, s.instance_number, g.stat_name
ORDER BY s.snap_id), VALUE), 0) AS statval
FROM dba_hist_snapshot s,
dba_hist_sysstat g,
v$instance i
WHERE s.snap_id = g.snap_id
AND s.begin_interval_time >= sysdate-NVL('&num_days', 0.5)
AND s.instance_number = i.instance_number
AND s.instance_number = g.instance_number
AND g.stat_name = 'physical write total bytes') physical_write_hist
WHERE redo_hist.snap_id = physical_read_hist.snap_id
AND redo_hist.snap_id = physical_write_hist.snap_id
ORDER BY 1;
===================================================
SRDC - Collect Data Guard Diagnostic Information (Doc ID 2219763.1)
select time "Day" ,round(max(value/1024),2) "Max_Redo_KB/s",round(avg(value)/1024,2) "Avg_Redo(top30_mins)_KB/s",round(avg(value)*60*20/1024,0) "Recommended_ORL_size_KB"
from (
with agg as (
select to_char(BEGIN_TIME,'DD/MON/YYYY') time,value, dense_rank() over (partition by to_char(BEGIN_TIME,'DD/MON/YYYY') order by value desc) as rank
from dba_hist_sysmetric_history where metric_name = 'Redo Generated Per Sec' and BEGIN_TIME > sysdate -7)
select time,value from agg where agg.rank < 31
)
group by time order by 1 desc
;
SELECT *
FROM ( SELECT instance_number
            , LAG(snap_id, 1, 0) OVER (PARTITION BY dbid, instance_number ORDER BY snap_id) first_snap_id
            , snap_id second_snap_id
            , begin_time
            , end_time
            , metric_name
            , metric_unit
            , ROUND(average/POWER(2, 20), 2) redo_gen_mbps
       FROM dba_hist_sysmetric_summary
      WHERE metric_name = 'Redo Generated Per Sec'
      ORDER BY instance_number, first_snap_id
     )
WHERE first_snap_id <> 0
;
INSTANCE_NUMBER FIRST_SNAP_ID SECOND_SNAP_ID BEGIN_TIME END_TIME METRIC_NAME METRIC_UNIT REDO_GEN_MBPS
--------------- ------------- -------------- ------------------- ------------------- ------------------------- ------------------ ---------------
1 2372 2373 2023-04-14 01:59:41 2023-04-14 02:59:41 Redo Generated Per Sec Bytes Per Second .25
1 2373 2374 2023-04-14 02:59:41 2023-04-14 03:59:41 Redo Generated Per Sec Bytes Per Second .23
1 2374 2375 2023-04-14 03:59:41 2023-04-14 04:59:41 Redo Generated Per Sec Bytes Per Second .39
1 2375 2376 2023-04-14 05:00:40 2023-04-14 06:01:41 Redo Generated Per Sec Bytes Per Second .20
1 2376 2377 2023-04-14 06:19:41 2023-04-14 07:20:41 Redo Generated Per Sec Bytes Per Second .09
1 2377 2378 2023-04-14 07:20:41 2023-04-14 08:04:40 Redo Generated Per Sec Bytes Per Second .13
1 2378 2379 2023-04-14 08:04:40 2023-04-14 08:59:41 Redo Generated Per Sec Bytes Per Second .18
======
SELECT
CAST(begin_interval_time AS DATE) begin_time
, AVG(CASE WHEN event_name = 'log file sync' THEN time_waited_micro/nullif(total_waits,0) END) avg_log_file_sync
, AVG(CASE WHEN event_name = 'log file parallel write' THEN time_waited_micro/nullif(total_waits,0) END) avg_log_file_parallel_write
FROM
dba_hist_snapshot
NATURAL JOIN
dba_hist_system_event
WHERE
event_name IN ('log file sync', 'log file parallel write')
AND begin_interval_time > SYSDATE - 15
GROUP BY CAST(begin_interval_time AS DATE)
ORDER BY
begin_time
/
set linesize 300 pagesize 300
define 3="TIMESTAMP'2023-07-18 12:00:00'"
define 4="TIMESTAMP'2023-07-20 12:30:00'"
col NAME for a70
col next_change# format 999999999999999999
col first_change# format 999999999999999999
select NAME,sequence#, to_char(first_time,'DD-MM-YYYY HH24:MI:SS') first_time, first_change#, to_char(next_time,'DD-MM-YYYY HH24:MI:SS') next_time,
next_change# from v$archived_log
where 1=1
and completion_time between &3 and &4
;
NAME SEQUENCE# FIRST_TIME FIRST_CHANGE# NEXT_TIME NEXT_CHANGE#
---------------------------------------------------------------------- ---------- ------------------- ---------------- ------------------- ----------------
1685 18-07-2023 07:00:09 2050309901 18-07-2023 16:00:03 2056880138
1686 18-07-2023 16:00:03 2056880138 18-07-2023 18:06:18 2058412688
1667 18-07-2023 01:00:03 2046039470 18-07-2023 18:06:21 2058415190
set linesize 300 pagesize 500 numf 99999999999999999999
col NAME for a80
col command for a120
select
'cp ' ||name ||' ' ||'/dumps/arch' command,
--'cp ' ||name ||' ' ||'/dumps/' ||substr(name,instr(name,'/',-1)+1, instr(substr(name,instr(name,'/',-1)+1),'.')-1 ) command,
--'+RECO01/*****_IND/archivelog/2023_01_18/'||name|| ' ' ||'/u02/backup/archivelogs/2016_05_03'||name,
sequence#
--,
/* thread#,
first_time,
next_time,
NEXT_CHANGE#,
completion_time,
blocks,
archived,
applied,
deleted,
decode(status, 'A', 'Available', 'D', 'Deleted', 'U', 'Unavailable', 'X', 'Expired') status
*/
from v$archived_log
where 1=1
and name is not null
-- and creator='ARCH'
--and completion_time> sysdate -1
--and FIRST_CHANGE# >(select MAX_NEXT_CHANGE# from v$backup_archivelog_summary)
--and sequence#> 20669
--and first_time>'18-JAN-23'
--order by 2
;
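One way to make the generated cp lines executable is to spool them into a shell script from SQL*Plus. A sketch only; /tmp/copy_arch.sh and /dumps/arch are illustrative paths, not anything the original script requires:
set heading off feedback off pagesize 0 linesize 400 trimspool on
spool /tmp/copy_arch.sh
select 'cp ' ||name ||' ' ||'/dumps/arch' from v$archived_log where name is not null;
spool off
host chmod +x /tmp/copy_arch.sh
-- review /tmp/copy_arch.sh, then run it from the shell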
with size !!!
set pagesize 3000 linesize 300
col name format a78
col thread# format 999999
col archived format a8
col applied format a7
col deleted format a7
col status format a11
col "Size (GB)" for 9999.99
alter session set nls_date_format='dd/mm/yyyy hh24:mi:ss';
select name,
sequence#,
thread#,
first_time,
next_time,
completion_time,
blocks,
blocks*block_size/1024/1024/1024 "Size (GB)",
archived,
applied,
deleted,
decode(status, 'A', 'Available', 'D', 'Deleted', 'U', 'Unavailable', 'X', 'Expired') status
from v$archived_log
where 1=1
-- name is not null
and creator='ARCH'
and completion_time> sysdate -1
order by 2;
set pagesize 500 linesize 300
COLUMN c1 format a10 HEADING "Month"
COLUMN c2 format a25 HEADING "Archive Date"
COLUMN c3 format 999 HEADING "Switches"
COMPUTE AVG OF c3 ON c1
COMPUTE AVG OF c3 ON REPORT
BREAK ON c1 SKIP 1 ON REPORT SKIP 1
SELECT TO_CHAR(TRUNC(first_time), 'Month') c1,
TO_CHAR(TRUNC(first_time), 'Day : DD-Mon-YYYY') c2, COUNT(*) c3
FROM v$log_history
WHERE TRUNC(first_time) > last_day(sysdate-100) +1
GROUP BY TRUNC(first_time);
REM Daily COUNT and Size of Redo Log Space (Single Instance)
SELECT A.*, ROUND(A.COUNT#*B.AVG#/1024/1024) Daily_Avg_Mb
FROM
(
SELECT
TO_CHAR(First_Time,'YYYY-MM-DD') DAY,
COUNT(1) COUNT#,
MIN(RECID) Min#,
MAX(RECID) Max#
FROM
v$log_history
GROUP BY
TO_CHAR(First_Time,'YYYY-MM-DD')
ORDER BY 1 DESC
) A,
(
SELECT
AVG(BYTES) AVG#,
COUNT(1) COUNT#,
MAX(BYTES) Max_Bytes,
MIN(BYTES) Min_Bytes
FROM v$log
) B
;
=====
ASMCMD [+] > cp +DATA/IRAC/ARCHIVELOG/2023_01_19/thread_2_seq_857.4137.1126502937 /dumps/arch
copying +DATA/IRAC/ARCHIVELOG/2023_01_19/thread_2_seq_857.4137.1126502937 -> /dumps/arch/thread_2_seq_857.4137.1126502937
======
cat asmcmdcp.sh
#!/bin/bash
. /home/grid/.bash_profile
for i in $(asmcmd ls +DATA/IBRAC/ARCHIVELOG/2023_01_19);
do asmcmd cp +DATA/IBRAC/ARCHIVELOG/2023_01_19/$i /dumps/arch;
done
chmod g+w /dumps/arch
chmod 700 asmcmdcp.sh
./asmcmdcp.sh
copying +DATA/IBRAC/ARCHIVELOG/2023_01_19/thread_2_seq_857.4137.1126502937 -> /dumps/arch/thread_2_seq_857.4137.1126502937
copying +DATA/IBRAC/ARCHIVELOG/2023_01_19/thread_2_seq_858.4175.1126502939 -> /dumps/arch/thread_2_seq_858.4175.1126502939
=====================
asmcmd ls +DATA/IBRAC/ARCHIVELOG/2023_01_19
thread_2_seq_857.4137.1126502937
thread_2_seq_858.4175.1126502939
===
RMAN>
backup format '/dumps/arch/%d_%s_%p_%c_%t.arc.bkp' archivelog from sequence 857 until sequence 858 thread 2;
run
{
set archivelog destination to '/dumps/arch/restore/';
restore archivelog from logseq=857 until logseq=858 thread=2;
}
RMAN> 2> 3> 4> 5>
executing command: SET ARCHIVELOG DESTINATION
Starting restore at 19-JAN-23
allocated channel: ORA_DISK_1
channel ORA_DISK_1: SID=7 instance=ibrac1 device type=DISK
channel ORA_DISK_1: starting archived log restore to user-specified destination
archived log destination=/dumps/arch/restore/
channel ORA_DISK_1: restoring archived log
archived log thread=2 sequence=857
channel ORA_DISK_1: restoring archived log
archived log thread=2 sequence=858
channel ORA_DISK_1: reading from backup piece /dumps/arch/IBRAC_1279_1_1_1126511064.arc.bkp
channel ORA_DISK_1: piece handle=/dumps/arch/IBRAC_1279_1_1_1126511064.arc.bkp tag=TAG20230119T074424
channel ORA_DISK_1: restored backup piece 1
channel ORA_DISK_1: restore complete, elapsed time: 00:00:01
Finished restore at 19-JAN-23
====
run
{
set archivelog destination to '/dumps/arch/restore1/';
restore archivelog from logseq=857 until logseq=858 thread=2 FORCE; --- use force
}
RMAN> 2> 3> 4> 5>
executing command: SET ARCHIVELOG DESTINATION
Starting restore at 19-JAN-23
allocated channel: ORA_DISK_1
channel ORA_DISK_1: SID=7 instance=ibrac1 device type=DISK
channel ORA_DISK_1: starting archived log restore to user-specified destination
archived log destination=/dumps/arch/restore/
channel ORA_DISK_1: restoring archived log
archived log thread=2 sequence=857
channel ORA_DISK_1: restoring archived log
archived log thread=2 sequence=858
channel ORA_DISK_1: reading from backup piece /dumps/arch/IBRAC_1279_1_1_1126511064.arc.bkp
channel ORA_DISK_1: piece handle=/dumps/arch/IBRAC_1279_1_1_1126511064.arc.bkp tag=TAG20230119T074424
channel ORA_DISK_1: restored backup piece 1
channel ORA_DISK_1: restore complete, elapsed time: 00:00:01
Finished restore at 19-JAN-23
=====
Excessive archive log generation during ....
set linesize 500 pagesize 300
col SQL for a50
SELECT to_char(begin_interval_time,'DD-MM-YYYY HH24:MI') WHEN,
dbms_lob.substr(sql_text,50,1) SQL,
dhss.instance_number INST_ID,
dhss.sql_id,
executions_delta exec_delta,
rows_processed_delta rows_proc_delta
FROM dba_hist_sqlstat dhss, dba_hist_snapshot dhs, dba_hist_sqltext dhst
WHERE 1=1
-- and upper(dhst.sql_text) LIKE '%USR_RACUNI_MV%'
AND ltrim(upper(dhst.sql_text)) NOT LIKE 'SELECT%'
AND dhss.snap_id=dhs.snap_id
AND dhss.instance_number=dhs.instance_number
AND dhss.sql_id=dhst.sql_id
AND begin_interval_time BETWEEN to_date('26-06-23 09:00','DD-MM-YY HH24:MI') AND to_date('26-06-23 10:00','DD-MM-YY HH24:MI')
and executions_delta> 10
order by 5 desc
;
set linesize 500 pagesize 300
col SQL for a50
col kill for a17
col EVENT for a27
SELECT
''''||sid ||','|| serial#||',@'||inst_id ||'''' kill,
--to_char(begin_interval_time,'DD-MM-YYYY HH24:MI') WHEN,
dbms_lob.substr(sql_text,50,1) SQL,
dhss.instance_number INST_ID,
dhss.sql_id,
executions_delta exec_delta,
rows_processed_delta rows_proc_delta ,
event
FROM dba_hist_sqlstat dhss, gv$session s, dba_hist_sqltext dhst
WHERE 1=1
-- and upper(dhst.sql_text) LIKE '%USR_RACUNI_MV%'
AND ltrim(upper(dhst.sql_text)) NOT LIKE 'SELECT%'
--AND dhss.snap_id=dhs.snap_id
AND dhss.instance_number=s.INST_ID
AND dhss.sql_id=dhst.sql_id
AND s.sql_id=dhst.sql_id
and executions_delta> 10
and status='ACTIVE'
order by 5 desc
;
https://github.com/tanelpoder/tpt-oracle/blob/master/awr/awr_evh.sql
alter session set nls_date_format='dd-mm-yyyy hh24:mi';
BREAK ON snap_begin SKIP 1 ON snap_end ON event_name
COL event_name FOR A40
SELECT
CAST(begin_interval_time AS DATE) snap_begin
, TO_CHAR(CAST(end_interval_time AS DATE), 'HH24:MI') snap_end
, event_name
, wait_time_milli
, CASE WHEN wait_count >= LAG(wait_count) OVER (PARTITION BY event_name,wait_time_milli ORDER BY CAST(begin_interval_time AS DATE)) THEN
wait_count - LAG(wait_count) OVER (PARTITION BY event_name,wait_time_milli ORDER BY CAST(begin_interval_time AS DATE))
ELSE
wait_count
END wait_count
FROM
dba_hist_snapshot
NATURAL JOIN
dba_hist_event_histogram
WHERE 1=1
and begin_interval_time > SYSDATE - 1/24
--AND event_name LIKE 'ASM file metadata operation'
--AND event_name LIKE 'flashback log switch'
-- AND event_name LIKE 'KSV master wait'
--AND wait_class = 'write complete waits'
and event_name='write complete waits'
ORDER BY
event_name
, snap_begin
, wait_time_milli
/
alter session set nls_date_format='dd-mm-yyyy hh24:mi';
with base_line as (
select
/*+ materialize */
snp.snap_id,
--to_char(snp.end_interval_time,'DD-MON-YY HH24') end_time,
to_char(snp.end_interval_time,'DD-MON-YY hh:mi') end_time,
snp.instance_number,
sst.value
from
dba_hist_snapshot snp,
dba_hist_sysstat sst
where snp.instance_number = sst.instance_number
and end_interval_time >= to_date(trunc(sysdate-nvl('&&history_days',30)))
and sst.dbid = snp.dbid
and sst.instance_number = snp.instance_number
and sst.snap_id = snp.snap_id
and sst.stat_name = 'redo size'
)
select
b1.end_time as snap_time ,
b1.instance_number as instance_number,
sum(b2.value - b1.value)/1024/1024 "redo_size (MB)"
from
base_line b1,
base_line b2
where
b2.snap_id = b1.snap_id + 1
and b2.instance_number = b1.instance_number
group by
b1.end_time, b1.instance_number
order by
b1.end_time
--, b1.instance_number
;
SNAP_TIME INSTANCE_NUMBER redo_size (MB)
--------------------------- --------------- -------------------
01-JUL-23 01:00 1 14,010.29
01-JUL-23 01:15 1 14,668.86
01-JUL-23 01:30 1 14,843.70
01-JUL-23 01:45 1 13,773.07
01-JUL-23 02:00 1 14,085.59
01-JUL-23 02:15 1 13,481.85
01-JUL-23 02:30 1 9,254.47
01-JUL-23 02:31 1 4,411.47
01-JUL-23 02:45 1 13,977.13
====
set linesize 300
col name for a80
alter session set nls_date_format='dd-mm-yyyy hh24:mi';
select COMPLETION_TIME, name
from v$archived_log
where 1=1
--and first_time > trunc(sysdate-10)
and dest_id = (select dest_id from V$ARCHIVE_DEST_STATUS where status='VALID' and type='LOCAL')
and name is not null
--group by name
order by 1
;
===
days of archive log history !!!
col DIFF for 999.99
alter session set nls_date_format='dd-mm-yyyy hh24:mi';
select max(COMPLETION_TIME) max_date, min(COMPLETION_TIME) min_date,
       max(COMPLETION_TIME)-min(COMPLETION_TIME) diff
from v$archived_log
where 1=1
--and first_time > trunc(sysdate-10)
and dest_id = (select dest_id from V$ARCHIVE_DEST_STATUS where status='VALID' and type='LOCAL')
and NAME is not null
;
backup info !!!
set linesize 400 pagesize 300
col NAME for a80
col HANDLE for a60
select unique a.name, HANDLE,
       TO_CHAR(a.FIRST_TIME, 'dd-mm-yyyy HH24:MI:SS'),
       TO_CHAR(a.NEXT_TIME, 'dd-mm-yyyy HH24:MI:SS'),
       (a.BLOCKS*a.BLOCK_SIZE),
       TO_CHAR(a.COMPLETION_TIME, 'dd-mm-yyyy HH24:MI:SS')
from v$backup_piece p, V$ARCHIVED_LOG a, V$BACKUP_REDOLOG b
where 1=1
and a.first_change#=b.first_change#
and a.SEQUENCE#=b.SEQUENCE#
and b.set_stamp=p.set_stamp
and b.set_count=p.set_count
/
col USERNAME for a20
col MACHINE for a30
col event for a18
col kill for a17
col OSUSER for a10
set linesize 300 pagesize 300
select
--b.inst_id,
b.SID, b.serial# sid_serial,
''''||b.sid||','||b.serial#||',@'||b.inst_id||'''' kill,
TYPE,
--con_id,
b.username, machine, b.osuser, b.status,
a.redo_mb MB, b.sql_id, b.prev_sql_id, b.event, PROCESS
from (select n.inst_id, sid, round(value/1024/1024) redo_mb
      from gv$statname n, gv$sesstat s
      where n.inst_id=s.inst_id
      --and n.statistic#=134
      and NAME like 'redo%'
      and s.statistic# = n.statistic#
      order by value desc
     ) a,
     gv$session b
where b.inst_id=a.inst_id
and a.sid = b.sid
and rownum <= 50;
SELECT s.inst_id, s.sid, s.serial#, p.spid, s.username, s.program
FROM gv$session s
JOIN gv$process p ON p.addr = s.paddr AND p.inst_id = s.inst_id
WHERE 1=1
--and s.PROCESS=19842
--and s.type != 'BACKGROUND'
;
===================================================
set linesize 400 pagesize 400
col name for a30
col value for a60
select name, value
from v$parameter
where (regexp_like(name, '^log_archive_(dest|dest\_([1-9]))$', 'i')
       or name like '%dump_dest'
       or name like '%file_dest'
       or name like 'diag%dest'
       or name ='utl_file_dir'
      )
and value is not null;
NAME                           VALUE
------------------------------ ------------------------------------------------------------
db_create_file_dest            +DATADISK
db_recovery_file_dest          +DATADISK
background_dump_dest           /u01/app/oracle/product/19.0.0/db_1/rdbms/log
user_dump_dest                 /u01/app/oracle/product/19.0.0/db_1/rdbms/log
core_dump_dest                 /u01/app/oracle/diag/rdbms/oradb/oradb/cdump
audit_file_dest                /u01/app/oracle/admin/oradb/adump
diagnostic_dest                /u01/app/oracle
7 rows selected.
from Web
define num_days=1
col num_days new_value num_days noprint
set feed on term on echo off tab off
col log_date format a20 head 'LOG DATE'
col bytes format 99,999,999,999,999 heading Gb
col bytes_today format 99,999,999,999,999 heading Gb_today
col bytes_Nday format 99,999,999,999,999 heading Gb_NDAY
set pagesize 100
set linesize 200 trimspool on
-- for repeated use a temp table of archive logs
-- is *much* faster
--define src_table='archlogs'
define src_table='v$archived_log'
with rawlogs as (
  select distinct a.first_time, a.sequence#, a.thread#, a.block_size, a.blocks
  from &src_table a
  order by a.first_time, a.sequence#, a.thread#
),
logdaysums as (
  select trunc(r.first_time) log_date
       , sum(block_size * blocks) bytes
  from rawlogs r
  group by trunc(r.first_time)
),
ndaysums as (
  select l.log_date
       , &num_days days
       , l.bytes/1024/1024/1024 bytes_today
       , sum(l.bytes/1024/1024/1024) over (order by l.log_date rows (&num_days - 1) preceding) bytes_Nday
  from logdaysums l
  group by log_date, &num_days, bytes/1024/1024/1024
  order by l.log_date
)
select to_char(n.log_date,'yyyy-mm-dd') log_date, n.days, n.bytes_today, n.bytes_Nday from ndaysums n
union all
select '== AVG N DAYS =====', null, null, null from dual
union all
select 'AVERAGES' log_date, n.days, avg(n.bytes_today) bytes_today, avg(n.bytes_Nday) bytes_Nday from ndaysums n group by 'AVERAGES', n.days
union all
select '== MEDIAN N DAYS ===', null, null, null from dual
union all
select 'MEDIAN' log_date, n.days, median(n.bytes_today) bytes_today, median(n.bytes_Nday) bytes_Nday from ndaysums n group by 'MEDIAN', n.days
union all
select '== MIN N DAYS =====', null, null, null from dual
union all
select to_char(n2.log_date,'yyyy-mm-dd') log_date, n2.days, null, n2.bytes_Nday
from ndaysums n2
where (n2.bytes_Nday) in ( select min(n3.bytes_Nday) bytes_Nday from ndaysums n3 )
union all
select '== MAX N DAYS =====', null, null, null from dual
union all
select to_char(n2.log_date,'yyyy-mm-dd') log_date, n2.days, null, n2.bytes_Nday
from ndaysums n2
where (n2.bytes_Nday) in ( select max(n3.bytes_Nday) bytes_Nday from ndaysums n3 )
/
******************** Find sessions which are generating more archive logs
set linesize 300
col username for a20
SELECT ''''||s.sid ||','|| s.serial#||',@'||s.inst_id ||'''' kill
     , s.username, s.program, i.block_changes, s.sql_id
FROM gv$session s, gv$sess_io i
WHERE s.sid = i.sid
and s.inst_id = i.inst_id
ORDER BY 5 desc, 1, 2, 3, 4
;
set linesize 300
col username for a20
col kill for a17
SELECT ''''||s.sid ||','|| s.serial#||',@'||s.inst_id ||'''' kill
     , s.username, s.program, t.used_ublk, t.used_urec
FROM gv$session s, gv$transaction t
WHERE s.taddr = t.addr
and s.inst_id = t.inst_id
ORDER BY 1, 2, 3, 4;
set linesize 300
col sql_text for a50 wrap
col kill for a17
select ''''||s.sid ||','|| s.serial#||',@'||s.inst_id ||'''' kill
     , s.sql_id, t.USED_UREC Records, t.USED_UBLK Blocks, (t.USED_UBLK*8192/1024) KBytes, sql.sql_text sql_text
from gv$transaction t, gv$session s, gv$sql sql
where 1=1
and t.addr = s.taddr
and t.inst_id = s.inst_id
and s.sql_id = sql.sql_id
and s.inst_id = sql.inst_id
--and s.username ='USERNAME'
and s.sid=&1
;
col sql_text for a50 wrap
SELECT ''''||s.sid ||','|| s.serial#||',@'||s.inst_id ||'''' kill
     , s.sql_id, sq.SQL_FULLTEXT sql_text
FROM GV$SESSION s, gv$sql sq, gv$sess_io io
WHERE s.sql_address = sq.address(+)
AND s.sql_hash_value = sq.hash_value(+)
AND s.sid = io.sid(+)
AND s.inst_id = io.inst_id(+)
AND s.sql_child_number = sq.child_number (+)
AND s.TYPE = 'USER'
and s.sql_id is not null
AND s.STATUS <> 'INACTIVE'
;
set linesize 300 pagesize 300
col kill for a15
col USERNAME for a24
col "Roll Status" for a27
col EVENT for a28
select ''''||s.sid ||','|| s.serial#||',@'||s.inst_id ||'''' kill
     , s.username, sql_id, t.used_urec, t.used_ublk, event
     , case when bitand(flag,power(2,7)) > 0 then 'Rolling Back' else 'Not Rolling Back' end as "Roll Status"
from gv$session s, gv$transaction t
where 1=1
and s.saddr = t.ses_addr
and s.inst_id = t.inst_id
order by t.used_ublk desc
;
set linesize 300 pagesize 300
col kill for a15
col USERNAME for a24
col "Roll Status" for a27
col EVENT for a28
col CLIENT_INFO for a20
select ''''||s.sid ||','|| s.serial#||',@'||s.inst_id ||'''' kill, USERNAME, s.client_info, t.addr, sql_id, sum(t.used_ublk) used_ublk
     , case when bitand(flag,power(2,7)) > 0 then 'Rolling Back' else 'Not Rolling Back' end as "Roll Status"
from gv$transaction t, gv$session s
where t.addr = s.taddr
and s.inst_id = t.inst_id
group by ''''||s.sid ||','|| s.serial#||',@'||s.inst_id ||'''', USERNAME, s.client_info, t.addr, sql_id, bitand(flag,power(2,7))
;
set linesize 300 pagesize 300
col kill for a15
col SQL_TEXT for a50 wrap
select ''''||s.sid ||','|| s.serial#||',@'||s.inst_id ||'''' kill, t.used_urec records, t.used_ublk blocks, (t.used_ublk*8192/1024) kb, sql.sql_text
from gv$transaction t, gv$session s, gv$sql sql
where t.addr=s.taddr
and s.sql_id = sql.sql_id
and s.inst_id = sql.inst_id
and s.inst_id = t.inst_id
and s.sql_id='&sql_id'
-- and s.username ='USERNAME'
;
=====
from oracheck
To verify redo log size, as the owner userid of a given database, and with the environment properly set to access that database, execute the following command set:
function logswitches {
$ORACLE_HOME/bin/sqlplus -s '/ as sysdba' <<EOF
set feedback off newpage none termout on
alter session set nls_date_format='YYYY/MM/DD HH24:MI:SS';
select * from (
select thread#,sequence#,first_time "LOG START TIME",(blocks*block_size/1024/1024)/((next_time-first_time)*86400) "REDO RATE(MB/s)", \
(((blocks*block_size)/a.average)*100) pct_full
from v\$archived_log, (select avg(bytes) average from v\$log) a
where ((next_time-first_time)*86400<300)
and first_time > (sysdate-90)
and (((blocks*block_size)/a.average)*100)>80
and dest_id=1
order by 4 desc
)
where rownum<11;
exit
EOF
}
export SWITCHES=$(logswitches)
if [ $(echo "$SWITCHES"| wc -l) -le 1 ]
then
echo -e "SUCCESS: Redo logs are appropriately sized"
else
echo
echo -e "WARNING: Redo logs are potentially mis-sized. Below is a list of archived logs from"
echo -e "the previous 90 days which were active for less than 5 minutes and the redo rate seen"
echo -e "for the duration of that log. These indicate the peak redo rate. Resizing of the log"
echo -e "files to accomodate this rate may be required.\n"
echo "$SWITCHES"
fi
===
ROW_NUM_NOPRINT THREAD# YYYY_MM_DD DAY H00 H01 H02 H03 H04 H05 H06 H07 H08 H09 H10 H11 H12 H13 H14 H15 H16 H17 H18 H19 H20 H21 H22 H23 TOT_GB CNT AVG_GB
--------------- ------- ---------- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- ------ --- ------
             12       1 2025-02-15 Sat   4   4   4   4   4   4   4   4   4   4   4   4   4   4   4   4   4   4   4   4   4   4   4   4  103.9  96    1.1
             13       1 2025-02-14 Fri   4   4   4   4   4   4   4   4   4   4   4   4   4   4   4   4   4   4   4   4   5   4   4   4  305.5  97    3.1
             14       1 2025-02-13 Thu   4   4   4   4   4   4   4   4   4   4   4   4   4   4   4   4   4   4   4   5   4   4   4   4  296.3  97    3.1
14 rows selected.
https://github.com/guestart/Oracle-SQL-Scripts/blob/master/awr_trend/acquire_redo_gen_mbps.sql
SET LINESIZE 200 PAGESIZE 200
COLUMN metric_name FORMAT a25
COLUMN metric_unit FORMAT a18
COLUMN redo_gen_mbps FORMAT 999,999,999.99
ALTER SESSION SET nls_date_format = 'dd-mm-yyyy hh24:mi:ss';
WITH dhsh AS (
  SELECT *
  FROM ( SELECT instance_number
              , LAG(snap_id, 1, 0) OVER(PARTITION BY dbid, instance_number ORDER BY snap_id) first_snap_id
              , snap_id second_snap_id
              , MIN(begin_time) begin_time
              , MAX(end_time) end_time
              , metric_name
              , metric_unit
              , SUM(value/POWER(2, 20)*(intsize/1e2)) redo_gen_mb_size
              , (MAX(end_time)-MIN(begin_time))*24*36e2 interval_secs
         FROM dba_hist_sysmetric_history
        WHERE metric_name = 'Redo Generated Per Sec'
        GROUP BY dbid, instance_number, snap_id, metric_name, metric_unit
        ORDER BY instance_number, first_snap_id
       )
  WHERE first_snap_id <> 0
)
SELECT instance_number
     , first_snap_id
     , second_snap_id
     , begin_time
     , end_time
     , metric_name
     , metric_unit
     , ROUND(redo_gen_mb_size/interval_secs, 2) redo_gen_mbps
FROM dhsh
;
or
SELECT *
FROM ( SELECT instance_number
            , LAG(snap_id, 1, 0) OVER(PARTITION BY dbid, instance_number ORDER BY snap_id) first_snap_id
            , snap_id second_snap_id
            , begin_time
            , end_time
            , metric_name
            , metric_unit
            , ROUND(average/POWER(2, 20), 2) redo_gen_mbps
       FROM dba_hist_sysmetric_summary
      WHERE metric_name = 'Redo Generated Per Sec'
      ORDER BY instance_number, first_snap_id
     )
WHERE first_snap_id <> 0
;
Who is generating the redo? To find these segments we can also use queries:
DEF 3="TIMESTAMP'2025-05-12 07:15:00'"
DEF 4="TIMESTAMP'2025-05-13 14:15:00'"
SELECT to_char(begin_interval_time,'YY-MM-DD HH24') snap_time,
       dhso.object_name,
       sum(db_block_changes_delta) BLOCK_CHANGED
FROM dba_hist_seg_stat dhss, dba_hist_seg_stat_obj dhso, dba_hist_snapshot dhs
WHERE dhs.snap_id = dhss.snap_id
AND dhs.instance_number = dhss.instance_number
AND dhss.obj# = dhso.obj#
AND dhss.dataobj# = dhso.dataobj#
AND begin_interval_time BETWEEN &3 and &4
-- to_date('11-01-28 13:00','YY-MM-DD HH24:MI') <<<<<<<<<<<< Need to modify the time as per the above query where more redo log switch happened (keep it for 1 hour)
--AND to_date('11-01-28 14:00','YY-MM-DD HH24:MI') <<<<<<<<<<<< Need to modify the time as per the above query where more redo log switch happened (interval shld be only 1 hour)
GROUP BY to_char(begin_interval_time,'YY-MM-DD HH24'), dhso.object_name
HAVING sum(db_block_changes_delta) > 1536
ORDER BY sum(db_block_changes_delta) desc
;
-- Then : What SQL was causing redo log generation :
SELECT to_char(begin_interval_time,'YYYY_MM_DD HH24') WHEN,
       dbms_lob.substr(sql_text,4000,1) SQL,
       dhss.instance_number INST_ID,
       dhss.sql_id,
       executions_delta exec_delta,
       rows_processed_delta rows_proc_delta
FROM dba_hist_sqlstat dhss, dba_hist_snapshot dhs, dba_hist_sqltext dhst
WHERE 1=1
-- upper(dhst.sql_text) LIKE '%SDO%' --->>>>>>>>>>>>>>>>>> Update the segment name as per the result of previous query result
AND ltrim(upper(dhst.sql_text)) NOT LIKE 'SELECT%'
AND dhss.snap_id=dhs.snap_id
AND dhss.instance_number=dhs.instance_number
AND dhss.sql_id=dhst.sql_id
AND begin_interval_time BETWEEN &3 and &4
--AND begin_interval_time BETWEEN to_date('11-01-28 13:00','YY-MM-DD HH24:MI') >>>>>>>>>>>> Update time frame as required
--AND to_date('11-01-28 14:00','YY-MM-DD HH24:MI') >>>>>>>>>>>> Update time frame as required
;
The db_block_changes_delta column identifies the segment that experienced the most changes during the period.
select a.tablespace_name
from sys.dba_data_files a, sys.v_$backup b
where b.status = 'ACTIVE'
and b.file# = a.file_id
group by a.tablespace_name;
define 3="TIMESTAMP'2025-05-14 07:00:00'"
define 4="TIMESTAMP'2025-05-14 08:00:00'"
set linesize 300 pagesize 300
col OBJECT_NAME for a40
select to_char(begin_interval_time,'dd-mm-yyyy HH24:MI') snap_time, dho.CON_ID, dho.object_name,
       sum(db_block_changes_delta) sum_db_block_changes_delta
from dba_hist_seg_stat dhs, dba_hist_seg_stat_obj dho, dba_hist_snapshot dhsot
where dhsot.snap_id = dhs.snap_id
and dhsot.instance_number = dhs.instance_number
and dhs.obj# = dho.obj#
and dhs.dataobj# = dho.dataobj#
and begin_interval_time between &3 and &4
group by to_char(begin_interval_time,'dd-mm-yyyy HH24:MI'), dho.CON_ID, dho.object_name
having sum(db_block_changes_delta) > 10
;
Sql to examine the changes during the period
define obj='CUSTOMER_DATA' --- <<<<
select to_char(begin_interval_time,'dd-mm-yyyy HH24:MI') snap_time, dhs.con_id,
       sum(db_block_changes_delta) db_block_changes_delta
from dba_hist_seg_stat dhs, dba_hist_seg_stat_obj dho, dba_hist_snapshot dhsot
where dhsot.snap_id = dhs.snap_id
and dhsot.instance_number = dhs.instance_number
and dhs.obj# = dho.obj#
and dhs.dataobj# = dho.dataobj#
-- and dho.object_name = '&obj'
group by dhs.con_id, to_char(begin_interval_time,'dd-mm-yyyy HH24:MI')
;
SQL statements that were run during the period that referenced the table
set linesize 300 pagesize 300
col SQL_TEXT for a100
select to_char(begin_interval_time,'dd-mm-yyyy HH24:MI'), dhs.instance_number, dhs.con_id, dhs.sql_id, PLAN_HASH_VALUE, executions_delta, rows_processed_delta,
       dbms_lob.substr(sql_text,100,1) sql_text
from dba_hist_sqlstat dhs, dba_hist_snapshot dhsot, dba_hist_sqltext dhst
where 1=1
--and upper(dhst.sql_text) like '%BALANCE%'
and dhs.snap_id=dhsot.snap_id
and dhs.instance_number=dhsot.instance_number
and dhs.sql_id = dhst.sql_id
;
Active session history for the SQL during the period
set linesize 300 pagesize 300
VARIABLE BgnSnap NUMBER
VARIABLE EndSnap NUMBER
exec select max(snap_id) -100 into :BgnSnap from dba_hist_snapshot ;
exec select max(snap_id) into :EndSnap from dba_hist_snapshot ;
col EVENT for a27
select instance_number, to_char(sample_time,'dd-mm-yyyy hh24:mi:ss'), con_id, sql_id, SQL_PLAN_HASH_VALUE, event, user_id, program
from dba_hist_active_sess_history
where 1=1
and sql_id = '1m8z8cvabtqyk'
and snap_id between :BgnSnap and :EndSnap
;
set linesize 300 pagesize 1000
col PLAN_TABLE_OUTPUT for a150
select plan_table_output
from table(dbms_xplan.DISPLAY_AWR(
       sql_id => '1m8z8cvabtqyf'
     , plan_hash_value => 1425214927
     , -- format => 'TYPICAL'
       FORMAT => 'TYPICAL +ROWS +BYTES +COST +PARALLEL +PARTITION +IOSTATS +MEMSTATS +ALIAS +PEEKED_BINDS +OUTLINE +PREDICATE -PROJECTION +REMOTE +NOTE'
     ))
;
VARIABLE BgnSnap NUMBER
VARIABLE EndSnap NUMBER
VARIABLE DID NUMBER
VARIABLE INST_NUMBER number
exec select max(snap_id) -24 into :BgnSnap from dba_hist_snapshot ;
exec select max(snap_id) into :EndSnap from dba_hist_snapshot ;
set linesize 300
col PLAN_TABLE_OUTPUT for a200
select
--s.elapsed_time_delta,
--s.buffer_gets_delta,
-- s.disk_reads_delta,
cursor(select * from table(dbms_xplan.display_awr(t.sql_id, s.plan_hash_value)))
from dba_hist_sqltext t, dba_hist_sqlstat s
where t.dbid = s.dbid
and t.sql_id = s.sql_id
--and s.snap_id between :BgnSnap and :EndSnap
and t.sql_id = '1m8z8cvabtqyk'
and rownum < 3
;