Wednesday, 27 May 2015

Changing NLS_CHARACTERSET From WE8MSWIN1252 to UTF8

Before beginning, make sure:

1) A full, restorable backup of the database is available
2) A backup of the current spfile has been taken (especially for RAC databases) - a quick example follows
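
For the spfile, one simple way to keep a restorable copy is to dump it to a pfile (the file name below is only an example):

sqlplus / as sysdba
SQL> CREATE PFILE='/home/oracle/sunil/pfile_before_utf8.ora' FROM SPFILE;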



#############################################
STEP 1) Install the CSSCAN schema (CSMIG) if it is not already installed
#############################################


@?/rdbms/admin/csminst.sql






###########################################################################################
STEP 2) Record the SCN and parameter values at this point, in case a restore is needed
###########################################################################################

Confirm that the full database and archivelog backups have completed.

sqlplus '/ as sysdba'

----Note the parameters--------------------------------------------

SQL> SELECT * FROM NLS_DATABASE_PARAMETERS;


SQL> SELECT value$ FROM sys.props$ WHERE name = 'NLS_CHARACTERSET';

VALUE$
--------------------------------------------------------------------------------
WE8MSWIN1252


SQL> SELECT PARAMETER, VALUE FROM NLS_DATABASE_PARAMETERS WHERE PARAMETER IN ('NLS_CHARACTERSET', 'NLS_NCHAR_CHARACTERSET');

PARAMETER                      VALUE
------------------------------ ----------------------------------------
NLS_NCHAR_CHARACTERSET         AL16UTF16
NLS_CHARACTERSET               WE8MSWIN1252


----------------Record the SCN number at this point----------------

SQL>alter system checkpoint;

SQL>alter system switch logfile;

SQL>alter system archive log current;

SQL>alter system archive log current;

SQL>col first_change#  format 999999999999999999999

SQL>select max(first_change#) first_change# from v$archived_log;

-- Note down this SCN. This is the value that would be used for a point-in-time recovery if a restore became necessary.

         FIRST_CHANGE#
----------------------
        11247993892019
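
-- For reference only: if a fallback to this point were ever needed, a point-in-time
-- restore to the recorded SCN would look roughly like the RMAN outline below
-- (an assumption-level sketch, not part of the conversion itself).

rman target /

RMAN> STARTUP FORCE MOUNT;
RMAN> RUN {
        SET UNTIL SCN 11247993892019;
        RESTORE DATABASE;
        RECOVER DATABASE;
      }
RMAN> ALTER DATABASE OPEN RESETLOGS;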

--------------Record the show parameter original values-------------------

show parameter JOB_QUEUE_PROCESSES

NAME                                 TYPE        VALUE
------------------------------------ ----------- ------------------------------
job_queue_processes                  integer     1000



show parameter aq_tm_processes

NAME                                 TYPE        VALUE
------------------------------------ ----------- ------------------------------
aq_tm_processes                      integer     1


#################################################################################
STEP 3) Run a FULL csscan of the database
#################################################################################

cat csscan.par 

FULL=Y
FROMCHAR=WE8MSWIN1252
TOCHAR=UTF8
ARRAY=1024000
LOG=SUNIL_WE8MSWIN1252
CAPTURE=Y
PROCESS=60

nohup csscan \"sys/XXXXXXX as sysdba\" parfile=csscan.par &


-- Once csscan is scanning the tables, v$session_longops can be used to follow the progress on big tables:

SET pages 1000
SELECT target,
TO_CHAR(start_time,'HH24:MI:SS - DD-MM-YY'),
time_remaining,
sofar,
totalwork,
sid,
serial#,
opname
FROM v$session_longops
WHERE sid IN
(SELECT sid FROM v$session WHERE upper(program) LIKE 'CSSCAN%'
)
AND sofar < totalwork
ORDER BY start_time
/




#################################################################################
STEP 4) Check the report SUNIL_WE8MSWIN1252.txt for any tablespace expansion requirements; if any are found, add the space accordingly (see the example below).
#################################################################################
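
If the report flags tablespaces that need room for the converted data, add the space up front so the conversion and the later re-import do not fail on expansion. A minimal sketch - the tablespace name, file paths and sizes below are only examples:

-- current free space per tablespace
SELECT tablespace_name, ROUND(SUM(bytes)/1024/1024) free_mb
FROM dba_free_space
GROUP BY tablespace_name
ORDER BY 1;

-- add a datafile, or resize an existing one
ALTER TABLESPACE USERS ADD DATAFILE '/u01/oradata/users02.dbf' SIZE 10G AUTOEXTEND ON NEXT 1G MAXSIZE 30G;
ALTER DATABASE DATAFILE '/u01/oradata/users01.dbf' RESIZE 20G;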

#################################################################################
STEP 5) Do the required cleanups and document them according to the report.
#################################################################################


   
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1) First, remove all objects in the recycle bin
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~


SELECT OWNER, ORIGINAL_NAME, OBJECT_NAME, TYPE FROM dba_recyclebin ORDER BY 1,2;

If there are objects in the recycle bin, then run:

PURGE DBA_RECYCLEBIN;


*************************
2.a) Invalid objects
*************************

SELECT owner,
object_name,
object_type,
status
FROM dba_objects
WHERE status ='INVALID'
/


If there are any invalid objects, run utlrp.sql:

SQL> @?/rdbms/admin/utlrp.sql


If any objects are still invalid after running utlrp.sql, record them for comparison after the conversion:

select count(*) from dba_objects where status='INVALID';

  COUNT(*)
----------
       642

create table SNANABALA.BEFORE_INVALID_OBJECTS as select * from dba_objects where status='INVALID';

*******************************************************
2.b) Orphaned Datapump master tables (10g and up)
*******************************************************

SELECT o.status,
o.object_id,
o.object_type,
o.owner
||'.'
||object_name "OWNER.OBJECT"
FROM dba_objects o,
dba_datapump_jobs j
WHERE o.owner =j.owner_name
AND o.object_name=j.job_name
AND j.job_name NOT LIKE 'BIN$%'
ORDER BY 4,2
/


output :- 

STATUS   OBJECT_ID OBJECT_TYPE
------- ---------- -------------------
OWNER.OBJECT
--------------------------------------------------------------------------------
VALID       722580 TABLE
CM.SYS_IMPORT_TABLE_01

VALID        88292 TABLE
OPS$ORACLE.SYS_IMPORT_FULL_01


See Note 336014.1 How To Cleanup Orphaned DataPump Jobs In DBA_DATAPUMP_JOBS ?

-- drop above datapump jobs

SQL> drop table CM.SYS_IMPORT_TABLE_01;

Table dropped.

SQL> drop table OPS$ORACLE.SYS_IMPORT_FULL_01;

Table dropped.


-- Recheck Again

SELECT o.status,
o.object_id,
o.object_type,
o.owner
||'.'
||object_name "OWNER.OBJECT"
FROM dba_objects o,
dba_datapump_jobs j
WHERE o.owner =j.owner_name
AND o.object_name=j.job_name
AND j.job_name NOT LIKE 'BIN$%'
ORDER BY 4,2
/

no rows selected.




*********************************************************************************
3) Use the query below to find the data dictionary columns that need manual intervention before running csalter (convertible CLOB data in the dictionary is excluded, because csalter converts it itself):
*********************************************************************************

SELECT DISTINCT z.owner_name
|| '.'
   || z.table_name
|| '('
   || z.column_name
|| ') - '
|| z.column_type
|| ' - '
|| z.error_type
|| ' ' NotHandledDataDictColumns
FROM csmig.csmv$errors z
WHERE z.owner_name IN
(SELECT DISTINCT username FROM csmig.csm$dictusers
) minus
SELECT DISTINCT z.owner_name
|| '.'
|| z.table_name
|| '('
|| z.column_name
|| ') - '
|| z.column_type
|| ' - '
|| z.error_type
|| ' ' DataDictConvCLob
FROM csmig.csmv$errors z
WHERE z.error_type ='CONVERTIBLE'
AND z.column_type = 'CLOB'
AND z.owner_name IN
(SELECT DISTINCT username FROM csmig.csm$dictusers
 )
ORDER BY NotHandledDataDictColumns
 /

NOTHANDLEDDATADICTCOLUMNS
--------------------------------------------------------------------------------
SYS.HISTGRM$(EPVALUE) - VARCHAR2 - CONVERTIBLE


NOTHANDLEDDATADICTCOLUMNS
--------------------------------------------------------------------------------
SYS.WRI$_OPTSTAT_HISTGRM_HISTORY(EPVALUE) - VARCHAR2 - CONVERTIBLE



-- Solving Convertible or Lossy data in Data Dictionary objects when changing the NLS_CHARACTERSET [ID 258904.1]

**********************************************************
Do not update or truncate SYS.HISTGRM$ itself
**********************************************************

SYS.HISTGRM$ simply holds histogram/statistics information about other tables, so you do not take action on SYS.HISTGRM$ itself; you take action on the statistics of the tables it references.

For SYS.HISTGRM$ run $ORACLE_HOME/nls/csscan/sql/analyze_histgrm.sql (10g and up location) or $ORACLE_HOME/rdbms/admin/analyze_histgrm.sql (9i location), then drop the statistics on the tables it reports, as shown below.


cd $ORACLE_HOME/nls/csscan/sql
SQL>  set lines 250
SQL> @$ORACLE_HOME/nls/csscan/sql/analyze_histgrm.sql
Table: SYS.HISTGRM$
Error: CONVERTIBLE DATA


OWNER                          OBJECT_NAME                    OBJECT_TYPE         CONVERITBLE
------------------------------ ------------------------------ ------------------- -----------
KUMAR                           KUMAR_ASSIGNMENT                TABLE                         1


Table: SYS.HISTGRM$
Error: EXCEPTIONAL DATA

no rows selected

-- Delete the statistics on the table reported above.

Note: you do NOT need to drop the stats on SYS.HISTGRM$ itself; you need to drop the stats on the tables reported by the analyze_histgrm.sql script.


conn / AS sysdba

EXEC DBMS_STATS.DELETE_TABLE_STATS ('KUMAR', 'KUMAR_ASSIGNMENT');



-- Re-run analyze_histgrm.sql to make sure no more tables are returned.

SQL> @$ORACLE_HOME/nls/csscan/sql/analyze_histgrm.sql
Table: SYS.HISTGRM$
Error: CONVERTIBLE DATA

no rows selected

Table: SYS.HISTGRM$
Error: EXCEPTIONAL DATA

no rows selected


************************************************************************
4) To clear the EPVALUE column of SYS.WRI$_OPTSTAT_HISTGRM_HISTORY, execute:
************************************************************************

-- The CSSCAN .txt summary showed 1 convertible VARCHAR2 row in the data dictionary (which needs to be cleaned up):

Data Dictionary Tables:

Datatype                    Changeless      Convertible       Truncation            Lossy
--------------------- ---------------- ---------------- ---------------- ----------------
VARCHAR2                    80,438,447                1                0                0
CHAR                            35,179                0                0                0
LONG                           754,792                0                0                0
CLOB                         8,251,506           36,467                0                0
VARRAY                          55,181                0                0                0
--------------------- ---------------- ---------------- ---------------- ----------------


-- The CSSCAN .err file shows this convertible data is in the SYS.WRI$_OPTSTAT_HISTGRM_HISTORY table.



SQL> select systimestamp - dbms_stats.get_stats_history_availability from dual;

SYSTIMESTAMP-DBMS_STATS.GET_STATS_HISTORY_AVAILABILITY
---------------------------------------------------------------------------
+000000031 16:30:40.777952000

-- 31 days of retention.

-- now disable stats retention

exec dbms_stats.alter_stats_history_retention(0);

-- purge all statistics history
exec DBMS_STATS.PURGE_STATS(systimestamp);

-- check there are no more left

select count(*) from SYS.WRI$_OPTSTAT_HISTGRM_HISTORY;

-- this will clear SYS.WRI$_OPTSTAT_HISTGRM_HISTORY(EPVALUE )





********************************************************************************************************************************************
B) Dealing with "Truncation" data |  adapt the columns to fit the expansion of the data 
********************************************************************************************************************************************

How much the data will expand can be seen using:

conn / as sysdba
SET serveroutput ON size 200000
DECLARE
v_Stmt1 VARCHAR2(6000 BYTE);
BEGIN
DBMS_OUTPUT.PUT_LINE('output is: ');
DBMS_OUTPUT.PUT_LINE('owner.table_name column_name - column_type - max expansion in Bytes.');
DBMS_OUTPUT.PUT_LINE('....................................................................');
FOR rec IN
(select u.owner_name, u.table_name, u.column_name, u.column_type, max(max_post_convert_size) post_convert_size FROM csmig.csmv$columns u
WHERE u.exceed_size_rows > to_number('0') GROUP BY u.owner_name, u.table_name, u.column_name, u.column_type ORDER BY u.owner_name, u.table_name, u.column_name)
LOOP
IF rec.post_convert_size <= '2000' and rec.column_type = 'CHAR' THEN
DBMS_OUTPUT.PUT_LINE( rec.owner_name ||'.'|| rec.table_name ||' ('|| rec.column_name ||') - '|| rec.column_type ||' - '|| rec.post_convert_size || ' Bytes .');
END IF;
IF rec.post_convert_size > '2000' and rec.column_type = 'CHAR' THEN
DBMS_OUTPUT.PUT_LINE( rec.owner_name ||'.'|| rec.table_name ||' ('|| rec.column_name ||') - '|| rec.column_type ||' - '|| rec.post_convert_size || ' Bytes .');
dbms_output.put_line('! Warning !');
dbms_output.put_line('Data in '|| rec.owner_name ||'.'|| rec.table_name ||'.'|| rec.column_name ||' will expand OVER the datatype limit of CHAR !');
IF rec.post_convert_size > '4000' THEN
dbms_output.put_line('The '||rec.column_name ||' CHAR column NEED to change to CLOB to solve the truncation!');
END IF;
IF rec.post_convert_size <= '4000' THEN
dbms_output.put_line('The '||rec.column_name ||' CHAR column NEED to change to VARCHAR2 to solve the truncation!');
END IF;
dbms_output.put_line('Using CHAR semantics for this column without changing datatype will NOT solve the truncation!');
END IF;
IF rec.post_convert_size <= '4000' and rec.column_type = 'VARCHAR2' THEN
DBMS_OUTPUT.PUT_LINE( rec.owner_name ||'.'|| rec.table_name ||' ('|| rec.column_name ||') - '|| rec.column_type ||' - '|| rec.post_convert_size ||' Bytes .');
END IF;
IF rec.post_convert_size > '4000' and rec.column_type = 'VARCHAR2' THEN
DBMS_OUTPUT.PUT_LINE( rec.owner_name ||'.'|| rec.table_name ||' ('|| rec.column_name ||') - '|| rec.column_type ||' - '|| rec.post_convert_size || ' Bytes .');
dbms_output.put_line('! Warning !');
dbms_output.put_line('Data in '|| rec.owner_name ||'.'|| rec.table_name ||'.'|| rec.column_name ||' will expand OVER the datatype limit of VARCHAR2 !');
dbms_output.put_line('The '||rec.column_name ||' VARCHAR2 column NEED to change to CLOB to solve the truncation!');
dbms_output.put_line('Using CHAR semantics for this column without changing datatype will NOT solve the truncation!');
END IF;
END LOOP;
END;
/

~~~~~~~~~~~~
OUTPUT:- 
~~~~~~~~~~~~

output is:-

owner.table_name column_name - column_type - max expansion in Bytes.
....................................................................

SUNIL.BLD_CB_SERVICEABILITY (SUNIL_NAME) - VARCHAR2 - 31 Bytes .
SUNIL.CB_SERVICEABILITY (SUNIL_NAME) - VARCHAR2 - 31 Bytes .
SUNIL.STG_CB_SERVICEABILITY (SUNIL_NAME) - VARCHAR2 - 31 Bytes .



-- Expand the table column lengths as per the output above
vi /home/oracle/sunil/expand_column_length.sql
set echo on
set time on
spool /home/oracle/sunil/QVNTG1/expand_column_length.log
alter table SUNIL.BLD_CB_SERVICEABILITY modify SUNIL_NAME VARCHAR2(40 BYTE);
alter table SUNIL.CB_SERVICEABILITY modify SUNIL_NAME VARCHAR2(40 BYTE);
alter table SUNIL.STG_CB_SERVICEABILITY modify SUNIL_NAME VARCHAR2(40 BYTE);
spool off;

-- execute the expand_column_length.sql

@/home/oracle/sunil/expand_column_length.sql





********************************************************************************************************************************************
C) Use the query below to list the application data tables that need manual intervention before running csalter (data dictionary CLOB data, which csalter handles itself, is excluded):
********************************************************************************************************************************************

Note: it is MANDATORY to export and then truncate/delete ALL "Convertible" user/application data before running csalter.


set pages 0
set lines 250
spool /home/oracle/sunil/QVNTG/Application_Data.log

-- this returns the table names only

SELECT DISTINCT z.owner_name
|| '.'
|| z.table_name
FROM csmig.csmv$errors z
minus
SELECT DISTINCT z.owner_name
|| '.'
|| z.table_name
FROM csmig.csmv$errors z
WHERE z.error_type ='CONVERTIBLE'
AND z.column_type = 'CLOB'
AND z.owner_name IN
(SELECT DISTINCT username FROM csmig.csm$dictusers
)
ORDER BY 1
/

spool off;


**************************************************************
Export all of the tables from the output above
**************************************************************
vi expdp_app_data.par

userid ="/ as sysdba"
directory=DATA_PUMPS
TABLES=APEX_030200.WWV_FLOW_BANNER, APEX_030200.WWV_FLOW_BUTTON_TEMPLATES, APEX_030200.WWV_FLOW_CUSTOM_AUTH_SETUPS, APEX_030200.WWV_FLOW_FLASH_CHART_SERIES, APEX_030200.WWV_FLOW_LIST_TEMPLATES, APEX_030200.WWV_FLOW_PAGE_GENERIC_ATTR, APEX_030200.WWV_FLOW_PAGE_PLUGS, APEX_030200.WWV_FLOW_PAGE_PLUG_TEMPLATES, APEX_030200.WWV_FLOW_PROCESSING, APEX_030200.WWV_FLOW_ROW_TEMPLATES, APEX_030200.WWV_FLOW_SHORTCUTS, APEX_030200.WWV_FLOW_STEPS, APEX_030200.WWV_FLOW_STEP_PROCESSING, APEX_030200.WWV_FLOW_TEMPLATES, APEX_030200.WWV_FLOW_WORKSHEETS, 
parallel = 16
cluster = n
dumpfile=qvntg_trunc_tables_%U.dmp
logfile=trunc_tables.log
job_name=trunc_tables 

-- execute

nohup expdp parfile=expdp_app_data.par & 

###############################################################
--- TRUNCATE ALL THE TABLES REPORTED ABOVE
###############################################################

Truncate all of the reported tables (a sketch of a script to generate the statements follows).
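
One way to avoid hand-typing each TRUNCATE is to generate the statements from the same MINUS query used above (a sketch; the spool file name is only an example):

set pages 0
set lines 250
spool /home/oracle/sunil/QVNTG/truncate_app_tables.sql
SELECT 'truncate table ' || z.owner_name || '.' || z.table_name || ';'
FROM (SELECT DISTINCT owner_name, table_name FROM csmig.csmv$errors
      MINUS
      SELECT DISTINCT owner_name, table_name FROM csmig.csmv$errors
      WHERE error_type  = 'CONVERTIBLE'
      AND   column_type = 'CLOB'
      AND   owner_name IN (SELECT DISTINCT username FROM csmig.csm$dictusers)) z
ORDER BY 1
/
spool off;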

The following tables failed to truncate:


APEX_030200.WWV_FLOW_WORKSHEETS
APEX_030200.WWV_FLOW_STEP_PROCESSING
APEX_030200.WWV_FLOW_STEPS
APEX_030200.WWV_FLOW_PAGE_PLUGS
XXX.XXXX_ASSIGNMENT


ERROR at line 1:
ORA-02266: unique/primary keys in table referenced by enabled foreign keys



-- Generate statements to disable the foreign key constraints that reference the tables that failed to truncate

set pages 0
set lines 250
spool /home/oracle/sunil/QVNTG/disable_constraints.sql
select 'alter table '||a.owner||'.'||a.table_name||' disable constraint '||a.constraint_name||';'
from all_constraints a, all_constraints b
where a.constraint_type = 'R'
and a.r_constraint_name = b.constraint_name
and a.r_owner  = b.owner
and b.table_name in ('XXXX_ASSIGNMENT', 'WWV_FLOW_PAGE_PLUGS', 'WWV_FLOW_STEPS','WWV_FLOW_STEP_PROCESSING','WWV_FLOW_WORKSHEETS');
spool off;


-- Generate the matching enable-constraint statements (to re-enable them after the import)

set pages 0
set lines 250
spool /home/oracle/sunil/QVNTG/enable_constraints.sql
select 'alter table '||a.owner||'.'||a.table_name||' enable constraint '||a.constraint_name||';'
  from all_constraints a, all_constraints b
  where a.constraint_type = 'R'
  and a.r_constraint_name = b.constraint_name
  and a.r_owner  = b.owner
  and b.table_name in ('XXXX_ASSIGNMENT','WWV_FLOW_PAGE_PLUGS', 'WWV_FLOW_STEPS','WWV_FLOW_STEP_PROCESSING','WWV_FLOW_WORKSHEETS');
spool off;


##########################################################
Disable all referencing foreign key constraints reported above
##########################################################

@/home/oracle/sunil/QVNTG/disable_constraints.sql



##########################################################
TRUNCATE THE TABLES THAT PREVIOUSLY FAILED
##########################################################

truncate table APEX_030200.WWV_FLOW_WORKSHEETS;
truncate table APEX_030200.WWV_FLOW_STEP_PROCESSING;
truncate table APEX_030200.WWV_FLOW_STEPS;
truncate table APEX_030200.WWV_FLOW_PAGE_PLUGS;
truncate table XXXX.XXXXX_ASSIGNMENT;


###############################################
D) Join indexes on columns using CHAR semantics
################################################

- "ORA-54028: cannot change the HIDDEN/VISIBLE property of a virtual column" during the change to UTF8

SELECT owner,
index_name,
table_owner,
table_name,
status,
index_type 
FROM dba_indexes
WHERE table_name IN
(SELECT UNIQUE (object_name) FROM dba_objects WHERE object_id IN
(SELECT UNIQUE obj# FROM sys.col$ WHERE property='8454440' ))
AND table_name IN
(SELECT UNIQUE (table_name) FROM dba_tab_columns WHERE char_used ='C' AND data_type IN ('CHAR','VARCHAR2'))
ORDER BY 1,2
/


If columns using CHAR semantics are used as a partitioning or subpartitioning key, the affected partitioned tables need to be exported, dropped and, after the change, re-imported. The sketch below shows one way to find them.
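
A query along these lines (an assumption-level sketch using the DBA_PART_KEY_COLUMNS view; check DBA_SUBPART_KEY_COLUMNS the same way for subpartition keys) can list partition key columns defined with CHAR semantics:

SELECT k.owner, k.name AS table_name, k.column_name
FROM dba_part_key_columns k, dba_tab_columns c
WHERE k.object_type = 'TABLE'
AND c.owner = k.owner
AND c.table_name = k.name
AND c.column_name = k.column_name
AND c.char_used = 'C'
AND c.data_type IN ('CHAR','VARCHAR2')
ORDER BY 1,2
/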


##############################################################################
E) Check function-based, domain, or joined indexes on CHAR semantics columns
##############################################################################

SELECT owner,
index_name,
table_owner,
table_name,
status,
index_type 
FROM dba_indexes
WHERE index_type IN ('DOMAIN','FUNCTION-BASED NORMAL','FUNCTION-BASED NORMAL/REV','FUNCTION-BASED BITMAP')
AND table_name IN
(SELECT UNIQUE (x.table_name) FROM dba_tab_columns x WHERE x.char_used ='C' AND x.data_type IN ('CHAR','VARCHAR2'))
ORDER BY 1,2
/

If there are functional or domain indexes on columns using CHAR semantics, the indexes need to be dropped before the change and recreated afterwards.

************************************************
The DDL of all those indexes can be found using:
************************************************


SET LONG 2000000
SET pagesize 0
EXECUTE dbms_metadata.set_transform_param(dbms_metadata.session_transform,'STORAGE',FALSE);

SELECT dbms_metadata.get_ddl('INDEX',index_name,owner)
FROM dba_indexes
WHERE index_type IN ('DOMAIN','FUNCTION-BASED NORMAL','FUNCTION-BASED NORMAL/REV','FUNCTION-BASED BITMAP')
AND table_name IN
(SELECT UNIQUE (x.table_name) FROM dba_tab_columns x WHERE x.char_used ='C' AND x.data_type IN ('CHAR','VARCHAR2'));

EXECUTE dbms_metadata.set_transform_param(dbms_metadata.session_transform,'DEFAULT');


*****************************************************************
DROP FUNCTION BASED INDEXES WHICH ARE REPORTED IN ABOVE STEP
*****************************************************************
-- In my case, no such indexes were reported.

******************************************************************************
F) SYSTIMESTAMP in the DEFAULT value clause for tables using CHAR semantics
******************************************************************************
SET serveroutput ON
BEGIN
FOR rec IN
(SELECT owner,
table_name,
column_name,
data_default
FROM dba_tab_columns
WHERE char_used='C'
)
loop
IF upper(rec.data_default) LIKE '%TIMESTAMP%' THEN
dbms_output.put_line(rec.owner ||'.'|| rec.table_name ||'.'|| rec. column_name);
END IF;
END loop;
END;
/


If this returns rows, the change to UTF8 will fail with "ORA-00604: error occurred at recursive SQL level %s, ORA-01866: the datetime class is invalid".
The workaround is to temporarily change the affected columns to use a DEFAULT NULL clause

eg: ALTER TABLE tab MODIFY ( col ... DEFAULT NULL NOT NULL );

After the character set change the original DEFAULT clause can be restored.


***********************************
G) Clusters using CHAR semantics
************************************


- "ORA-01447: ALTER TABLE does not operate on clustered columns" during the change to AL32UTF8

SELECT owner,
object_name
FROM all_objects
WHERE object_type = 'CLUSTER'
AND object_name IN
(SELECT UNIQUE (table_name) FROM dba_tab_columns WHERE char_used ='C'
)
ORDER BY 1,2
/


If this returns rows, the change to UTF8 will fail with "ORA-01447: ALTER TABLE does not operate on clustered columns". Those clusters need to be dropped and recreated after the character set change; their DDL can be captured first, as sketched below.
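
Before dropping an affected cluster, its DDL can be extracted for the rebuild, for example with dbms_metadata (a sketch built on the same check as above):

SET LONG 2000000
SET pagesize 0
SELECT dbms_metadata.get_ddl('CLUSTER', object_name, owner)
FROM all_objects
WHERE object_type = 'CLUSTER'
AND object_name IN
(SELECT UNIQUE (table_name) FROM dba_tab_columns WHERE char_used ='C');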

*****************************************
H) Unused columns using CHAR semantics
*****************************************

SELECT owner,
table_name
FROM dba_unused_col_tabs
WHERE table_name IN
(SELECT UNIQUE (table_name) FROM dba_tab_columns WHERE char_used ='C'
)
ORDER BY 1,2
/


OUTPUT:-

OWNER                          TABLE_NAME
------------------------------ ------------------------------
SUN                             BLD_RESPONSE_HISTORY
SUN                             BLD_RESPONSE_HISTORY_AGG
SUN                             BLD_RESPONSE_HISTORY_HH_AGG
SUN                             RESPONSE_HISTORY
SUN                             RESPONSE_HISTORY_AGG
SUN                             RESPONSE_HISTORY_HH_AGG
SUN                             SWP_RESPONSE_HISTORY
SUN                             SWP_RESPONSE_HISTORY_AGG
SUN                             SWP_RESPONSE_HISTORY_HH_AGG

9 rows selected.



-- If this returns any rows, drop those unused columns:

set echo on
set time on
spool /home/oracle/sunil/unused_drop_columns.log
alter table SUN.BLD_RESPONSE_HISTORY DROP UNUSED COLUMNS;

alter table SUN.BLD_RESPONSE_HISTORY_AGG DROP UNUSED COLUMNS;

alter table SUN.BLD_RESPONSE_HISTORY_HH_AGG DROP UNUSED COLUMNS;

alter table SUN.RESPONSE_HISTORY DROP UNUSED COLUMNS;

alter table SUN.RESPONSE_HISTORY_AGG DROP UNUSED COLUMNS;

alter table SUN.RESPONSE_HISTORY_HH_AGG DROP UNUSED COLUMNS;

alter table SUN.SWP_RESPONSE_HISTORY DROP UNUSED COLUMNS;

alter table SUN.SWP_RESPONSE_HISTORY_AGG DROP UNUSED COLUMNS;

alter table SUN.SWP_RESPONSE_HISTORY_HH_AGG DROP UNUSED COLUMNS;
spool off;



**********************************************************************
I) Check that the compatible parameter is set to your base version
***********************************************************************

sqlplus / as sysdba
show parameter compatible

Do not try to migrate, for example, a 10g database with compatible=9.2.

*********************************************************************************
J) For Oracle 11.2.0.3, 11.2.0.2, 11.2.0.1, 11.1.0.7 and 11.1.0.6: check for SQL plan baselines and profiles
*********************************************************************************
select count(*) from dba_sql_plan_baselines;

  COUNT(*)
----------
         0


select count(*)  from dba_sql_profiles;

  COUNT(*)
----------
         0

If these selects return a non-zero count, first apply Patch 12989073 (ORA-31011 / ORA-19202 AFTER CHARACTER CONVERSION TO UTF8).
If no patch is available and you are changing an older, out-of-support Oracle RDBMS version, drop the SQL plan baselines and profiles before the conversion (see Note 1313386.1); a sketch follows below.
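
For that fallback case only, a hedged sketch of dropping all baselines and profiles (do not use this if the patch can be applied):

DECLARE
  l_dropped PLS_INTEGER;
BEGIN
  -- drop every SQL plan baseline
  FOR b IN (SELECT sql_handle, plan_name FROM dba_sql_plan_baselines) LOOP
    l_dropped := DBMS_SPM.DROP_SQL_PLAN_BASELINE(sql_handle => b.sql_handle,
                                                 plan_name  => b.plan_name);
  END LOOP;
  -- drop every SQL profile
  FOR p IN (SELECT name FROM dba_sql_profiles) LOOP
    DBMS_SQLTUNE.DROP_SQL_PROFILE(name => p.name);
  END LOOP;
END;
/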




#################################################################################
STEP 6) Re-run a full csscan to verify the cleanup
#################################################################################

A. Re-run csscan on the FULL database (csalter requires a recent full scan):

nohup csscan \"sys/xxxxx as sysdba\" parfile=csscan.par &

--- parfile=csscan.par

csscan.par :

FULL=Y
FROMCHAR=WE8MSWIN1252
TOCHAR=UTF8
ARRAY=1024000
LOG=SUNIL1_WE8MSWIN1252
CAPTURE=Y
PROCESS=60


#################################################################################
STEP 7) Verify that no lossy data remains in the report SUNIL1_WE8MSWIN1252.txt
#################################################################################
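
A quick way to eyeball this (assuming the report name above; exact wording can vary slightly between csscan versions) is to search the report - every "Lossy" column in the summary tables should show 0:

grep -i "lossy" SUNIL1_WE8MSWIN1252.txt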


#################################################################################
STEP 8) Analyze SUNIL1_WE8MSWIN1252.txt and make sure it contains the statements below; do not proceed further until any remaining issues are fixed.
#################################################################################

Under [Data Dictionary Conversion Summary]

The data dictionary can be safely migrated using the CSALTER script

                            (or)

under [Scan Summary]

All character type data in the data dictionary are convertible to the new character set
All character type application data remain the same in the new character set
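
A quick check for these phrases in the report (same report name as above):

grep "can be safely migrated" SUNIL1_WE8MSWIN1252.txt
grep "convertible to the new character set" SUNIL1_WE8MSWIN1252.txt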



###############################################################
STEP 9) Recheck the statements below; they should return no rows
###############################################################

************************************************************
i) Recheck whether any data dictionary tables need manual cleanup
************************************************************

SELECT DISTINCT z.owner_name
|| '.'
   || z.table_name
|| '('
   || z.column_name
|| ') - '
|| z.column_type
|| ' - '
|| z.error_type
|| ' ' NotHandledDataDictColumns
FROM csmig.csmv$errors z
WHERE z.owner_name IN
(SELECT DISTINCT username FROM csmig.csm$dictusers
) minus
SELECT DISTINCT z.owner_name
|| '.'
|| z.table_name
|| '('
|| z.column_name
|| ') - '
|| z.column_type
|| ' - '
|| z.error_type
|| ' ' DataDictConvCLob
FROM csmig.csmv$errors z
WHERE z.error_type ='CONVERTIBLE'
AND z.column_type = 'CLOB'
AND z.owner_name IN
(SELECT DISTINCT username FROM csmig.csm$dictusers
 )
ORDER BY NotHandledDataDictColumns
 /


************************************************************
ii) Recheck whether any data requires column expansion
************************************************************

SET serveroutput ON size 200000
DECLARE
v_Stmt1 VARCHAR2(6000 BYTE);
BEGIN
DBMS_OUTPUT.PUT_LINE('output is: ');
DBMS_OUTPUT.PUT_LINE('owner.table_name column_name - column_type - max expansion in Bytes.');
DBMS_OUTPUT.PUT_LINE('....................................................................');
FOR rec IN
(select u.owner_name, u.table_name, u.column_name, u.column_type, max(max_post_convert_size) post_convert_size FROM csmig.csmv$columns u
WHERE u.exceed_size_rows > to_number('0') GROUP BY u.owner_name, u.table_name, u.column_name, u.column_type ORDER BY u.owner_name, u.table_name, u.column_name)
LOOP
IF rec.post_convert_size <= '2000' and rec.column_type = 'CHAR' THEN
DBMS_OUTPUT.PUT_LINE( rec.owner_name ||'.'|| rec.table_name ||' ('|| rec.column_name ||') - '|| rec.column_type ||' - '|| rec.post_convert_size || ' Bytes .');
END IF;
IF rec.post_convert_size > '2000' and rec.column_type = 'CHAR' THEN
DBMS_OUTPUT.PUT_LINE( rec.owner_name ||'.'|| rec.table_name ||' ('|| rec.column_name ||') - '|| rec.column_type ||' - '|| rec.post_convert_size || ' Bytes .');
dbms_output.put_line('! Warning !');
dbms_output.put_line('Data in '|| rec.owner_name ||'.'|| rec.table_name ||'.'|| rec.column_name ||' will expand OVER the datatype limit of CHAR !');
IF rec.post_convert_size > '4000' THEN
dbms_output.put_line('The '||rec.column_name ||' CHAR column NEED to change to CLOB to solve the truncation!');
END IF;
IF rec.post_convert_size <= '4000' THEN
dbms_output.put_line('The '||rec.column_name ||' CHAR column NEED to change to VARCHAR2 to solve the truncation!');
END IF;
dbms_output.put_line('Using CHAR semantics for this column without changing datatype will NOT solve the truncation!');
END IF;
IF rec.post_convert_size <= '4000' and rec.column_type = 'VARCHAR2' THEN
DBMS_OUTPUT.PUT_LINE( rec.owner_name ||'.'|| rec.table_name ||' ('|| rec.column_name ||') - '|| rec.column_type ||' - '|| rec.post_convert_size ||' Bytes .');
END IF;
IF rec.post_convert_size > '4000' and rec.column_type = 'VARCHAR2' THEN
DBMS_OUTPUT.PUT_LINE( rec.owner_name ||'.'|| rec.table_name ||' ('|| rec.column_name ||') - '|| rec.column_type ||' - '|| rec.post_convert_size || ' Bytes .');
dbms_output.put_line('! Warning !');
dbms_output.put_line('Data in '|| rec.owner_name ||'.'|| rec.table_name ||'.'|| rec.column_name ||' will expand OVER the datatype limit of VARCHAR2 !');
dbms_output.put_line('The '||rec.column_name ||' VARCHAR2 column NEED to change to CLOB to solve the truncation!');
dbms_output.put_line('Using CHAR semantics for this column without changing datatype will NOT solve the truncation!');
END IF;
END LOOP;
END;
/

************************************************************
iii) Application data which needs manual intervention
************************************************************

SELECT DISTINCT z.owner_name
|| '.'
|| z.table_name
FROM csmig.csmv$errors z
minus
SELECT DISTINCT z.owner_name
|| '.'
|| z.table_name
FROM csmig.csmv$errors z
WHERE z.error_type ='CONVERTIBLE'
AND z.column_type = 'CLOB'
AND z.owner_name IN
(SELECT DISTINCT username FROM csmig.csm$dictusers
)
ORDER BY 1
/

*********************************************************************
iv) Run utlrp.sql again
*********************************************************************

@?/rdbms/admin/utlrp.sql


SQL> select count(*) from dba_objects where status='INVALID';

  COUNT(*)
----------
       660


#################################################################################
STEP 10) Set parameter values & Restart instance to clear any inactive sessions
#################################################################################

SQL>ALTER SYSTEM SET JOB_QUEUE_PROCESSES=0 scope=spfile sid='*';

SQL> alter system set aq_tm_processes=0 scope=spfile sid='*';

SQL> alter system set cluster_database=FALSE scope=spfile sid='*';

SQL> exit

srvctl stop database -d


sqlplus / as sysdba

startup restrict

---------- Note: check for any remaining user sessions; if any exist, kill them ----------

select username from gv$session where username is not null;

select sid,serial# from gv$session where username = 'SYS';

select 'alter system kill session ''' ||sid|| ',' || serial#|| ''' immediate;' from gv$session where username is not null;

It should return only one session, which is our own.

SQL> shutdown immediate;

SQL> startup restrict;


#################################################################################
STEP 11) Run conversion
#################################################################################

SQL>@$ORACLE_HOME/rdbms/admin/csalter.plb

---------
OUTPUT:-   It took around 10 minutes to complete (600 GB database).
---------
SQL> @$ORACLE_HOME/rdbms/admin/csalter.plb

0 rows created.
Function created.
Function created.
Procedure created.
This script will update the content of the Oracle Data Dictionary.
Please ensure you have a full backup before initiating this procedure.
Would you like to proceed (Y/N)?Y
old   6:     if (UPPER('&conf') <> 'Y') then
new   6:     if (UPPER('Y') <> 'Y') then

Checking data validity...
begin converting system objects
324 rows in table SYS.WRI$_DBU_FEATURE_USAGE are converted
2504 rows in table SYS.WRH$_SQLTEXT are converted
1490 rows in table SYSMAN.MGMT_IP_REPORT_ELEM_PARAMS are converted
171 rows in table SYS.WRI$_DBU_FEATURE_METADATA are converted
78 rows in table MDSYS.SDO_STYLES_TABLE are converted
2942 rows in table SYS.WRI$_ADV_OBJECTS are converted
15208 rows in table SYS.SCHEDULER$_EVENT_LOG are converted
27 rows in table SYS.WRI$_REPT_FILES are converted
21 rows in table SYS.WRI$_DBU_HWM_METADATA are converted
46670 rows in table SYS.WRH$_SQL_PLAN are converted
5 rows in table MDSYS.SDO_XML_SCHEMAS are converted
130 rows in table SYSMAN.MGMT_IP_ELEM_DEFAULT_PARAMS are converted
179 rows in table SYS.METASTYLESHEET are converted
1 row in table SYS.RULE$ is converted
5776 rows in table SYS.WRI$_ADV_ACTIONS are converted
5 rows in table SYS.WRI$_ADV_DIRECTIVE_META are converted
7947 rows in table SYS.WRI$_ADV_RATIONALE are converted
31 rows in table SYSMAN.MGMT_IP_SQL_STATEMENTS are converted
15 rows in table SYSTEM.PLAN_TABLE are converted
21 rows in table SYS.SNAP$ are converted
2614 rows in table MDSYS.SDO_COORD_OP_PARAM_VALS are converted
2 rows in table SYS.EXTERNAL_TAB$ are converted
1 row in table MDSYS.SDO_GEOR_XMLSCHEMA_TABLE is converted

PL/SQL procedure successfully completed.
Alter the database character set...
CSALTER operation completed, please restart database
PL/SQL procedure successfully completed.
0 rows deleted.
Function dropped.
Function dropped.
Procedure dropped.


-- validate conversion

SQL> select * from nls_database_parameters;

Check that the NLS_CHARACTERSET has been converted to UTF8:

set lines 250

SELECT PARAMETER, VALUE FROM NLS_DATABASE_PARAMETERS WHERE PARAMETER IN ('NLS_CHARACTERSET', 'NLS_NCHAR_CHARACTERSET');

PARAMETER                      VALUE
------------------------------ -------------------------------------------------------------------------------------------
NLS_CHARACTERSET               UTF8
NLS_NCHAR_CHARACTERSET         AL16UTF16

-- set parameters back

SQL> ALTER SYSTEM SET JOB_QUEUE_PROCESSES=1000  scope=both sid='*';

SQL> ALTER SYSTEM SET AQ_TM_PROCESSES=1  scope=both sid='*';

SQL> alter system set cluster_database=TRUE scope=spfile sid='*';

-- restart database

SQL> shutdown immediate;

SQL> exit

srvctl start database -d

srvctl status database -d

#################################################################
STEP 12) Import the table data that was exported
##################################################################

-- Import CB.BLD_CB_ACCOUNTS

userid ="/ as sysdba"
directory=DATA_PUMPS
TABLES=XXXX.BLD_CB_ACCOUNTS
dumpfile=xxxx_BLD_CB_ACCOUNTS.dmp
logfile=xxx_BLD_CB_AC_imp.log
job_name=trunc_tables 
CONTENT=DATA_ONLY
table_exists_action=truncate




--- import the rest of the tables



userid ="/ as sysdba"
directory=DATA_PUMPS
parallel = 16
cluster = n
dumpfile=XXXX_trunc_tables_%U.dmp
logfile=trunc_tables_imp_xxxxx.log
job_name=trunc_tables 
CONTENT=DATA_ONLY
table_exists_action=truncate



-- Let the import run; errors like the ones below may appear. From these errors, collect the table and column names that need expansion.

grep 'ORA-12899\|ORA-02374\|KUP-11007\|ORA-02372\|ORA-31693\|ORA-01732' trunc_tables_imp_XXXx.log > output.txt

-- to sort out unique rows 

sort output.txt | uniq


ORA-31693: Table data object "XXX"."HOUSEHOLDS":"HOUSEHOLDS_RHI" failed to load/unload and is being skipped due to error:
ORA-01732: data manipulation operation not legal on this view
ORA-31693: Table data object "XXX"."HOUSEHOLDS":"HOUSEHOLDS_ROA" failed to load/unload and is being skipped due to error:
ORA-01732: data manipulation operation not legal on this view


-- The errors above are due to a materialized view on the table. Check whether the object is a materialized view:

SQL>  select object_name,object_type,owner from dba_objects where object_name='ALL_HOUSEHOLDS' and owner='CM' and object_type like '%VIEW%';

OBJECT_NAME                         OBJECT_TYPE         OWNER
----------------------------------- ------------------- ------------------------------
HOUSEHOLDS                      MATERIALIZED VIEW   XXXXX



A complete refresh of the materialized view fixes the issue.

SQL> execute dbms_mview.refresh ('XXX.HOUSEHOLDS', 'C', atomic_refresh=> FALSE);

-- Gather Stats on This Table

SQL> exec dbms_stats.gather_table_stats('XXX','HOUSEHOLDS',estimate_percent=>100,granularity=>'PARTITION',cascade=>TRUE,no_invalidate=>FALSE);

-- Now check the row counts in the table partitions

select  table_name, partition_name, global_stats, last_analyzed, num_rows
from dba_tab_partitions
where table_name='_HOUSEHOLDS'
and table_owner='XXXX'
order by 1, 2, 4 desc nulls last;

~~~~~~~~
OUTPUT:-
~~~~~~~~

TABLE_NAME                     PARTITION_NAME                 GLO LAST_ANAL   NUM_ROWS
------------------------------ ------------------------------ --- --------- ----------
HOUSEHOLDS                     HOUSEHOLDS_PHX                 YES 22-MAY-15          0
HOUSEHOLDS                     HOUSEHOLDS_RHI                 YES 22-MAY-15     579025
HOUSEHOLDS                     HOUSEHOLDS_ROA                 YES 22-MAY-15      98839
HOUSEHOLDS                     HOUSEHOLDS_SAB                 YES 22-MAY-15          0
HOUSEHOLDS                     HOUSEHOLDS_SAN                 YES 22-MAY-15          0
HOUSEHOLDS                     HOUSEHOLDS_TUL                 YES 22-MAY-15          0
HOUSEHOLDS                     HOUSEHOLDS_WTX                 YES 22-MAY-15          0


###############################################
STEP 13) Re-enable the foreign keys
###############################################

Run the enable_constraints.sql script generated earlier (section C of the cleanup step):

@/home/oracle/sunil/QVNTG/enable_constraints.sql


##################################################################
STEP 14) Recreate the function-based indexes which were dropped
##################################################################

-- None in my Case


#################################################################################
STEP 15) Validate the indexes listed in the scan report.
#################################################################################

-- the indexes that may need a rebuild are listed at the end of the csscan .txt report

Check whether the reported indexes are in a valid state; if not, rebuild them.

-- In my case, none of the indexes needed a rebuild.
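
As a double-check, unusable indexes and index partitions can also be listed straight from the dictionary:

SELECT owner, index_name, status
FROM dba_indexes
WHERE status NOT IN ('VALID', 'N/A')
ORDER BY 1, 2;

SELECT index_owner, index_name, partition_name, status
FROM dba_ind_partitions
WHERE status = 'UNUSABLE'
ORDER BY 1, 2, 3;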

-- set time on
-- set echo on
-- spool /home/oracle/sunil/QVNTG1/rebuil_indexes.log
-- alter index APEX_030200.WWV_FLOW_WORKSHEETS_UNQ_IDX rebuild online;
-- alter index APEX_030200.WWV_FLOW_WORKSHEETS_UNQ_IDX rebuild online;
-- alter index APEX_030200.WWV_FLOW_WORKSHEETS_UNQ_IDX rebuild online;
-- alter index APEX_030200.WWV_FLOW_WORKSHEETS_UNQ_IDX rebuild online;
-- alter index APEX_030200.WWV_FLOW_WS_UNQ_ALIAS_IDX rebuild online;
-- alter index APEX_030200.WWV_FLOW_WS_UNQ_ALIAS_IDX rebuild online;

-- spool off;

#################################################################################
STEP 16) Check for invalid objects after the conversion.
#################################################################################

If there are any invalid objects, run utlrp.sql:


@?/rdbms/admin/utlrp.sql

SQL> select count(*) from dba_objects where status='INVALID';

  COUNT(*)
----------
       660



Documents I have followed -> 
AL32UTF8 / UTF8 (Unicode) Database Character Set Implications (Doc ID 788156.1)
Changing the NLS_CHARACTERSET to AL32UTF8 / UTF8 (Unicode) in 8i, 9i , 10g and 11g (Doc ID 260192.1)
Csscan Output Explained (Doc ID 444701.1)
Solving Convertible or Lossy data in Data Dictionary objects reported by Csscan when changing the NLS_CHARACTERSET (Doc ID 258904.1)




