Implement Oracle Change Data Capture (CDC)
To set up CDC we prepare a source database named SOURCE and a staging database named STAGE that will hold the captured changes.
Step 1 Source Database DBA: Change the following parameters on the SOURCE database:
alter system set log_archive_format='arch1_%s_%t_%r.dbf' scope=spfile sid='*';
alter system set log_archive_dest_1='LOCATION=+FRA mandatory reopen=5 VALID_FOR=(ALL_LOGFILES,ALL_ROLES) DB_UNIQUE_NAME=primary' scope=both sid='*';
alter system set log_archive_dest_2='service=stg lgwr async optional noregister reopen=5 valid_for=(online_logfile,primary_role)' scope=both sid='*';
alter system set log_archive_dest_state_1=ENABLE scope=spfile sid='*';
alter system set log_archive_dest_state_2=ENABLE scope=spfile sid='*';
ALTER DATABASE FORCE LOGGING;
ALTER DATABASE ADD SUPPLEMENTAL LOG DATA;
alter table sayyar.account add supplemental log data (all) columns;
alter table sayyar.bank add supplemental log data (all) columns;
The SOURCE database must be running in ARCHIVELOG mode (a minimal check-and-enable sequence is sketched at the end of this step).
Copy the password file from the source host to the staging host (here under /u02).
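If the SOURCE database is not archiving yet, the following generic sketch (not part of the original setup; run as SYSDBA) checks the log mode and enables it:
archive log list
-- if the database log mode shows NOARCHIVELOG, enable archiving (requires a restart):
shutdown immediate
startup mount
alter database archivelog;
alter database open;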
Step 2 Staging Database DBA: Change the following parameters on the STAGE database:
alter system set GLOBAL_NAMES=TRUE scope=spfile sid='*';
alter system set JAVA_POOL_SIZE=50M scope=spfile sid='*';
alter system set LOG_ARCHIVE_DEST_1='Location=/u01/STG/archivelogs valid_for=(online_logfile,primary_role)' scope=spfile;
alter system set LOG_ARCHIVE_DEST_2='Location=/u01/STG/stdby mandatory valid_for=(standby_logfile,primary_role)' scope=spfile;
alter system set log_archive_dest_state_1=ENABLE scope=spfile sid='*';
alter system set log_archive_dest_state_2=ENABLE scope=spfile sid='*';
alter system set LOG_ARCHIVE_FORMAT='arch1_%s_%t_%r.dbf' scope=spfile sid='*';
alter system set JOB_QUEUE_PROCESSES=2 scope=spfile sid='*';
-- Size the following parameters from their current values and the number of change sets you plan to create:
--   PARALLEL_MAX_SERVERS = (current value) + (5 * number of planned change sets)
--   PROCESSES            = (current value) + (7 * number of planned change sets)
--   SESSIONS             = (current value) + (2 * number of planned change sets)
--   STREAMS_POOL_SIZE    = (current value) + (21 MB * number of planned change sets)
alter system set REMOTE_ARCHIVE_ENABLE=TRUE scope=spfile sid='*';
alter system set STREAMS_POOL_SIZE=50M scope=both sid='*';
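As a concrete illustration (the numbers are placeholders, assuming one planned change set), read the current values with SHOW PARAMETER and apply the formulas above:
show parameter parallel_max_servers
show parameter processes
show parameter sessions
-- example: with current values 20, 300 and 472 and one change set planned
alter system set PARALLEL_MAX_SERVERS=25 scope=spfile sid='*';
alter system set PROCESSES=307 scope=spfile sid='*';
alter system set SESSIONS=474 scope=spfile sid='*';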
Step 3 Staging Database DBA: Create standby redo log groups on the STAGE database:
alter database add standby logfile group 4 size 512m;
alter database add standby logfile group 5 size 512m;
alter database add standby logfile group 6 size 512m;
alter database add standby logfile group 7 size 512m;
alter database add standby logfile group 8 size 512m;
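The new groups can be verified with the standard V$STANDBY_LOG view:
select group#, thread#, bytes/1024/1024 as size_mb, status from v$standby_log;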
Step 4 Staging Database DBA: Create the publisher user and grant the required privileges on the STAGE database:
CREATE USER cdcpub IDENTIFIED BY cdcpub DEFAULT TABLESPACE ts_cdcpub
QUOTA UNLIMITED ON SYSTEM
QUOTA UNLIMITED ON SYSAUX;
GRANT CREATE SESSION TO cdcpub;
GRANT CREATE TABLE TO cdcpub;
GRANT CREATE TABLESPACE TO cdcpub;
GRANT UNLIMITED TABLESPACE TO cdcpub;
GRANT SELECT_CATALOG_ROLE TO cdcpub;
GRANT EXECUTE_CATALOG_ROLE TO cdcpub;
GRANT DBA TO cdcpub;
GRANT CREATE SEQUENCE TO cdcpub;
GRANT EXECUTE on DBMS_CDC_PUBLISH TO cdcpub;
EXECUTE DBMS_STREAMS_AUTH.GRANT_ADMIN_PRIVILEGE(grantee => 'cdcpub');
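To double-check what the publisher account ended up with, the grants can be listed from the data dictionary:
select privilege from dba_sys_privs where grantee = 'CDCPUB';
select granted_role from dba_role_privs where grantee = 'CDCPUB';
select privilege, table_name from dba_tab_privs where grantee = 'CDCPUB';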
Step 5 Source Database DBA: Build the LogMiner data dictionary on the SOURCE database:
SET SERVEROUTPUT ON
VARIABLE f_scn NUMBER;
BEGIN
:f_scn := 0;
DBMS_CAPTURE_ADM.BUILD(:f_scn);
DBMS_OUTPUT.PUT_LINE('The first_scn value is ' || :f_scn);
END;
/
The first_scn value is 938275
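If the returned first_scn is lost, it can be recovered later on SOURCE from the archived log that contains the dictionary build (standard V$ARCHIVED_LOG columns):
select distinct first_change#, name
from v$archived_log
where dictionary_begin = 'YES';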
Step 6 Source Database DBA: Prepare the source tables on the SOURCE database:
begin
dbms_capture_adm.prepare_table_instantiation(table_name => 'sayyar.ACCOUNT');
dbms_capture_adm.prepare_table_instantiation(table_name => 'sayyar.BANK');
end;
/
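Whether instantiation was prepared for the two tables can be confirmed on SOURCE through DBA_CAPTURE_PREPARED_TABLES:
select table_owner, table_name, scn
from dba_capture_prepared_tables
where table_owner = 'SAYYAR';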
Step 7 Source Database DBA: Get the global name of the SOURCE database:
SELECT GLOBAL_NAME FROM GLOBAL_NAME;
The query returns RAC here; this value is used as source_database when creating the change source in step 8.
Step 8 Staging Database Publisher: Create the change source on the STAGE database to identify changes coming from the SOURCE database:
connect cdcpub/cdcpub
BEGIN
DBMS_CDC_PUBLISH.CREATE_AUTOLOG_CHANGE_SOURCE(
change_source_name => 'acc_bank_src',
description => 'account and bank source',
source_database => 'RAC',
first_scn => 938275,
online_log => 'y');
END;
/
Step 9 Staging Database Publisher: Create the change set on the STAGE database:
BEGIN
DBMS_CDC_PUBLISH.CREATE_CHANGE_SET(
change_set_name => 'acc_bnk_set',
description => 'acc and bank change set',
change_source_name => 'acc_bank_src',
stop_on_ddl => 'y');
END;
/
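As a sanity check, the new change source and change set should now show up in the CDC publisher views (CHANGE_SOURCES and CHANGE_SETS, as documented for Oracle CDC; adjust the view names if your release differs):
select * from change_sources;
select * from change_sets;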
Step 10 Staging Database Publisher: Create the change tables on the STAGE database:
connect cdcpub/cdcpub
begin
dbms_cdc_publish.create_change_table(
owner => 'cdcpub',
change_table_name => 'acc_ct',
change_set_name => 'acc_bnk_set',
source_schema => 'SAYYAR',
source_table => 'ACCOUNT',
column_type_list => 'ACCOUNT_DESC VARCHAR2(255 BYTE),
ACCT_NO VARCHAR2(255 BYTE),
ACTUAL_BALANCE FLOAT(126),
AVAILABLE_BALANCE FLOAT(126),
BLOCKED_AMOUNT FLOAT(126),
BRANCH_ID NUMBER(19)',
capture_values => 'both',
rs_id => 'y',
row_id => 'n',
user_id => 'n',
timestamp => 'y',
object_id => 'n',
source_colmap => 'n',
target_colmap => 'y',
options_string => null);
end ;
/
begin
dbms_cdc_publish.create_change_table(
owner => 'cdcpub',
change_table_name => 'bank_ct',
change_set_name => 'acc_bnk_set',
source_schema => 'SAYYAR',
source_table => 'BANK',
column_type_list => 'BANK_CODE VARCHAR2(255 BYTE),
BANK_TITLE VARCHAR2(255 BYTE)',
capture_values => 'both',
rs_id => 'y',
row_id => 'n',
user_id => 'n',
timestamp => 'y',
object_id => 'n',
source_colmap => 'n',
target_colmap => 'y',
options_string => null);
end;
/
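The change tables are ordinary tables owned by cdcpub, so they can be inspected directly; the CDC dictionary also tracks them (CHANGE_TABLES view, as documented for Oracle CDC):
select * from change_tables;
desc cdcpub.acc_ct
desc cdcpub.bank_ct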
Step 11 Staging Database Publisher: Enable the change set on the STAGE database:
BEGIN
DBMS_CDC_PUBLISH.ALTER_CHANGE_SET(
change_set_name => 'acc_bnk_set',
enable_capture => 'y');
END;
/
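Enabling the change set starts the underlying Streams capture and apply processes on STAGE; their state can be checked with the standard Streams views:
select capture_name, status from dba_capture;
select apply_name, status from dba_apply;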
Step 12 Source Database DBA: Switch the redo log on the SOURCE database:
alter system switch logfile;
Test:
select * from bank;
insert into sayyar.bank values (aa.nextval, 0, '14', 'test');
Insert into SAYYAR.BANK
(ID, VERSION, BANK_CODE, BANK_TITLE)
Values
(159, 0, '11', 'محسن');
update SAYYAR.BANK set BANK_TITLE='mohsen'
where id =159;
delete from SAYYAR.BANK where id =159;
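After the log switch propagates the changes to STAGE, the captured rows can be read straight from the change table. The exact statement used is not shown in the original, but a query along these lines (columns match the output below) returns the result set that follows:
select operation$, cscn$, commit_timestamp$, xidusn$, xidslt$, xidseq$,
       rsid$, timestamp$, bank_code, bank_title
from cdcpub.bank_ct
order by cscn$, rsid$;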
OPERATION$ | CSCN$ | COMMIT_TIMESTAMP$ | XIDUSN$ | XIDSLT$ | XIDSEQ$ | RSID$ | TIMESTAMP$ | BANK_CODE | BANK_TITLE
I | 967,396 | 10/08/2018 16:40:04 | 10 | 27 | 675 | 10,052 | 10/08/2018 16:39:59 | 14 | test
I | 967,396 | 10/08/2018 16:40:04 | 10 | 27 | 675 | 10,053 | 10/08/2018 16:40:01 | 14 | test
I | 967,418 | 10/08/2018 16:40:26 | 4 | 26 | 667 | 10,054 | 10/08/2018 16:40:24 | 11 | محسن
UO | 967,424 | 10/08/2018 16:40:33 | 6 | 16 | 903 | 10,055 | 10/08/2018 16:40:32 | 11 | محسن
UN | 967,424 | 10/08/2018 16:40:33 | 6 | 16 | 903 | 10,055 | 10/08/2018 16:40:32 | 11 | mohsen
D | 967,448 | 10/08/2018 16:40:58 | 5 | 25 | 900 | 10,056 | 10/08/2018 16:40:56 | 11 | mohsen
OPERATION$ values:
- I (insert)
- UO (data before update)
- UN (data after update)
- D (delete)