Oracle Streams

NOTE:733691.1 - How To Setup Schema Level Streams Replication with a Downstream Capture Process with Implicit Log Assignment (10g, 11g)
NOTE:753158.1 - How To Configure an Oracle Streams Real-Time Downstream Capture Environment
NOTE:1264598.1 - Master Note for Streams Downstream Capture - 10g and 11g [Video]
NOTE:418755.1 - Streams master note (bi-directional)

9iR2 (Oracle-Base): simple example, Streams, 9iR2 doc set
10gR1: Oracle Streams Replication Administrator's Guide (simple example), Concepts and Administration, Administrator's Guide, Advanced Queuing, 10gR1 doc set
10gR2: simple example - http://docs.oracle.com/cd/B19306_01/server.102/b14228/config_simple.htm ; Concepts and Administration, Administrator's Guide, Advanced Queuing, 10gR2 doc set
11gR1:
  Oracle Streams Concepts and Administration - http://docs.oracle.com/cd/B28359_01/server.111/b28321/toc.htm
  Oracle Streams Replication Administrator's Guide - http://docs.oracle.com/cd/B28359_01/server.111/b28322/toc.htm
  Oracle Streams Advanced Queuing User's Guide - http://docs.oracle.com/cd/B28359_01/server.111/b28420/toc.htm
  Concepts and Administration chapters:
    overview - http://docs.oracle.com/cd/B28359_01/server.111/b28321/strms_over.htm#i1006084
    propagation concepts and methods - http://docs.oracle.com/cd/B28359_01/server.111/b28321/strms_prop.htm#STRMS003
    preparation (user, tablespace, privileges, initialization parameters) - http://docs.oracle.com/cd/B28359_01/server.111/b28321/strms_mprep.htm#strms_initialization_parameters
    capture and downstream capture - http://docs.oracle.com/cd/B28359_01/server.111/b28321/strms_ccap.htm#BEHBDBAG
    propagation - http://docs.oracle.com/cd/B28359_01/server.111/b28321/strms_cprop.htm#BABDEAAD
    apply - http://docs.oracle.com/cd/B28359_01/server.111/b28321/strms_capply.htm#i1010532
  white paper (July 2007) - http://www.oracle.com/technetwork/database/features/data-integration/twp-streams-11gr1-134658.pdf
11gR2: Oracle Streams Extended Examples (simple example) - http://docs.oracle.com/cd/E11882_01/server.112/e12862/toc.htm ; Concepts and Administration, Administrator's Guide, Advanced Queuing, 11gR2 doc set

downstream Streams replication setup

-- on both source and target db
CREATE TABLESPACE streams_tbs DATAFILE '/usr/oracle/dbs/streams_tbs.dbf' 
  SIZE 25M REUSE AUTOEXTEND ON MAXSIZE UNLIMITED;

CREATE USER strmadmin IDENTIFIED BY strmadmin 
   DEFAULT TABLESPACE streams_tbs
   TEMPORARY TABLESPACE TEMP 
   QUOTA UNLIMITED ON streams_tbs;

GRANT connect, resource, aq_administrator_role, dba, select_catalog_role TO strmadmin;

-- exec DBMS_STREAMS_AUTH.GRANT_ADMIN_PRIVILEGE('strmadmin');
-- exec DBMS_STREAMS_AUTH.GRANT_ADMIN_PRIVILEGE( grantee => 'strmadmin', grant_privileges => TRUE);
   exec dbms_streams_auth.grant_admin_privilege('strmadmin',true);

SELECT * from dba_streams_administrator;

-- on source db
-- Use the GRANT_ADMIN_PRIVILEGE procedure to generate a script:
CREATE DIRECTORY admin_dir AS '/usr/admin';

BEGIN
  DBMS_STREAMS_AUTH.GRANT_ADMIN_PRIVILEGE(
    grantee          => 'strmadmin',    
    grant_privileges => FALSE,
    file_name        => 'grant_strms_privs.sql',
    directory_name   => 'admin_dir');
END;
/

-- run the script generated by the previous steps --
SET ECHO ON
SPOOL grant_strms_privs.out
@/usr/admin/grant_strms_privs.sql
SPOOL OFF
-- from downstream site
conn /as sysdba
exec DBMS_LOGMNR_D.SET_TABLESPACE ('streams_tbs');
-- on both sites (init.ora): global_names=true
show parameter global_name
select * from global_name;
Alter system set global_names=TRUE scope=BOTH; 

-- job_queue_processes must be greater than 0; use at least 10
show parameter job_queue
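For example, if it is currently 0, a hedged suggestion (assumes an spfile; 10 is just a reasonable floor, not a required value):

alter system set job_queue_processes=10 scope=both;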

show parameter AQ_TM_PROCESSES 

show parameter db_recovery_file_dest
show parameter db_recovery_file_dest_size
-- enable supplemental logging
ALTER TABLE hr.departments ADD SUPPLEMENTAL LOG DATA (ALL) COLUMNS;

alter table emp add supplemental log group supp_log_emp (EMPNO,ENAME,JOB,MGR,HIREDATE,SAL,COMM,DEPTNO);
alter table dept add supplemental log group supp_log_dept (deptno,dname,loc);

select log_group_name, table_name from dba_log_groups where owner='SCOTT';

-- on source db
alter system set log_archive_dest_1 = 'location=/bugmnt12/em/celclnx7/SR3.2274614446/user/strm1 mandatory reopen=5' scope=spfile;
alter system set log_archive_dest_state_1 = enable scope=spfile;

alter system set log_archive_format = 'strm1_%t_%s_%r.dbf' scope=spfile;

alter system set log_archive_dest_2 = 'service=strm2.net arch async noregister valid_for=(online_logfile,all_roles) template=/bugmnt12/em/celclnx7/SR3.2274614446/user/logsfromstrm1/strm1_%t_%s_%r.dbf db_unique_name=strm2' scope=spfile;

alter system set log_archive_dest_state_2 = enable scope=spfile;

alter system set log_archive_config='send,dg_config=(strm1,strm2)' scope=both ;
-- on downstream target database
alter system set log_archive_dest_1 = 'location=/bugmnt12/em/celclnx7/SR3.2274614446/user/strm2 mandatory reopen=5 valid_for=(online_logfile, primary_role)' scope=spfile;
alter system set log_archive_dest_state_1 = enable scope=spfile;

alter system set log_archive_format = 'strm2_%t_%s_%r.dbf' scope=spfile;

alter system set log_archive_dest_2 = 'location=/bugmnt12/em/celclnx7/SR3.2274614446/user/standby mandatory valid_for=(standby_logfile, primary_role)' scope=spfile;
-- or ALTER SYSTEM SET LOG_ARCHIVE_DEST_2='LOCATION=/home/oracle/archives/ORCL102D/standby-archives/ VALID_FOR=(STANDBY_LOGFILE,PRIMARY_ROLE)' SCOPE=SPFILE;
-- note 1: valid_for uses standby_logfile, so this destination archives the redo received from the source database

alter system set log_archive_dest_state_2 = enable scope=spfile;

alter system set log_archive_config='receive,dg_config=(strm1,strm2)' scope=both;
-- restart both databases for the new parameters to take effect
shutdown immediate;
startup
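A quick hedged sanity check after the restart that archiving is active and the destinations are valid (standard commands, run on both sites):

archive log list
select dest_id, status, destination, error from v$archive_dest where status <> 'INACTIVE';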
-- create standby logfiles
conn /as sysdba

select THREAD#, GROUP#, BYTES/1024/1024 from V$LOG;
SELECT GROUP#, THREAD#, SEQUENCE#, ARCHIVED, STATUS FROM V$STANDBY_LOG;
-- on primary
conn /as sysdba

ALTER DATABASE ADD STANDBY LOGFILE GROUP 4
('/home/oracle/archives/ORCL102D/standbylogs/slog4.rdo') SIZE 50M;

ALTER DATABASE ADD STANDBY LOGFILE GROUP 5
('/home/oracle/archives/ORCL102D/standbylogs/slog5.rdo') SIZE 50M;

ALTER DATABASE ADD STANDBY LOGFILE GROUP 6
('/home/oracle/archives/ORCL102D/standbylogs/slog6.rdo') SIZE 50M;

ALTER DATABASE ADD STANDBY LOGFILE GROUP 7
('/home/oracle/archives/ORCL102D/standbylogs/slog7.rdo') SIZE 50M;
-- on standby
conn /as sysdba

ALTER DATABASE ADD STANDBY LOGFILE THREAD 1 GROUP 4
('/home/oracle/archives/ORCL102D/standby-logs/slog4.rdo') SIZE 50M;

ALTER DATABASE ADD STANDBY LOGFILE THREAD 1 GROUP 5
('/home/oracle/archives/ORCL102D/standby-logs/slog5.rdo') SIZE 50M;

ALTER DATABASE ADD STANDBY LOGFILE THREAD 1 GROUP 6
('/home/oracle/archives/ORCL102D/standby-logs/slog6.rdo') SIZE 50M;


ALTER DATABASE ADD STANDBY LOGFILE THREAD 2 GROUP 7
('/home/oracle/archives/ORCL102D/standby-logs/slog7.rdo') SIZE 50M;

ALTER DATABASE ADD STANDBY LOGFILE THREAD 2 GROUP 8
('/home/oracle/archives/ORCL102D/standby-logs/slog8.rdo') SIZE 50M;

ALTER DATABASE ADD STANDBY LOGFILE THREAD 2 GROUP 9
('/home/oracle/archives/ORCL102D/standby-logs/slog9.rdo') SIZE 50M;
-- Increase the number of archiving processes:
ALTER SYSTEM SET log_archive_max_processes=5 SCOPE=BOTH;
-- create db link on both db
-- CREATE DATABASE LINK dbs2.example.com CONNECT TO strmadmin IDENTIFIED BY password USING 'dbs2.example.com';
drop public database link STRM2.NET;
create public database link STRM2.NET using 'STRM2.NET';

conn strmadmin/strmadmin
drop database link STRM2.NET;
create database link strm2.net connect to strmadmin identified by strmadmin;
select user from dual@STRM2.NET;
-- on target machine
drop public database link STRM1.NET;
create public database link STRM1.NET using 'STRM1.NET';

conn strmadmin/strmadmin
drop database link STRM1.NET;
create database link STRM1.NET connect to strmadmin identified by strmadmin;
select user from dual@STRM1.NET;
-- on target downstream db
connect strmadmin/strmadmin
BEGIN
  DBMS_STREAMS_ADM.SET_UP_QUEUE(
    queue_table  => 'STREAMS_QUEUE_TABLE',
    queue_name   => 'STREAMS_QUEUE',
    queue_user   => 'STRMADMIN');
END;
/

-- eg 1: exec dbms_streams_adm.set_up_queue(queue_table => 'apply_q', queue_name => 'apply_q', queue_user => 'strmadmin'); 
-- eg 2: exec dbms_streams_adm.set_up_queue(queue_table => 'capture_q',queue_name => 'capture_q',queue_user => 'strmadmin');

select name, queue_table from user_queues;

BEGIN
  DBMS_STREAMS_ADM.ADD_SCHEMA_RULES(
    schema_name     => 'USR1',
    streams_type    => 'APPLY',
    streams_name    => 'STRMADMIN_APPLY',
    queue_name      => 'STRMADMIN.STREAMS_QUEUE',
    include_dml     => true,
    include_ddl     => true,
    source_database => 'STRM1.NET');
END;
/

-- not needed if the same queue is used for both apply and capture (ADD_SCHEMA_RULES above already created the apply process)
BEGIN
DBMS_APPLY_ADM.CREATE_APPLY( queue_name => 'strmadmin.STREAMS_QUEUE', apply_name => 'STRMADMIN_APPLY', apply_captured => TRUE);
END;
/
BEGIN
  DBMS_APPLY_ADM.ALTER_APPLY(
    apply_name => 'STRMADMIN_APPLY',
    apply_user => 'USR1');
END;
/

DECLARE
  v_started number;
BEGIN
  SELECT decode(status, 'ENABLED', 1, 0) INTO v_started
    FROM DBA_APPLY WHERE APPLY_NAME = 'STRMADMIN_APPLY';
  if (v_started = 0) then
    DBMS_APPLY_ADM.START_APPLY(apply_name => 'STRMADMIN_APPLY');
  end if;
END;
/

SELECT apply_name, status, queue_name FROM DBA_APPLY;

SELECT parameter, value, set_by_user FROM DBA_APPLY_PARAMETERS WHERE apply_name = 'STRMADMIN_APPLY';

select apply_name, status from dba_apply;
-- on target db, create downstream capture process
BEGIN
  DBMS_CAPTURE_ADM.CREATE_CAPTURE(
    queue_name         => 'streams_queue',
    capture_name       => 'downstream_capture',
    rule_set_name      => NULL,
    start_scn          => NULL,
    source_database    => 'STRM1.NET',
    use_database_link  => true,
    first_scn          => NULL,
    logfile_assignment => 'implicit');
END;
/
BEGIN
DBMS_CAPTURE_ADM.SET_PARAMETER( capture_name => 'downstream_capture', parameter => 'downstream_real_time_mine', value => 'y');
END;
/

SELECT capture_name, status from dba_capture;

SELECT parameter, value, set_by_user FROM DBA_CAPTURE_PARAMETERS;

select * from sys.streams$_process_params;

select * from sys.streams$_capture_process;


-- note 1: use_database_link => true automatically runs DBMS_CAPTURE_ADM.BUILD at the source db
-- note 2: logfile_assignment => 'implicit' applies newly arrived redo logs automatically; otherwise use "ALTER DATABASE REGISTER LOGICAL LOGFILE file_name FOR capture_process;"
-- In that DDL statement, file_name is the name of the redo log file and capture_process is the name of the capture process that will use the redo log file at the downstream database.
-- You must add redo log files manually only if the logfile_assignment parameter is set to 'explicit'.
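For illustration only (the file name below is a hypothetical placeholder built from the archive template above), an explicit registration would look like:

ALTER DATABASE REGISTER LOGICAL LOGFILE '/bugmnt12/em/celclnx7/SR3.2274614446/user/logsfromstrm1/strm1_1_100_123456789.dbf' FOR 'downstream_capture';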

BEGIN
  DBMS_STREAMS_ADM.ADD_SCHEMA_RULES(
    schema_name     => 'USR1',
    streams_type    => 'CAPTURE',
    streams_name    => 'downstream_capture',
    queue_name      => 'STRMADMIN.STREAMS_QUEUE',
    include_dml     => true,
    include_ddl     => true,
    source_database => 'STRM1.NET');
END;
/

-- or, for example:
BEGIN
DBMS_STREAMS_ADM.ADD_SCHEMA_RULES(
schema_name => 'mars',
streams_type => 'capture',
streams_name => 'downstream_capture',
queue_name => 'strmadmin.downstream_q',
include_dml => true,
include_ddl => true,
include_tagged_lcr => false,
source_database => 'ORCL102C.EG.ORACLE.COM',
inclusion_rule => TRUE);
END;
/

SELECT rule_name, rule_condition FROM DBA_STREAMS_SCHEMA_RULES WHERE streams_name = 'DOWNSTREAM_CAPTURE' AND streams_type = 'CAPTURE';

-- instantiation
-- on source db
HOST exp USERID=SYSTEM/&system_pwd_source@STRM1.NET OWNER=USR1 FILE=usr1.dmp LOG=usr1_exp.log OBJECT_CONSISTENT=Y STATISTICS = NONE
-- note 1: using OBJECT_CONSISTENT=Y
-- on target db
HOST imp USERID=SYSTEM/&system_pwd_downstream@STRM2.NET FULL=Y CONSTRAINTS=Y FILE=usr1.dmp IGNORE=Y COMMIT=Y LOG=usr1_imp.log STREAMS_INSTANTIATION=Y
-- note 1: using STREAMS_INSTANTIATION=Y
select * from DBA_APPLY_INSTANTIATED_OBJECTS;
select * from DBA_APPLY_INSTANTIATED_SCHEMAS;
-- If the objects are already present in the destination database, there are two ways of instantiating the objects at the destination site.

-- 1. By means of metadata-only export/import (a sketch follows after item 2):
-- Specify ROWS=N during export
-- Specify IGNORE=Y during import along with the import parameters shown above.

-- 2. By manually instantiating the objects (the SET_SCHEMA_INSTANTIATION_SCN block below)
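A hedged sketch of option 1 (metadata-only export/import); the dump file names are placeholders:

-- on source db
HOST exp USERID=SYSTEM/&system_pwd_source@STRM1.NET OWNER=USR1 ROWS=N FILE=usr1_meta.dmp LOG=usr1_meta_exp.log OBJECT_CONSISTENT=Y STATISTICS=NONE
-- on target db
HOST imp USERID=SYSTEM/&system_pwd_downstream@STRM2.NET FULL=Y FILE=usr1_meta.dmp IGNORE=Y LOG=usr1_meta_imp.log STREAMS_INSTANTIATION=Y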

set serveroutput on
DECLARE
  iscn NUMBER; -- Variable to hold instantiation SCN value
  BEGIN
    iscn := DBMS_FLASHBACK.GET_SYSTEM_CHANGE_NUMBER@STRM1.NET();
    DBMS_OUTPUT.PUT_LINE ('Instantiation SCN is: ' || iscn);
    DBMS_APPLY_ADM.SET_SCHEMA_INSTANTIATION_SCN(
      source_schema_name   => 'USR1',
      source_database_name => 'STRM1.NET',
      instantiation_scn    => iscn,
      recursive            => true);
END;
/

-- NOTE: Not only must the objects exist on both the source and target sites, they must also be in sync, or ORA-01403 will be raised by the apply process.

-- If you use recursive => true with SET_SCHEMA_INSTANTIATION_SCN, you need a database link on the destination database to the source database, with the same name as the source database.
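In this walkthrough that link already exists (STRM1.NET created on the target above); a hedged reminder of its shape:

-- on target db, as strmadmin
create database link STRM1.NET connect to strmadmin identified by strmadmin using 'STRM1.NET';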
-- on target db
begin
  DBMS_CAPTURE_ADM.START_CAPTURE(capture_name => 'DOWNSTREAM_CAPTURE');
end;
/
select capture_name, status from dba_capture;
SELECT APPLY_NAME, 
SOURCE_DATABASE, 
LOCAL_TRANSACTION_ID, 
ERROR_NUMBER,
ERROR_MESSAGE,
MESSAGE_COUNT
FROM DBA_APPLY_ERROR;

BEGIN
DBMS_APPLY_ADM.EXECUTE_ERROR(
local_transaction_id => '5.31.134',
execute_as_user => false);
END;
/
-- prerequisite settings on both databases:
GLOBAL_NAMES=TRUE
STREAMS_POOL_SIZE=??? --> or managed by MEMORY_TARGET / SGA_TARGET
COMPATIBLE
ARCHIVELOG mode
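A hedged sketch for checking these prerequisites (the commented-out 200M is only a placeholder size, not a recommendation):

show parameter global_names
show parameter streams_pool_size
show parameter compatible
select log_mode from v$database;   -- must return ARCHIVELOG
-- alter system set streams_pool_size=200M scope=both;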

-- grant queue privileges (example: allow hr to enqueue/dequeue)

BEGIN
   DBMS_AQADM.GRANT_QUEUE_PRIVILEGE (
      privilege  => 'ENQUEUE', 
      queue_name => 'strmadmin.streams_queue',
      grantee    => 'hr');
END;
/

BEGIN
   DBMS_AQADM.GRANT_QUEUE_PRIVILEGE (
      privilege  => 'DEQUEUE', 
      queue_name => 'strmadmin.streams_queue',
      grantee    => 'hr');
END;
/

DBMS_STREAMS_ADM
DBMS_CAPTURE_ADM
DBMS_PROPAGATION_ADM
DBMS_APPLY_ADM
DBMS_STREAMS_AUTH
DBMS_STREAMS_ADVISOR_ADM

DBA_APPLY
DBA_APPLY_CONFLICT_COLUMNS
DBA_APPLY_ERROR
DBA_CAPTURE
DBA_PROPAGATION
DBA_STREAMS_COLUMNS
DBA_STREAMS_RULES
DBA_STREAMS_UNSUPPORTED
DBA_SYNC_CAPTURE

V$BUFFERED_QUEUES
V$PROPAGATION_RECEIVER
V$PROPAGATION_SENDER
V$STREAMS_APPLY_COORDINATOR
V$STREAMS_APPLY_READER
V$STREAMS_APPLY_SERVER
V$STREAMS_CAPTURE
V$STREAMS_TRANSACTION

DBA_RECOVERABLE_SCRIPT 
DBA_RECOVERABLE_SCRIPT_PARAMS
DBA_RECOVERABLE_SCRIPT_BLOCKS
DBA_RECOVERABLE_SCRIPT_ERRORS
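A few hedged monitoring queries against the views above (a minimal sketch; the column lists are not exhaustive):

select capture_name, state, total_messages_captured from v$streams_capture;
select apply_name, state, total_messages_dequeued from v$streams_apply_reader;
select queue_schema, queue_name, num_msgs, spill_msgs from v$buffered_queues;
select apply_name, source_database, error_number, error_message from dba_apply_error;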

table replication example


Sanjay Mishra, "Oracle Streams: Making Data Flow", Oracle Magazine (Nov. 2004):
https://www.oracle.com/technology/oramag/oracle/04-nov/o64streams.html

-- on both db
CREATE TABLESPACE streams_tbs 
DATAFILE '/u01/app/oracle/oradata/TEST10G1/streams_tbs.dbf' SIZE 25M;

CREATE USER strmadmin 
IDENTIFIED BY strmadmin
DEFAULT TABLESPACE streams_tbs
TEMPORARY TABLESPACE temp
QUOTA UNLIMITED ON streams_tbs;

GRANT CONNECT, RESOURCE, DBA TO strmadmin;

BEGIN
  DBMS_STREAMS_AUTH.GRANT_ADMIN_PRIVILEGE(
    grantee         => 'strmadmin',    
    grant_privileges => true);
END;
/

GRANT SELECT_CATALOG_ROLE TO strmadmin;
GRANT SELECT ANY DICTIONARY TO strmadmin;

-- init.ora
GLOBAL_NAMES=TRUE
COMPATIBLE=10.2.0
JOB_QUEUE_PROCESSES=10
STREAMS_POOL_SIZE=200M
-- on source db
-- CONNECT strmadmin/strmadmin@TEST10G1
CONNECT strmadmin/strmadmin

CREATE DATABASE LINK TEST10G2 CONNECT TO strmadmin IDENTIFIED BY strmadmin USING 'TEST10G2';

-- on both db
EXEC DBMS_STREAMS_ADM.SET_UP_QUEUE();
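SET_UP_QUEUE with no arguments creates a default queue (per the docs, streams_queue in streams_queue_table, owned by the invoking user); a quick check with the same query used earlier:

select name, queue_table from user_queues;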

ALTER TABLE scott.emp ADD SUPPLEMENTAL LOG DATA (PRIMARY KEY, UNIQUE) COLUMNS;
-- on source db 
BEGIN
  DBMS_STREAMS_ADM.ADD_TABLE_RULES(
    table_name     => 'scott.emp',   
    streams_type   => 'capture',
    streams_name   => 'capture_stream',
    queue_name     => 
           'strmadmin.streams_queue',
    include_dml    => true,
    include_ddl    => true,
    inclusion_rule => true);
END;
/
-- on source db
BEGIN
  DBMS_STREAMS_ADM.ADD_TABLE_PROPAGATION_RULES(
    table_name                  => 'scott.emp', 
    streams_name                => 'TEST10G1_to_TEST10G2', 
    source_queue_name           => 'strmadmin.streams_queue',
    destination_queue_name      => 'strmadmin.streams_queue@TEST10G2',
    include_dml                 => true,
    include_ddl                 => true,
    source_database             => 'TEST10G1',
    inclusion_rule              => true);
END;
/
-- on target db
GRANT ALL ON scott.emp TO strmadmin;
-- on source db
DECLARE
  source_scn  NUMBER;
BEGIN
  source_scn := DBMS_FLASHBACK.GET_SYSTEM_CHANGE_NUMBER();
  DBMS_APPLY_ADM.SET_TABLE_INSTANTIATION_SCN@TEST10G2(
    source_object_name          => 'scott.emp',
    source_database_name        => 'TEST10G1',
    instantiation_scn           => source_scn);
END;
/
-- on target db
BEGIN
  DBMS_STREAMS_ADM.ADD_TABLE_RULES(
    table_name      => 'scott.emp',
    streams_type    => 'apply', 
    streams_name    => 'apply_stream',
    queue_name      => 
            'strmadmin.streams_queue',
    include_dml     => true,
    include_ddl     => true,
    source_database => 'TEST10G1',
    inclusion_rule  => true);
END;
/
-- on source db
BEGIN
  DBMS_CAPTURE_ADM.START_CAPTURE( capture_name  =>  'capture_stream');
END;
/
-- on target db
BEGIN
  DBMS_APPLY_ADM.SET_PARAMETER( apply_name  => 'apply_stream', parameter   => 'disable_on_error', value  => 'n');
END;
/
 
BEGIN
  DBMS_APPLY_ADM.START_APPLY( apply_name  => 'apply_stream');
END;
/
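A minimal end-to-end sanity test of this example, assuming scott.emp exists on both sides (the test row values are arbitrary):

-- on source db
insert into scott.emp (empno, ename) values (9999, 'STRMTEST');
commit;
-- on target db, after allowing a short delay for capture, propagation and apply
select empno, ename from scott.emp where empno = 9999;
-- clean up on the source once verified; the delete replicates as well
delete from scott.emp where empno = 9999;
commit;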

Miscellaneous reference snippets after this line


BEGIN
  DBMS_STREAMS_ADM.ADD_TABLE_RULES(
    table_name         => 'hr.employees',
    streams_type       => 'capture',
    streams_name       => 'strm01_capture',
    queue_name         => 'strmadmin.streams_queue',
    include_dml        => TRUE,
    include_ddl        => TRUE,
    include_tagged_lcr => FALSE,
    source_database    => NULL,
    inclusion_rule     => TRUE);
END;
/


BEGIN
  DBMS_CAPTURE_ADM.CREATE_CAPTURE(
    queue_name         => 'strmadmin.streams_queue',
    capture_name       => 'strm02_capture',
    rule_set_name      => 'strmadmin.strm01_rule_set',
    start_scn          => NULL,
    source_database    => NULL,
    first_scn          => NULL);
END;
/

SELECT CAPTURE_NAME, FIRST_SCN, MAX_CHECKPOINT_SCN FROM DBA_CAPTURE;  


BEGIN
  DBMS_CAPTURE_ADM.CREATE_CAPTURE(
    queue_name         => 'strmadmin.streams_queue',
    capture_name       => 'strm05_capture',
    rule_set_name      => 'strmadmin.strm01_rule_set',
    start_scn          => 223525,
    source_database    => NULL,
    first_scn          => NULL);
END;
/


downstream capture:
LOG_ARCHIVE_DEST_2='SERVICE=DBS2.EXAMPLE.COM ASYNC NOREGISTER
   VALID_FOR=(ONLINE_LOGFILES,PRIMARY_ROLE)
   DB_UNIQUE_NAME=dbs2'

LOG_ARCHIVE_DEST_2='SERVICE=DBS2.EXAMPLE.COM ASYNC NOREGISTER
   VALID_FOR=(ONLINE_LOGFILES,PRIMARY_ROLE)
   TEMPLATE=/usr/oracle/log_for_dbs1/dbs1_arch_%t_%s_%r.log
   DB_UNIQUE_NAME=dbs2'



COLUMN FIRST_CHANGE# HEADING 'First SCN' FORMAT 999999999
COLUMN NAME HEADING 'Log File Name' FORMAT A50

SELECT DISTINCT FIRST_CHANGE#, NAME FROM V$ARCHIVED_LOG
  WHERE DICTIONARY_BEGIN = 'YES';

DBA_QUEUE_SCHEDULES
USER_QUEUE_SCHEDULES

V$PROPAGATION_SENDER
V$PROPAGATION_RECEIVER
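A hedged sketch of propagation monitoring with these views (column names per the 10g/11g documentation; verify against your release):

select qname, destination, failures, last_error_msg from dba_queue_schedules;
select queue_name, dblink, total_msgs, total_bytes from v$propagation_sender;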


DBMS_APPLY_ADM 

BEGIN 
  DBMS_STREAMS_ADM.ADD_GLOBAL_RULES(
    streams_type        =>  'capture',
    streams_name        =>  'capture',
    queue_name          =>  'streams_queue',
    include_dml         =>  TRUE,
    include_ddl         =>  TRUE,
    include_tagged_lcr  =>  FALSE,
    source_database     =>  NULL,
    inclusion_rule      =>  TRUE);
END;
/
BEGIN DBMS_STREAMS_ADM.ADD_GLOBAL_RULES(
   streams_type        =>  'capture',
   streams_name        =>  'capture_002',
   queue_name          =>  'streams_queue',
   include_dml         =>  TRUE,
   include_ddl         =>  TRUE,
   include_tagged_lcr  =>  TRUE,
   source_database     =>  NULL,
   inclusion_rule      =>  TRUE);
END;
/

BEGIN 
  DBMS_STREAMS_ADM.ADD_SCHEMA_PROPAGATION_RULES(
    schema_name              =>  'hr',
    streams_name             =>  'dbs1_to_dbs2',
    source_queue_name        =>  'streams_queue',
    destination_queue_name   =>  'streams_queue@dbs2.example.com',
    include_dml              =>  TRUE,
    include_ddl              =>  TRUE,
    include_tagged_lcr       =>  FALSE,
    source_database          =>  'dbs1.example.com',
    inclusion_rule           =>  TRUE);
END;
/

BEGIN 
  DBMS_STREAMS_ADM.ADD_TABLE_RULES(
    table_name          =>  'hr.locations',
    streams_type        =>  'apply',
    streams_name        =>  'apply',
    queue_name          =>  'streams_queue',
    include_dml         =>  TRUE,
    include_ddl         =>  FALSE,
    include_tagged_lcr  =>  FALSE,
    source_database     =>  'dbs1.example.com',
    inclusion_rule      =>  TRUE);
END;
/

BEGIN 
  DBMS_STREAMS_ADM.ADD_TABLE_RULES(
    table_name          =>  'hr.countries',
    streams_type        =>  'apply',
    streams_name        =>  'apply',
    queue_name          =>  'streams_queue',
    include_dml         =>  FALSE,
    include_ddl         =>  TRUE,
    include_tagged_lcr  =>  FALSE,
    source_database     =>  'dbs1.example.com',
    inclusion_rule      =>  TRUE);
END;
/

BEGIN 
  DBMS_STREAMS_ADM.ADD_SUBSET_RULES(
    table_name               =>  'hr.regions',
    dml_condition            =>  'region_id=2',
    streams_type             =>  'apply',
    streams_name             =>  'apply',
    queue_name               =>  'streams_queue',
    include_tagged_lcr       =>  FALSE,
    source_database          =>  'dbs1.example.com');
END;
/

BEGIN
  DBMS_STREAMS_ADM.ADD_SCHEMA_RULES(
    schema_name         =>  'hr',   
    streams_type        =>  'apply',
    streams_name        =>  'apply',
    queue_name          =>  'streams_queue',
    include_dml         =>  TRUE,
    include_ddl         =>  FALSE,
    include_tagged_lcr  =>  FALSE,
    source_database     =>  'dbs1.example.com',
    inclusion_rule      =>  TRUE);
END;
/

BEGIN 
  DBMS_STREAMS_ADM.ADD_TABLE_RULES(
    table_name          =>  'hr.job_history',
    streams_type        =>  'apply',
    streams_name        =>  'apply',
    queue_name          =>  'streams_queue',
    include_dml         =>  TRUE,
    include_ddl         =>  FALSE,
    include_tagged_lcr  =>  TRUE,
    source_database     =>  'dbs1.example.com',
    inclusion_rule      =>  FALSE);
END;
/


instantiation method: rman

10gR2 RMAN DUPLICATE documentation: http://docs.oracle.com/cd/B19306_01/backup.102/b14191/rcmdupdb.htm#BRADV165
rman
CONNECT TARGET SYS/change_on_install@stm1.net
CONNECT AUXILIARY SYS/change_on_install@stm2.net
RUN
      { 
        SET UNTIL SCN 45442631;
        ALLOCATE AUXILIARY CHANNEL stm2 DEVICE TYPE sbt; 
        DUPLICATE TARGET DATABASE TO stm2 
        NOFILENAMECHECK
        OPEN RESTRICTED;
      }
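After the duplicate finishes, the UNTIL SCN used above is typically what gets passed as the instantiation SCN on the destination (a hedged sketch; the schema name is a placeholder and the database name follows the RMAN example):

BEGIN
  DBMS_APPLY_ADM.SET_SCHEMA_INSTANTIATION_SCN(
    source_schema_name   => 'USR1',        -- placeholder schema
    source_database_name => 'STM1.NET',
    instantiation_scn    => 45442631);     -- the UNTIL SCN from the RUN block
END;
/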

instantiation method: exp

exp system/oracle owner=mars file=mars.dump log=mars.log object_consistent=Y

imp system/oracle file=mars.dump full=y ignore=y STREAMS_INSTANTIATION=Y

instantiation method: expdp

-- From Source sqlplus session:

conn system/oracle
!mkdir /tmp/schema_export
!chmod 777 /tmp/schema_export
create or replace directory schema_export as '/tmp/schema_export';
!expdp system/oracle SCHEMAS=MARS DUMPFILE=schema_export:schema.dmp LOGFILE=schema_export:schema.log


-- From Downstream sqlplus session:

conn system/oracle
!mkdir /tmp/schema_import
!chmod 777 /tmp/schema_import
create or replace directory schema_import as '/tmp/schema_import';
-- copy the dump file schema.dmp from '/tmp/schema_export' on source site to '/tmp/schema_import' on downstream site.
!impdp system/oracle SCHEMAS=mars DIRECTORY=schema_import DUMPFILE=schema.dmp

get current scn

SET SERVEROUTPUT ON SIZE 1000000
DECLARE
  current_scn NUMBER;
BEGIN
  current_scn:= DBMS_FLASHBACK.GET_SYSTEM_CHANGE_NUMBER;
      DBMS_OUTPUT.PUT_LINE('Current SCN: ' || current_scn);
END;
/

Removing an Oracle Streams Configuration

-- Removing an Oracle Streams Configuration
EXEC DBMS_STREAMS_ADM.REMOVE_STREAMS_CONFIGURATION();
-- After running this procedure, drop the Oracle Streams administrator at the database, if possible.
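For example, and only if nothing else uses the account or its objects:

DROP USER strmadmin CASCADE;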