Database Services


How to Trace Data Pump Using a Logon Trigger

The purpose of this document is to explain how Data Pump can be traced using a database logon trigger.

1. Create a database AFTER LOGON trigger that enables event 10046 for the Data Pump master (DMnn) and worker (DWnn) processes:

sqlplus 

connect / as sysdba

 

CREATE OR REPLACE TRIGGER sys.DATA_PUMP_TRIGGER
AFTER LOGON ON DATABASE
DECLARE
  v_program v$session.program%TYPE;
BEGIN
  -- Data Pump master (DMnn) and worker (DWnn) processes can be identified
  -- by the last characters of V$SESSION.PROGRAM.
  SELECT substr(program, -5, 2)
  INTO   v_program
  FROM   v$session
  WHERE  sid = (SELECT DISTINCT sid FROM v$mystat);
  IF v_program = 'DW' OR v_program = 'DM' THEN
    EXECUTE IMMEDIATE 'alter session set tracefile_identifier = ''datapump''';
    EXECUTE IMMEDIATE 'alter session set statistics_level = ALL';
    EXECUTE IMMEDIATE 'alter session set max_dump_file_size = UNLIMITED';
    EXECUTE IMMEDIATE 'alter session set events ''10046 trace name context forever, level 12''';
  END IF;
END;
/
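Before running Data Pump, you can confirm that the trigger exists and is enabled; a quick check against the standard DBA_TRIGGERS view (not part of the original steps):

SQL> SELECT trigger_name, status FROM dba_triggers WHERE owner = 'SYS' AND trigger_name = 'DATA_PUMP_TRIGGER';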

2. Run Data Pump Export/Import with the following parameters added to the command line:

expdp/impdp …  METRICS=Y TRACE=480300
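For illustration, a complete export command line could look like the following; the directory, dump file, log file, and schema names here are placeholders and not part of the original note:

expdp system/<password> DIRECTORY=DATA_PUMP_DIR DUMPFILE=expdp_trace%U.dmp LOGFILE=expdp_trace.log SCHEMAS=<schema> METRICS=Y TRACE=480300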

3. Check the trace files generated in USER_DUMP_DEST or in <DIAGNOSTIC_DEST>/diag/rdbms/<dbname>/<sid>/trace. The trace file names contain the 'datapump' identifier set by the trigger.

Example:

-rw-r-----. 1 oracle oinstall 24K Mar 20 17:15 <SID>_dw01_1233_datapump.trm
-rw-r-----. 1 oracle oinstall  7M Mar 20 17:15 <SID>_dw01_1233_datapump.trc
-rw-r-----. 1 oracle oinstall 40K Mar 20 17:15 <SID>_dm00_1234_datapump.trm
-rw-r-----. 1 oracle oinstall  5M Mar 20 17:15 <SID>_dm00_1234_datapump.trc
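On 11g and later you can also query V$DIAG_INFO to confirm the trace directory location (a quick sanity check, not part of the original steps):

SQL> SELECT value FROM v$diag_info WHERE name = 'Diag Trace';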

4. Run TKPROF on the trace files after Data Pump ends.

Example:

[oracle@<HOST> trace]$  tkprof <SID>_dw01_1233_datapump.trc <FILESYSTEM_PATH>/<SID>_dw01_1233_datapump.out waits=yes sort=exeela     

5. Drop the logon trigger once tracing is no longer needed:

SQL> DROP TRIGGER sys.DATA_PUMP_TRIGGER;


How do you perform a full export and exclude certain schemas using the Data Pump API?

You want to perform a full export and exclude certain schemas using the Data Pump API (dbms_datapump). The following example demonstrates how to do this.

You want to export the entire database but exclude SYSTEM and SYS using the DataPump API. The command to perform this operation from the command line (expdp) looks like this:

expdp system/<password> directory=DATA_PUMP_DIR dumpfile=EXPDAT%U.DMP FULL=y exclude=SCHEMA:\"IN \(\'SYS\',\'SYSTEM\'\)\"
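As an alternative that avoids the operating-system escaping of quotes and parentheses, the same EXCLUDE filter can be placed in a parameter file (a sketch; the file name expdat.par is only an example):

DIRECTORY=DATA_PUMP_DIR
DUMPFILE=EXPDAT%U.DMP
FULL=Y
EXCLUDE=SCHEMA:"IN ('SYS','SYSTEM')"

expdp system/<password> parfile=expdat.par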

The equivalent operation using the Data Pump API in PL/SQL looks like this:

connect system/<password>

declare
  h1 number;              -- Data Pump job handle
  dir_name varchar2(30);  -- directory object name

begin
  dir_name := 'DATA_PUMP_DIR';
  h1 := dbms_datapump.open(operation => 'EXPORT',
                           job_mode  => 'FULL',
                           job_name  => 'FULL_MINUS_SCHEMAS',
                           version   => 'COMPATIBLE');

  dbms_datapump.set_parallel(handle => h1, degree => 1);

  dbms_datapump.add_file(handle    => h1,
                         filename  => 'EXPDAT.LOG',
                         directory => dir_name,
                         filetype  => 3);

  dbms_datapump.set_parameter(handle => h1,
                              name   => 'KEEP_MASTER',
                              value  => 0);

  dbms_datapump.add_file(handle    => h1,
                         filename  => 'EXPDAT%U.DMP',
                         directory => dir_name,
                         filetype  => 1);

  dbms_datapump.set_parameter(handle => h1,
                              name   => 'INCLUDE_METADATA',
                              value  => 1);

  dbms_datapump.set_parameter(handle => h1,
                              name   => 'DATA_ACCESS_METHOD',
                              value  => 'AUTOMATIC');

  -- exclude the SYS and SYSTEM schemas from the full export
  dbms_datapump.metadata_filter(handle      => h1,
                                name        => 'NAME_EXPR',
                                value       => 'NOT IN (''SYS'',''SYSTEM'')',
                                object_type => 'SCHEMA');

  dbms_datapump.set_parameter(handle => h1,
                              name   => 'ESTIMATE',
                              value  => 'BLOCKS');

  dbms_datapump.start_job(handle => h1,
                          skip_current => 0, abort_step => 0);
  dbms_datapump.detach(handle => h1);
end;
/
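While the export is running, its progress can also be checked from a second session; a minimal sketch using the DBA_DATAPUMP_JOBS view (not part of the original example):

SQL> SELECT owner_name, job_name, operation, job_mode, state
       FROM dba_datapump_jobs
      WHERE job_name = 'FULL_MINUS_SCHEMAS';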

After reviewing the export log (EXPDAT.LOG), you will see that the SYSTEM and SYS schemas have been excluded.


How to refresh local tables using the DataPump API

You have two databases (one local and one remote) and you want to refresh some local tables with data from the remote tables. The following environment is created to demonstrate this:

Remote database (source)

connect / as sysdba

-- test user
create user test identified by <password> default tablespace users temporary tablespace temp;
grant connect, resource to test;

-- create objects in the TEST schema
connect test/<password>

-- test tables
create table a_tab
(
id number,
text varchar2(50)
);

create table b_tab
(
id number,
text varchar2(50)
);

-- populate the test tables
insert into a_tab values (1, 'Remote schema, table A_TAB - Row 1');
insert into a_tab values (2, 'Remote schema, table A_TAB - Row 2');
insert into b_tab values (1, 'Remote schema, table B_TAB - Row 1');
insert into b_tab values (2, 'Remote schema, table B_TAB - Row 2');
commit; 

 

create table transfer_objects
(
owner varchar2(30),
table_name varchar2(30),
object_type varchar2(30)
);

-- tables that will be refreshed in the local database
insert into transfer_objects values ('TEST', 'A_TAB', 'TABLE');
insert into transfer_objects values ('TEST', 'B_TAB', 'TABLE');
commit;
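As a quick sanity check on the remote database, you can list the contents of the driving table that the NAME_EXPR filter of the import job will later query:

SQL> select owner, table_name, object_type from transfer_objects;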

 

Local database (target)

 

connect / as sysdba

-- directory used for the Data Pump log files
create directory refresh as '<PATH>';

-- test user
create user test identified by <password> default tablespace users temporary tablespace temp;
grant connect, resource, create database link to test;
grant read, write on directory refresh to test;

-- create objects in the TEST schema
connect test/<password>

-- create a database link from the local to the remote database
create database link link_to_remote connect to test identified by <password> using '<TNS alias to remote>';

create table a_tab
(
id number,
text varchar2(50)
);

create table b_tab
(
id number,
text varchar2(50)
);

-- populate the test tables
insert into a_tab values (1, 'Local schema, table A_TAB - Row 1');
insert into b_tab values (1, 'Local schema, table B_TAB - Row 1');
commit;
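Before starting the refresh, it is worth verifying that the database link works and that the remote rows are visible from the local database (a simple check using the objects created above):

SQL> select * from a_tab@link_to_remote;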

Run the following PL/SQL block, which calls the DataPump API in the local database to refresh tables A_TAB/B_TAB from the remote database:

set serverout on size 1000000
declare
  my_handle    number;                            -- Data Pump job handle
  my_db_link   varchar2(30) := 'LINK_TO_REMOTE';  -- database link to the remote database
  ind          number;                            -- loop index
  percent_done number;                            -- percentage of job complete
  job_state    varchar2(30);                      -- to keep track of job state
  le           ku$_LogEntry;                      -- for WIP and error messages
  js           ku$_JobStatus;                     -- the job status from get_status
  jd           ku$_JobDesc;                       -- the job description from get_status
  sts          ku$_Status;                        -- the status object returned by get_status

begin
  -- create the job
  my_handle := dbms_datapump.open
  (
    operation   => 'IMPORT',
    job_mode    => 'TABLE',
    remote_link => my_db_link
  );
  dbms_output.put_line ('Opened DataPump job with handle: '||to_char (my_handle));

  dbms_datapump.set_parallel (my_handle, 1);
  dbms_output.put_line ('Added parallel');

  dbms_datapump.set_parameter
  (
    handle => my_handle,
    name   => 'TABLE_EXISTS_ACTION',
    value  => 'REPLACE'
  );
  dbms_output.put_line ('Added parameter REPLACE');

  dbms_datapump.add_file
  (
    handle    => my_handle,
    filename  => 'impdp_test.log',
    directory => 'REFRESH',
    filetype  => DBMS_DATAPUMP.KU$_FILE_TYPE_LOG_FILE
  );
  dbms_output.put_line ('Added log file');

  -- import only the tables listed in TEST.TRANSFER_OBJECTS
  dbms_datapump.metadata_filter
  (
    handle      => my_handle,
    name        => 'NAME_EXPR',
    value       => 'IN (SELECT TABLE_NAME FROM TEST.TRANSFER_OBJECTS WHERE OWNER = ''TEST'' AND OBJECT_TYPE = ''TABLE'')',
    object_type => 'TABLE'
  );
  dbms_output.put_line ('Added metadata filter');

  dbms_datapump.start_job (my_handle);

  -- monitor the job until it completes or stops
  percent_done := 0;
  job_state := 'UNDEFINED';
  while (job_state != 'COMPLETED') and (job_state != 'STOPPED') loop
    dbms_datapump.get_status
    (
      my_handle,
      dbms_datapump.ku$_status_job_error +
      dbms_datapump.ku$_status_job_status +
      dbms_datapump.ku$_status_wip, -1, job_state, sts
    );
    js := sts.job_status;

    if js.percent_done != percent_done then
      dbms_output.put_line ('*** Job percent done = '||to_char (js.percent_done));
      percent_done := js.percent_done;
    end if;

    if (bitand (sts.mask, dbms_datapump.ku$_status_wip) != 0) then
      le := sts.wip;
    else
      if (bitand (sts.mask, dbms_datapump.ku$_status_job_error) != 0) then
        le := sts.error;
      else
        le := null;
      end if;
    end if;

    if le is not null then
      ind := le.FIRST;
      while ind is not null loop
        dbms_output.put_line (le(ind).LogText);
        ind := le.NEXT(ind);
      end loop;
    end if;
  end loop;

  dbms_output.put_line ('Job has completed');
  dbms_output.put_line ('Final job state = '||job_state);
  dbms_datapump.detach (my_handle);
end;
/

The output should look like the example below:

Opened DataPump job with handle: 18
Added parallel
Added parameter REPLACE
Added log file
Added metadata filter
Starting "TEST"."SYS_IMPORT_TABLE_01":
Estimate in progress using BLOCKS method…
Processing object type TABLE_EXPORT/TABLE/TABLE_DATA
Total estimation using BLOCKS method: 128 KB
. . imported "TEST"."A_TAB" 2 rows
. . imported "TEST"."B_TAB" 2 rows
*** Job percent done = 100
Job "TEST"."SYS_IMPORT_TABLE_01" successfully completed at 13:22:54
Job has completed
Final job state = COMPLETED

PL/SQL procedure successfully completed.

The contents of the local tables after the DataPump import are as follows:

SQL> select * from a_tab;

ID TEXT
---------- --------------------------------------------------
 1 Remote schema, table A_TAB - Row 1
 2 Remote schema, table A_TAB - Row 2

SQL> select * from b_tab;

ID TEXT
---------- --------------------------------------------------
 1 Remote schema, table B_TAB - Row 1
 2 Remote schema, table B_TAB - Row 2
