Monday, February 18, 2008

Scheduling a datapump job in Oracle 10g - Part 1

Subject: Periodically exporting table data using dbms_datapump, scheduled with dbms_scheduler. We will first create a procedure that exports the table data and then set up a scheduled job that calls the procedure.

First of all we need an Oracle directory object to export the data to. Because dbms_datapump runs on the database server side, the package uses database directory objects to locate the files it writes.

create or replace directory expdp_dir as '/oradata/exports/';
grant read, write on directory expdp_dir to public;
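
If you want to confirm that the directory object exists and points to the right path, a quick check against the ALL_DIRECTORIES view looks like this (assuming your account is allowed to see it):

select directory_name, directory_path
from all_directories
where directory_name = 'EXPDP_DIR';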


You also need the EXP_FULL_DATABASE and IMP_FULL_DATABASE roles to export and import objects that belong to schemas other than your own.

grant EXP_FULL_DATABASE to <username>;
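
To double-check which accounts already hold these roles, you can query DBA_ROLE_PRIVS (again assuming access to the DBA views):

select grantee, granted_role
from dba_role_privs
where granted_role in ('EXP_FULL_DATABASE', 'IMP_FULL_DATABASE');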


You can find the rest of the explanations as comments in the following procedure.

DECLARE

-- handle returned by DBMS_DATAPUMP.OPEN
l_dp_handle number;

-- dump file name (without extension) and full path used for logging
-- example values, adjust to your environment
s_file varchar2(100) := 'logvrm_tmp_export';
s_full_file varchar2(200) := '/oradata/exports/logvrm_tmp_export.dmp';

-- variables used while polling the job with DBMS_DATAPUMP.GET_STATUS
job_state varchar2(30);
sts ku$_Status;
le ku$_LogEntry;
le_ind number;
s_le_log varchar2(4000);

-- number of leftover master tables found in dba_datapump_jobs
n_ctl2 number;
BEGIN

/*
First thing to do is to open a datapump session.

operation: EXPORT | IMPORT | SQL_FILE (DBMS_DATAPUMP.ADD_FILE should be run before)
export modes: FULL | SCHEMA | TABLE | TABLESPACE | TRANSPORTABLE
version: COMPATIBLE (default) | LATEST | 10.0.0
*/

l_dp_handle := DBMS_DATAPUMP.open(
operation=>'EXPORT',
job_mode=>'TABLE',
remote_link=>NULL,
job_name=>'LOGVRM_TMP_EXPORT',
version=>'COMPATIBLE');

/*
add the export file to the opened datapump session
you must use '%U' in the filename in case of parallel export
*/

DBMS_DATAPUMP.add_file(
handle => l_dp_handle,
filename => s_file || '.dmp', --dump file name
directory => 'EXPDP_DIR'); --oracle directory created above

/*
now we start filtering the objects we want to export
name: INCLUDE_NAME_EXPR | EXCLUDE_NAME_EXPR | SCHEMA_EXPR | TABLESPACE_EXPR
INCLUDE_PATH_EXPR | EXCLUDE_PATH_EXPR

in the following example we filter by schema, by table name and by partition,
respectively
*/

DBMS_DATAPUMP.metadata_filter(
handle => l_dp_handle,
name => 'SCHEMA_EXPR',
value => 'IN (''VERIM'')');

DBMS_DATAPUMP.metadata_filter(
handle => l_dp_handle,
name => 'NAME_EXPR',
value => 'IN (''BIR_GERI_ODEME_LOG_VRM'',
''BIR_TEKLIF_LOG_VRM'',
''M_D_HESAP_LOG_VRM'',
''T_GNK_TESIS_LOG_VRM'')',
object_type => 'TABLE');

DBMS_DATAPUMP.data_filter(
handle => l_dp_handle,
name => 'PARTITION_EXPR',
value => 'IN (''PAR_TEMP'')');

/*
you can set parallelism in datapump jobs
every thread of a parallel job writes to its own part of the file,
so don't forget to use the %U parameter in the filename
*/

DBMS_DATAPUMP.SET_PARALLEL(
handle => l_dp_handle,
degree => 1);

/*
now we can start the job
*/

DBMS_DATAPUMP.start_job(l_dp_handle);

/*
periodically check the expdp job for its status
s_le_log is the varchar2 variable that holds the final status of the job;
you can search for the 'error' string in s_le_log to determine
whether the end status contains any errors
*/

job_state := 'UNDEFINED';
WHILE (job_state != 'COMPLETED') AND (job_state != 'STOPPED')
LOOP
dbms_datapump.get_status(l_dp_handle,
dbms_datapump.ku$_status_job_error +
dbms_datapump.ku$_status_job_status +
dbms_datapump.ku$_status_wip, -1, job_state, sts);

IF (BITAND(sts.mask,dbms_datapump.ku$_status_wip) != 0)
THEN
le := sts.wip; --work is still in progress
ELSE
IF (BITAND(sts.mask,dbms_datapump.ku$_status_job_error) != 0)
THEN
le := sts.error; --the job reported an error
ELSE
le := NULL;
END IF;
END IF;
END LOOP;


/*
check the log entry of the job status
*/

IF le IS NOT NULL
THEN
le_ind := le.first;
s_le_log := '';
WHILE le_ind IS NOT NULL LOOP
s_le_log := s_le_log || ' ' || le(le_ind).LogText;
le_ind := le.next(le_ind);
END LOOP;
END IF;

/*
if the job hangs, drop the master table of the job
every datapump job creates a table for itself for logging and monitoring purposes
when the job ends, the table should be dropped automatically;
if not, drop it manually, otherwise oracle does not allow the same job name to be run again
*/

select
count(*) into n_ctl2
from
dba_datapump_jobs
where
job_name = 'LOGVRM_TMP_EXPORT' and
owner_name='MARDATA';

IF n_ctl2 > 0 THEN
execute immediate ('drop table MARDATA.LOGVRM_TMP_EXPORT');
END IF;

/*
Check s_le_log for any error messages
('hata' is the Turkish word for error, to catch localized messages as well)
You can also insert the log into a table with a date column
to know what happened during the export
*/

IF (INSTR(LOWER(s_le_log),'error',1) > 0 OR
INSTR(LOWER(s_le_log),'hata',1) > 0)
THEN
NULL;
END IF;


/*
log the export log to know what happened during export
*/

UPDATE DBA_LOGVRM_EXPORT_LOG SET
ENDDATE=SYSDATE,
MSG=substr(s_le_log,1,1000)
WHERE FILENAME=s_full_file; --dump file name with full path
COMMIT;


/*
Dont forget to detach from the job
*/

DBMS_DATAPUMP.DETACH(l_dp_handle);

END;
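
Since the goal is to call this logic from dbms_scheduler, the anonymous block above should be wrapped in a stored procedure. A minimal sketch follows; the procedure name export_logvrm_tables is my own placeholder, and the body is simply the block above with DECLARE replaced by the procedure header:

create or replace procedure export_logvrm_tables
as
-- the declarations from the DECLARE section above go here
begin
-- the body of the anonymous block above goes here:
-- open, add_file, metadata/data filters, set_parallel, start_job,
-- the get_status polling loop, the logging and the detach
null;
end export_logvrm_tables;
/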


The second part of this article will cover using dbms_scheduler to schedule this procedure so that any database object can be periodically backed up with datapump.
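
As a rough preview of Part 2, scheduling the wrapper procedure with dbms_scheduler could look something like the sketch below. The job name LOGVRM_EXPORT_JOB and the daily 02:00 repeat interval are placeholders of mine, not values from the article:

BEGIN
DBMS_SCHEDULER.create_job(
job_name => 'LOGVRM_EXPORT_JOB',
job_type => 'STORED_PROCEDURE',
job_action => 'EXPORT_LOGVRM_TABLES',
start_date => SYSTIMESTAMP,
repeat_interval => 'FREQ=DAILY;BYHOUR=2',
enabled => TRUE);
END;
/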

1 comment:

  1. Great post.
    I am looking to use this technique for automating backups.
    Can you post your complete DECLARE section as I don't see local variables and type declarations.

    Thanks
