Export/Import
A mounted NFS share is a prerequisite for using the Export/Import functionality. The NFS mount is a resource action available in the vRA UI. See Mount NFS Share for details.
Be aware that database export and import can be very resource-intensive operations. Processing large data sets in particular can exceed the standard configuration settings and resources. Please consult the chapter Large Data Operations before exporting or importing large data sets.
Data Pump Export
To initiate an export job, use the PL/SQL package DBMS_DATAPUMP. Log in as PDBADMIN or any other user with the DATAPUMP_EXP_FULL_DATABASE privilege. The following example shows how to export a schema with PL/SQL.
Export Example
DECLARE
l_dp_handle NUMBER;
BEGIN
-- Open a schema-mode export job (no database link; server decides the
-- compatible dump-file version via 'LATEST').
l_dp_handle := DBMS_DATAPUMP.open(
operation => 'EXPORT',
job_mode => 'SCHEMA',
remote_link => NULL,
job_name => 'SCOTT_EXPORT',
version => 'LATEST');
-- Dump file, written to the directory object NFS (the mounted NFS share).
-- NOTE: the job fails if scottSQL.dmp already exists in that directory.
DBMS_DATAPUMP.add_file(
handle => l_dp_handle,
filename => 'scottSQL.dmp',
directory => 'NFS');
-- Log file for job progress and errors, in the same directory object.
DBMS_DATAPUMP.add_file(
handle => l_dp_handle,
filename => 'scottSQL.log',
directory => 'NFS',
filetype => DBMS_DATAPUMP.KU$_FILE_TYPE_LOG_FILE);
-- Restrict the job to the SCOTT schema.
DBMS_DATAPUMP.metadata_filter(
handle => l_dp_handle,
name => 'SCHEMA_EXPR',
value => '= ''SCOTT''');
DBMS_DATAPUMP.start_job(l_dp_handle);
-- Detach: the job keeps running in the background. Monitor it via the
-- log file or the DBA_DATAPUMP_JOBS view.
DBMS_DATAPUMP.detach(l_dp_handle);
EXCEPTION
WHEN OTHERS THEN
-- Without this handler a failure after OPEN would leave an orphaned
-- Data Pump job (and its master table) behind. Stop the job, then
-- re-raise the original error for the caller.
IF l_dp_handle IS NOT NULL THEN
DBMS_DATAPUMP.stop_job(l_dp_handle);
END IF;
RAISE;
END;
/
Data Pump Import
To initiate an import job, use the PL/SQL package DBMS_DATAPUMP. Log in as PDBADMIN or any other user with the DATAPUMP_IMP_FULL_DATABASE privilege. The following example shows how to execute a full import with PL/SQL.
Import Example
-- Enable DBMS_OUTPUT so the progress messages below appear in SQL*Plus.
set serveroutput on
DECLARE
ind NUMBER; -- Index for iterating over the log-entry collection
h1 NUMBER; -- Data Pump job handle
percent_done NUMBER; -- Percentage of job complete
job_state VARCHAR2(30); -- To keep track of job state
le ku$_LogEntry; -- For WIP and error messages
js ku$_JobStatus; -- The job status from get_status
jd ku$_JobDesc; -- The job description from get_status (declared but unused here)
sts ku$_Status; -- The status object returned by get_status
BEGIN
-- Create a full-database import job named EXAMPLE2 (no database link).
h1 := DBMS_DATAPUMP.OPEN('IMPORT','FULL',NULL,'EXAMPLE2');
-- Use up to 8 parallel workers; see chapter Large Data Operations before
-- raising this on constrained systems.
dbms_datapump.set_parallel(handle => h1, degree => 8);
-- Write the job log to imp_debug.log in the directory object NFS.
dbms_datapump.add_file( handle => h1, filename => 'imp_debug.log' , directory => 'NFS' , filetype => DBMS_DATAPUMP.KU$_FILE_TYPE_LOG_FILE);
DBMS_OUTPUT.PUT_LINE('Successfully add log file: '||'imp_debug.log' );
-- METRICS=1 records object counts and timing information in the log.
DBMS_DATAPUMP.SET_PARAMETER(handle => h1, name => 'METRICS', value => 1);
-- Dump file to import, read from the directory object NFS.
-- NOTE(review): the file name is environment-specific; adjust to your dump.
DBMS_DATAPUMP.ADD_FILE(h1,'my_data_pump_dumpfile_D00BRM.dmp','NFS');
DBMS_DATAPUMP.START_JOB(h1);
-- The import job should now be running. In the following loop, the job is
-- monitored until it completes. In the meantime, progress information is
-- displayed. Note: this is identical to the export example.
percent_done := 0;
job_state := 'UNDEFINED';
while (job_state != 'COMPLETED') and (job_state != 'STOPPED') loop
-- Ask for job status, WIP and error messages; timeout -1 blocks until
-- something is available, so this loop does not busy-wait.
dbms_datapump.get_status(h1,
dbms_datapump.ku$_status_job_error +
dbms_datapump.ku$_status_job_status +
dbms_datapump.ku$_status_wip,-1,job_state,sts);
js := sts.job_status;
-- If the percentage done changed, display the new value.
if js.percent_done != percent_done
then
dbms_output.put_line('*** Job percent done = ' ||
to_char(js.percent_done));
percent_done := js.percent_done;
end if;
-- If any work-in-progress (WIP) or Error messages were received for the job,
-- display them. sts.mask flags which members of sts were populated.
if (bitand(sts.mask,dbms_datapump.ku$_status_wip) != 0)
then
le := sts.wip;
else
if (bitand(sts.mask,dbms_datapump.ku$_status_job_error) != 0)
then
le := sts.error;
else
-- Atomically null collection; the IS NOT NULL test below skips it.
le := null;
end if;
end if;
if le is not null
then
-- Walk the (possibly sparse) collection with FIRST/NEXT.
ind := le.FIRST;
while ind is not null loop
dbms_output.put_line(le(ind).LogText);
ind := le.NEXT(ind);
end loop;
end if;
end loop;
-- Indicate that the job finished and gracefully detach from it.
-- NOTE(review): this message is printed even when the loop exits with
-- job_state = 'STOPPED'; check the final state line that follows.
dbms_output.put_line('Job has completed');
dbms_output.put_line('Final job state = ' || job_state);
dbms_datapump.detach(h1);
END;
/