4 Replies Latest reply: Aug 23, 2013 10:46 AM by user5716448

    Trying to import tables from a Data Pump file that was created using transportable mode

    user5716448



      Hi, I am using impdp on Oracle 11.2.0.3 and have a dump file which contains an export of tables that was done using transportable tablespace mode.



      I want to import 3 of the tables from the file concerned into another database using impdp, but it is not working.


      Error

      ORA-39002: invalid operation

      ORA-39061: import mode FULL conflicts with export mode TRANSPORTABLE


      {code}
      userid=archive/MDbip25
      DIRECTORY=TERMSPRD_EXTRACTS
      DUMPFILE=archiveexppre.964.dmp
      LOGFILE=por_200813.log
      PARALLEL=16
      TABLES=ZPX_RTRN_CDN_STG_BAK,ZPX_RTRN_STG_BAK,ZPX_STRN_STG_BAK
      REMAP_TABLESPACE=BI_ARCHIVE_DATA:BI_ARCHIVE_LARGE_DATA
      REMAP_TABLESPACE=BI_ARCHIVE_IDX:BI_ARCHIVE_LARGE_IDX
      {code}

       

      Any ideas?

        • 1. Re: Trying to import tables from a Data Pump file that was created using transportable mode
          DK2010

          Hi,

           

          AFAIK you can't do such a thing with a dump taken in TTS mode. BTW, can you share the export command here so we can understand what is in your dump file?

          • 2. Re: Trying to import tables from a Data Pump file that was created using transportable mode
            user5716448

            Hi,

             

            Export command

             

            {code}
            procedure export_old_partitions_to_disk (pi_seq_num NUMBER)
            is
              h1        NUMBER;                                 -- Data Pump job handle
              dir_name  CONSTANT all_directories.directory_name%TYPE := 'DATA_EXPORTS_DIR';

              v_file_name  VARCHAR2(100);
              v_log_name   VARCHAR2(100);

              v_job_status ku$_Status;                          -- status object returned by get_status
              v_job_state  VARCHAR2(4000);
              v_status     ku$_Status1010;
              v_logs       ku$_LogEntry1010;
              v_row        PLS_INTEGER;
              v_current_sequence_number archive_audit.aa_etl_run_num_seq%TYPE;
              v_jobState   user_datapump_jobs.state%TYPE;
            begin
              -- Set to read only to make the tablespace transportable
              execute immediate 'alter tablespace ARCHIVED_PARTITIONS read only';

              -- Get last etl_run_num_seq by querying public synonym ARCHIVE_ETL_RUN_NUM_SEQ
              -- Need to check there is no caching on etl_run_num_seq
              /*
              select last_number - 1
                into v_current_sequence_number
                from all_sequences a
               where a.sequence_name = 'ETL_RUN_NUM_SEQ';
              */
              v_file_name := 'archiveexppre.'||pi_seq_num||'.dmp';
              v_log_name  := 'archiveexpprelog.'||pi_seq_num||'.log';

              dbms_output.put_line(v_file_name);
              dbms_output.put_line(v_log_name);

              -- Create a (user-named) Data Pump export job in TRANSPORTABLE mode
              h1 := dbms_datapump.open(operation   => 'EXPORT',
                                       job_mode    => 'TRANSPORTABLE',
                                       remote_link => NULL,
                                       job_name    => 'ARCHIVE_OLD_PARTITIONS_'||pi_seq_num);

              dbms_datapump.add_file(handle    => h1,
                                     filename  => v_file_name,
                                     directory => dir_name,
                                     filetype  => dbms_datapump.ku$_file_type_dump_file,
                                     reusefile => 1);           -- 1 = overwrite an existing file

              dbms_datapump.add_file(handle    => h1,
                                     filename  => v_log_name,
                                     directory => dir_name,
                                     filetype  => dbms_datapump.ku$_file_type_log_file,
                                     reusefile => 1);

              -- Export everything in the ARCHIVED_PARTITIONS tablespace
              dbms_datapump.metadata_filter(handle => h1,
                                            name   => 'TABLESPACE_EXPR',
                                            value  => 'IN(''ARCHIVED_PARTITIONS'')');

              -- dbms_datapump.set_parameter(h1, 'TRANSPORTABLE', 'ALWAYS');
              -- dbms_datapump.detach(handle => h1);

              -- Start the Data Pump job and wait for it to finish
              dbms_datapump.start_job(h1);
              dbms_datapump.wait_for_job(h1, v_jobState);

              dbms_output.put_line('Status '||v_jobState);
              dbms_output.put_line('Job has completed');

              execute immediate 'alter tablespace ARCHIVED_PARTITIONS read write';

            exception
              when others then
                dbms_datapump.get_status(handle    => h1,
                                         mask      => dbms_datapump.ku$_status_wip,
                                         timeout   => 0,
                                         job_state => v_job_state,
                                         status    => v_job_status);
                dbms_output.put_line(v_job_state);
                misc_routines.record_error;
                raise;
            end export_old_partitions_to_disk;
            {code}
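            For reference, the procedure is invoked along these lines; the sequence number 964 below is only an example, taken from the dump file name (archiveexppre.964.dmp) in the parameter file above:

            {code}
            begin
              export_old_partitions_to_disk(pi_seq_num => 964);
            end;
            /
            {code}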

            • 3. Re: Trying to import tables from a Data Pump file that was created using transportable mode
              Dean Gagne

              A transportable export needs to be imported using a transportable import.  Using full=y is not a valid option.  You might be able to pass in an include expression for the tables that you want, but the job still needs to be transportable.  Your import command would look something like:

               

              impdp user/password transport_datafiles=/path1/pathx/dir1/dir5/filename.dbf directory=dpump_dir dumpfile=your_dumpfile.dmp include=table:"IN ('a', 'b', 'c')"

               

              I see that you are using the API, but this would be the command-line type of import.
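              Since you are driving this through DBMS_DATAPUMP, a rough API sketch of the equivalent transportable import is below. The datafile path is the same placeholder as in the command above, the directory object and dump file name are assumed from your parameter file, and whether the NAME_EXPR table filter is honoured for a transportable job is an assumption, per the caveat above:

              {code}
              DECLARE
                h1          NUMBER;                        -- Data Pump job handle
                v_job_state user_datapump_jobs.state%TYPE;
              BEGIN
                -- Open an IMPORT job in TRANSPORTABLE mode to match the export
                h1 := dbms_datapump.open(operation => 'IMPORT',
                                         job_mode  => 'TRANSPORTABLE');

                -- Dump file written by the transportable export
                dbms_datapump.add_file(handle    => h1,
                                       filename  => 'archiveexppre.964.dmp',
                                       directory => 'TERMSPRD_EXTRACTS',
                                       filetype  => dbms_datapump.ku$_file_type_dump_file);

                -- Datafile copied from the source system (path is a placeholder)
                dbms_datapump.add_file(handle    => h1,
                                       filename  => '/path1/pathx/dir1/dir5/filename.dbf',
                                       directory => NULL,
                                       filetype  => dbms_datapump.ku$_file_type_datafile);

                -- Restrict to the three tables wanted (assumed to work for a TTS job)
                dbms_datapump.metadata_filter(h1, 'NAME_EXPR',
                    'IN (''ZPX_RTRN_CDN_STG_BAK'',''ZPX_RTRN_STG_BAK'',''ZPX_STRN_STG_BAK'')',
                    'TABLE');

                dbms_datapump.start_job(h1);
                dbms_datapump.wait_for_job(h1, v_job_state);
                dbms_output.put_line('Status '||v_job_state);
              END;
              /
              {code}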

               

              Hope this helps.

               

              Dean