
Import Oracle database

Execution time: 5 minutes plus the dump file copy time and the import wait time; 20-60 minutes in total for a standard censhare database.

Prerequisites

  • ssh access to the database server as user oracle

  • (optional) ssh access to the censhare application server

Check and drop the current CORPUS user

SHOW the non-default schemas in the database.

CODE
{ # Run as oracle_user@database_host

sqlplus -s / as sysdba <<EOF
set pages 100 lines 120 feed off
select username "1.CHECK_EXISTING_USERS" from dba_users 
where account_status='OPEN' and username not like '%SYS%' and username not in ('MGMT_VIEW','DBSNMP');
EOF
echo "# Normally you will see here nothing or only CORPUS, which has to be dropped with the next script. \n" ;
} || {
echo -e "\n\n There is a problem! You must be connected as oracle and the database must be opened."
}
  • If you see many CORPUS_XXX schemas, then you have to check them with CheckJDBC.sh.
  • If you don't see any CORPUS schemas, then this is either an empty database or not a standard one.
  • If you see only CORPUS, then you have to drop it with the next script.
Only drop it if you are sure; otherwise it's better to do an export first (see the sketch below).
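
If you want a safety copy before dropping the user, a minimal export sketch could look like this (the dump and log file names are just examples; the file is written to the default DATA_PUMP_DIR):

CODE
{ # Run as oracle_user@database_host -- optional safety export before dropping
STAMP=$(date '+%Y-%m-%d_%H%M%S')
expdp \"/ as sysdba\" schemas=CORPUS \
dumpfile=CORPUS_backup_$STAMP.dmp logfile=CORPUS_backup_$STAMP.elog
} || {
echo -e "\n\n There is a problem! You must be connected as oracle and the database must be opened."
}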

This script does the following:

  • ASK for schema name

  • DROP the schema
CODE
{ 
 # ask for username to drop
 unset USER_TO_DROP CONFIRM;
 echo -n -e "\nEnter database user to DROP [CORPUS]: " ; 
 read USER_TO_DROP ;
 test -z "$USER_TO_DROP" && USER_TO_DROP=CORPUS ;

 # ask for confirmation
 echo -n "Drop database user '$USER_TO_DROP', Are you sure? (N/y) :"
 unset ans; read ans
 test "$ans" != "y" -a "$ans" != "Y" && {
   echo "OK: no user has been dropped. Please check and rerun the script."
 } || {
   # drop the user
   echo -e "drop user $USER_TO_DROP cascade;\nquit;" | sqlplus -s / as sysdba
 }

} || {
echo -e "\n\n There is a problem! You must be connected as oracle and the database must be opened."
} 
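
To double-check that the user is really gone, you can rerun the check at the top of this page, or use this quick sketch (assuming CORPUS was the dropped user; it should return no rows):

CODE
{ 
sqlplus -s / as sysdba <<EOF
set pages 100 lines 120 feed off
select username "SHOULD_BE_EMPTY" from dba_users where username='CORPUS';
EOF
}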

Check and create the tablespaces

SHOW the non-default tablespaces in the database.

CODE
{ 
sqlplus -s / as sysdba <<EOF
set pages 100 lines 120 feed off
select tablespace_name "2.CHECK_FOR_CORPUS_TABLESPACES" from dba_tablespaces 
where contents='PERMANENT' and tablespace_name not in ('SYSTEM','SYSAUX','USERS');
EOF
echo "# It's OK if you see above CORPUS, CORPUS_LOG and CORPUS_CTX[v3 only], otherwise you have to create the tablespaces with the next script." ;
}

If you see more than three CORPUS tablespaces, ask the contact person below before continuing.

Create the CORPUS tablespaces only if you don't see them in the list above.

CODE
{ 
sqlplus -s / as sysdba <<EOF
CREATE TABLESPACE "CORPUS"
    LOGGING
    DATAFILE '$HOME/orahome/oradata/corpus/CORPUS.dbf'
    SIZE 100M
    AUTOEXTEND ON NEXT 100M
    EXTENT MANAGEMENT LOCAL
    SEGMENT SPACE MANAGEMENT AUTO;
CREATE TABLESPACE "CORPUS_LOG"
    LOGGING
    DATAFILE '$HOME/orahome/oradata/corpus/CORPUS_LOG.dbf'
    SIZE 100M
    AUTOEXTEND ON NEXT 100M
    EXTENT MANAGEMENT LOCAL
    SEGMENT SPACE MANAGEMENT AUTO;
EOF
} || {
echo -e "\n\n There is a problem! You must be connected as oracle and the database must be opened."
}
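
The datafile path used above ($HOME/orahome/oradata/corpus) may differ on your installation; as a sketch, you can list the existing datafiles first and adjust the path accordingly:

CODE
{ 
sqlplus -s / as sysdba <<EOF
set pages 100 lines 120 feed off
select file_name "EXISTING_DATAFILES" from dba_data_files;
EOF
}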

Copy the dump file

SHOW the default directory into which the dump file must be copied.

CODE
{ 
sqlplus -s / as sysdba <<EOF
set pages 100 lines 70 feed off
select DIRECTORY_PATH "3.COPY_DUMP_FILE_TO"  from dba_directories where directory_name='DATA_PUMP_DIR';
EOF
} || {
echo -e "\n\n There is a problem! You must be connected as oracle and the database must be opened."
}
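
To copy the dump file itself, a minimal scp sketch looks like this (host name and paths are placeholders; the target is the directory shown above):

CODE
# Placeholder host name and paths -- adjust to your environment
scp /path/to/censhare_export.dmp oracle@database-server:/path/shown/by/DATA_PUMP_DIR/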

Switch to NOARCHIVELOG mode (optional)

Only advisable if you are working with a big database (> 20 GB).
CODE
du -sh /opt/oracle/oradata/corpus
This way of shutting down the DB does not work on Solaris, where the DB is managed by SMF.
CODE
{ 
sqlplus / as sysdba <<EOF
shutdown immediate;
startup mount;
alter database noarchivelog;
alter database open;
archive log list;
EOF
}
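
The "archive log list" at the end of the script already prints the new mode; as an additional sketch, you can confirm it from the data dictionary (expected value: NOARCHIVELOG):

CODE
{ 
sqlplus -s / as sysdba <<EOF
set pages 100 lines 70 feed off
select log_mode "CURRENT_LOG_MODE" from v\$database;
EOF
}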

Import

This script does the following:

  • ASK for filename

  • IMPORT

CODE
{ 
while
echo -n "Enter dump file name to import (without the full path) : ";
do read DUMP_FILE_NAME;  [ -z "$DUMP_FILE_NAME" ] || break;
done;

echo -n "Enter user to be imported [CORPUS]:"
unset USER_TO_IMPORT; read USER_TO_IMPORT
test -z "$USER_TO_IMPORT" && USER_TO_IMPORT=CORPUS

impdp \"/ as sysdba\" schemas=$USER_TO_IMPORT \
dumpfile=$DUMP_FILE_NAME logfile=${DUMP_FILE_NAME}_$(date '+%Y-%m-%d_%H%M%S').ilog 
}
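
After the import finishes, it is worth scanning the log file for ORA- errors. A sketch, assuming the log was written to the DATA_PUMP_DIR shown above (replace the placeholder path):

CODE
# Placeholder path -- use the directory printed by "3.COPY_DUMP_FILE_TO"
grep -i 'ORA-' /path/shown/by/DATA_PUMP_DIR/*.ilog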

Ignore this error if you are using censhare v4 or later:

CODE
ORA-01919: role 'CORPUS_URL_DATASTORE_ROLE' does not exist

Alternative: Import to a different database schema

The command below can be used to import the dump into a different database schema.

CODE
{
while 
echo -n "Enter dump file name to import (without the full path) : ";
do read DUMP_FILE_NAME;  [ -z "$DUMP_FILE_NAME" ] || break;
done;

echo -n "Enter user to be imported [CORPUS]:"
unset USER_TO_IMPORT; read USER_TO_IMPORT
test -z "$USER_TO_IMPORT" && USER_TO_IMPORT=CORPUS
ORIGINAL_TABLESPACE_NAME=$USER_TO_IMPORT

echo -n "Enter desired user to be remapped [CORPUS_TEST]:"
unset NEW_USER; read NEW_USER
test -z "$NEW_USER" && NEW_USER=CORPUS_TEST
NEW_TABLESPACE_NAME=$NEW_USER

impdp \"/ as sysdba\" remap_schema=$USER_TO_IMPORT:$NEW_USER remap_tablespace=$ORIGINAL_TABLESPACE_NAME:$NEW_TABLESPACE_NAME dumpfile=$DUMP_FILE_NAME logfile=$DUMP_FILE_NAME_$(date '+%Y-%m-%d_%H%M%S').ilog
}
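
To verify the remapped import, a quick sketch that counts the objects owned by the new user (CORPUS_TEST is assumed here; expect a non-zero count):

CODE
{ 
sqlplus -s / as sysdba <<EOF
set pages 100 lines 120 feed off
select owner, count(*) "OBJECT_COUNT" from dba_objects where owner='CORPUS_TEST' group by owner;
EOF
}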

Switch back to ARCHIVELOG mode (optional)

Only needed if you switched to NOARCHIVELOG mode in the step above.
This way of shutting down the DB does not work on Solaris, where the DB is managed by SMF.
CODE
{ 
sqlplus / as sysdba <<EOF
shutdown immediate;
startup mount;
alter database archivelog;
alter database open;
archive log list;
EOF
}

Cleanup

GENERATE a command to check for old export files.

CODE
{
sqlplus -s / as sysdba <<EOF
set pages 100 lines 70 feed off
select 'ls -ltrh '|| DIRECTORY_PATH "CLEANUP_UNUSED_FILES"  from dba_directories where directory_name='DATA_PUMP_DIR';
EOF
} || {
echo -e "\n\n There is a problem! You must be connected as oracle and the database must be opened."
} 
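
Once you have checked which files are no longer needed, the actual removal can be done interactively, for example (the path is a placeholder for the directory listed above):

CODE
rm -i /path/shown/by/DATA_PUMP_DIR/old_export.dmp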
