#!/bin/ksh -a
export PATH=/bin:/usr/bin:${PATH}
export DBOID=$(ps -o user -p $$ | awk 'NR == 2 { print $1 }')
#export DBOHOME=$(finger -m ${DBOID} | sed -n 's/Directory:[ ]*\([0-9a-zA-Z/]*\)[ ]*Shell:.*/\1/p' | uniq)
export DBOHOME=$HOME
export SOURCEFILE="${DBOHOME}/bin/shell/DBA_Support_Maint_Env.ksh"
### ----------------------------------------------------------------------------
### Function to prevent users from running this script in debug or
### verbose mode.
### ----------------------------------------------------------------------------
function f_Chk_InvkMode
{
typeset -u V_INVK_STR=$1
V_INVK_STR_LN=`echo ${V_INVK_STR} | wc -m`
while [ ${V_INVK_STR_LN} -gt 0 ]
do
V_INVK_CH=`echo ${V_INVK_STR} | cut -c${V_INVK_STR_LN}`
V_INVK_STR_LN=`expr ${V_INVK_STR_LN} - 1`
if [[ "${V_INVK_CH}" = "X" || "${V_INVK_CH}" = "V" ]]
then
echo " "
echo "You can not run this program in debug/verbose mode"
echo " "
exit 1
fi
done
}
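### Note: "$-" below expands to the current shell option flags; when the script
### is invoked with "ksh -x" or "ksh -v" (or under set -x/-v) it contains "x"
### or "v", which the loop above detects after the typeset -u uppercasing.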
f_Chk_InvkMode $-
### End of f_Chk_InvkMode function.
### ----------------------------------------------------------------------------
function f_lGetDT
{
V_DATE=`date '+%d-%b-%Y %H:%M:%S' | tr "[:lower:]" "[:upper:]"`
V_DATE="[${V_DATE}]\t "
echo ${V_DATE}
}
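### For illustration, f_lGetDT emits a timestamp prefix such as
### "[07-OCT-2025 12:34:56]\t ", which callers prepend to their log messages
### (the date shown is only an example).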
### ----------------------------------------------------------------------------
### Function to show the help menu.
### ----------------------------------------------------------------------------
function f_help
{
echo " "
echo "\tUsage : "
echo " "
echo "\t\tData_Pump_Backup.ksh <Instance Name> <User Name>"
echo " "
exit 1
}
### end of f_help function.
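### Example invocation (instance and schema names are illustrative):
###   Data_Pump_Backup.ksh TOSSDP01 DM_MASTER_P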
### ----------------------------------------------------------------------------
### ----------------------------------------------------------------------------
### Function to export the schema statistics to a table.
### ----------------------------------------------------------------------------
function f_Exp_Stats
{
typeset -u v_statsexp_tab="DPUMP_DB_SCMA_STATS"
echo " "
echo "`f_lGetDT`Exporting the schema statistics into ${v_statsexp_tab} table ..."
${ORACLE_HOME}/bin/sqlplus -s -L -R 3 <<-EOFSQL
${OUSER}
WHENEVER OSERROR EXIT 9
WHENEVER SQLERROR EXIT SQL.SQLCODE
DECLARE
v_tab_cnt NUMBER := 0;
v_tname VARCHAR2(30) := '${v_statsexp_tab}';
BEGIN
-- if the table exists drop it first.
SELECT count(1) INTO v_tab_cnt
FROM user_tables
WHERE table_name = v_tname;
IF v_tab_cnt >=1 THEN
EXECUTE IMMEDIATE 'DROP TABLE '||v_tname||' PURGE';
END IF;
-- Creating the table to hold the schema statistics.
dbms_stats.create_stat_table(ownname => user,
stattab => v_tname);
-- Exporting the schema statistics.
dbms_stats.export_schema_stats(ownname => user,
stattab => v_tname);
EXCEPTION
WHEN others THEN
RAISE_APPLICATION_ERROR(-20001,sqlerrm);
END;
/
EOFSQL
if [ $? -ne 0 ]
then
echo " "
echo "`f_lGetDT`ERROR: in exporting the schema statistics."
return 1
else
echo " "
echo "`f_lGetDT`SUCCESS: Schema statistics export is completed to ${v_statsexp_tab}."
fi
}
### End of f_Exp_Stats function.
### ----------------------------------------------------------------------------
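### Note: the table produced by f_Exp_Stats can later be used to restore the
### statistics with the standard DBMS_STATS call, for example (not executed here):
###   exec dbms_stats.import_schema_stats(ownname => user, stattab => 'DPUMP_DB_SCMA_STATS');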
### ----------------------------------------------------------------------------
### Function to compress the data pump files, currently using the gzip command.
### It uses DPUMP_MAX_ZIP to cap the number of concurrent compression
### processes until all to-be-compressed files have been processed.
### Global Variables: v_dir_path, DPTAG_NAME
### ----------------------------------------------------------------------------
function f_gzip_files
{
typeset v_zip_cmd="gzip"
typeset flist="/tmp/._z_${UNIQ}"
ls -1 ${v_dir_path}/${DPTAG_NAME}*.dmp >${flist} || {
echo "$(f_lGetDT)ERROR: cannot write to temporary file ${flist}, f_gzip_files()"
return 1
}
typeset -i bef_file_sz=$( ls -l ${v_dir_path}/${DPTAG_NAME}*.dmp | awk '{ sum += $5 } END { printf "%d", sum/1024 }' )
echo "$(f_lGetDT)Total no of data dump files before compress: $(wc -l <${flist})."
echo "$(f_lGetDT)Total size of all data dump files before compress: ${bef_file_sz} KB."
echo "$(f_lGetDT)max concurrent of zip: ${DPUMP_MAX_ZIP} ."
typeset start_dt="$(date '+%F %T')"
for dpfile in $(<${flist})
do
echo "$(f_lGetDT)${v_zip_cmd} ${dpfile}..."
${v_zip_cmd} -f ${dpfile} &
sleep 1
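#- Throttle: do not start another compression process until the number of
#- running background jobs drops below DPUMP_MAX_ZIP.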
while [ $(jobs | wc -l) -ge ${DPUMP_MAX_ZIP} ]
do
sleep 5
done
done
#- wait for all background process completed
echo "$(f_lGetDT)No more, waiting for all background ${v_zip_cmd} processes to complete..."
wait
typeset -i l_rc=0
#- check the original list, it should be 0 since all *.dmp should have
#- converted to *.dmp.gz by now
if [ $(ls -1 $(<${flist}) 2>/dev/null | wc -l) -ne 0 ]; then
echo "$(f_lGetDT)ERROR: The ${v_zip_cmd} completed, but the counts don't seem to match..."
echo "$(f_lGetDT)ERROR: There are still .dmp files for this tag..."
l_rc=1
else
typeset -i aft_file_sz=$( ls -l ${v_dir_path}/${DPTAG_NAME}*.dmp.gz | awk '{ sum += $5 } END { printf "%d", sum/1024 }' )
echo "$(f_lGetDT)The ${v_zip_cmd} completed successfully, ${start_dt} - $(date '+%F %T')."
echo "$(f_lGetDT)bef_file_sz=${bef_file_sz} KB & aft_file_sz=${aft_file_sz} KB"
l_rc=0
fi
rm -f ${flist}
return ${l_rc}
}
### End of f_gzip_files function.
### ----------------------------------------------------------------------------
### ----------------------------------------------------------------------------
### Function to start the data pump. This will generate the data pump parameter
### file on the fly and kick off the data pump using that parameter file.
### ----------------------------------------------------------------------------
function f_data_pump
{
DPJOB_NAME="EXPDP${UNIQ}"
echo " "
echo "`f_lGetDT`Data Pump JOB Name : ${DPJOB_NAME}"
DPJOB_PARFILE="${DPJOB_NAME}.par"
touch ${DPJOB_PARFILE}
chmod 700 ${DPJOB_PARFILE}
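# Fetch the application database version id (from the application's
# database_version table) so it can be embedded in the dump tag name.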
v_db_ver=`${ORACLE_HOME}/bin/sqlplus -s -L -R 3 <<-EOFSQL
${OUSER}
WHENEVER OSERROR EXIT 9
WHENEVER SQLERROR EXIT SQL.SQLCODE
SET ECHO OFF HEAD OFF PAGES 0 FEEDBACK OFF
SELECT replace(database_version_id,'.','_')
FROM database_version;
EOFSQL`
if [ $? -ne 0 ]
then
return 1
fi
DPTAG_NAME="${V_SID}_${V_SCMA}_${v_db_ver}_${UNIQ}"
echo " "
echo "`f_lGetDT`Data Pump TAG Name : ${DPTAG_NAME}"
echo " "
echo "`f_lGetDT`Generating the expdp parameter file ..."
echo "DIRECTORY=${v_dpdir_name}" > ${DPJOB_PARFILE}
echo "DUMPFILE=${v_dpdir_name}:${DPTAG_NAME}_%UA%U" >> ${DPJOB_PARFILE}
echo "LOGFILE=expdp${DPTAG_NAME}.log" >> ${DPJOB_PARFILE}
echo "JOB_NAME=${DPJOB_NAME}" >> ${DPJOB_PARFILE}
echo "FILESIZE=${DPUMP_MAX_SZ}G" >> ${DPJOB_PARFILE}
echo "PARALLEL=48" >> ${DPJOB_PARFILE}
echo "EXCLUDE=STATISTICS,AUDIT_OBJ,GRANT" >> ${DPJOB_PARFILE}
echo "SCHEMAS=${V_SCMA}" >> ${DPJOB_PARFILE}
echo "VERSION=19.0.0" >> ${DPJOB_PARFILE}
if [ "${V_SCMA}" = "DM_MASTER_P" ]
then
cat /export/appl/datapump/adhoc/EXCLUDE_TAB_LIST >> ${DPJOB_PARFILE}
fi
echo "COMPRESSION=ALL" >> ${DPJOB_PARFILE}
echo " "
echo "`f_lGetDT`Completed the generation of expdp parameter file."
echo " "
echo "`f_lGetDT`Following are the parameter file contents."
echo " "
cat ${DPJOB_PARFILE}|sed 's/^/ /g'
echo " "
echo "`f_lGetDT`Starting the export data pump ..."
${ORACLE_HOME}/bin/expdp PARFILE=${DPJOB_PARFILE} <<-EOFDPUMP
${OUSER}
EOFDPUMP
if [ $? -ne 0 ]
then
echo " "
echo "`f_lGetDT`ERROR: in the \"expdp\" operation."
echo " "
return 1
else
echo " "
echo "`f_lGetDT`Datapump JOB is completed."
fi
sleep 2
echo " "
echo "`f_lGetDT`Reading the data pump log file to check status of the job ..."
v_dpump_log_file="${V_SID}_${V_SCMA}_${v_db_ver}_expdp.tmp"
${ORACLE_HOME}/bin/sqlplus -s -L -R 3 <<-EOFSQL >> ${v_dpump_log_file}
${OUSER}
WHENEVER OSERROR EXIT 9
WHENEVER SQLERROR EXIT SQL.SQLCODE
SET SERVEROUTPUT ON LINE 120 FEEDBACK OFF
DECLARE
vInHandle utl_file.file_type;
vNewLine VARCHAR2(300);
BEGIN
vInHandle := utl_file.fopen('${v_dpdir_name}','expdp${DPTAG_NAME}.log', 'R');
LOOP
BEGIN
utl_file.get_line(vInHandle, vNewLine);
dbms_output.put_line(vNewLine);
EXCEPTION
WHEN others THEN
EXIT;
END;
END LOOP;
utl_file.fclose(vInHandle);
END;
/
EOFSQL
if [ $? -ne 0 ]
then
echo " "
cat ${v_dpump_log_file}|sed 's/^/ /g'
echo " "
echo "`f_lGetDT`ERROR: in reading the data pump log file."
echo " "
return 1
else
cat ${v_dpump_log_file}|sed 's/^/ /g'
fi
if [ $(cat ${v_dpump_log_file}|grep -c "ORA-[0-9][0-9]") -ge 1 ]
then
echo " "
echo "`f_lGetDT`ERROR: in data pump export. Please check the log for Oracle Errors."
return 1
elif [ $(cat ${v_dpump_log_file}|grep -wc "successfully completed") -eq 0 ]
then
echo " "
echo "`f_lGetDT`ERROR: in completing the data pump job successfully. Please check the log."
return 1
fi
# Removing the temporary files generated on the fly.
rm -f ${v_dpump_log_file}
rm -f ${DPJOB_PARFILE}
}
### End of f_data_pump function.
### ----------------------------------------------------------------------------
### ----------------------------------------------------------------------------
### Function to check for the existence of the temporary working directory.
### If it does not exist, this function will create it.
### ----------------------------------------------------------------------------
function f_wdir_chk
{
echo " "
echo "`f_lGetDT`Checking for the temporary working directory ..."
if [ ! -d ${v_wdir} ]
then
echo " "
echo "`f_lGetDT`Directory \"${v_wdir}\" not found, then creating ..."
mkdir -p ${v_wdir}
echo " "
echo "`f_lGetDT`Directory creation completed."
fi
}
### End of f_wdir_chk function.
### ----------------------------------------------------------------------------
### ----------------------------------------------------------------------------
### Function to find out the schema type and the password for the user.
### ----------------------------------------------------------------------------
function f_Get_Stype_Pwd
{
echo " "
echo "`f_lGetDT`Finding the password for the ${V_SCMA}@${V_SID} ..."
## V_USR_PWD="`${DBOHOME}/admin/perl/scripts/F_GET_PWD -d ${V_SID} -u ${V_SCMA}`"
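# get_pwd_from_mdta (and the rcl utility used below) are assumed to be
# provided by the environment sourced from ${SOURCEFILE}.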
V_USR_PWD=$(get_pwd_from_mdta ${V_SID} ${V_SCMA})
if [ $? -ne 0 ]
then
echo " "
echo "`f_lGetDT`ERROR: in finding the password for ${V_SCMA}@${V_SID}."
return 1
else
echo " "
echo "`f_lGetDT`Found the password for ${V_SCMA}@${V_SID}."
fi
export OUSER="${V_SCMA}/${V_USR_PWD}@${V_SID}"
echo " "
echo "`f_lGetDT`Finding the schema type of ${V_SCMA}@${V_SID} ..."
export v_scma_typ="`rcl stype`"
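# Map the schema type code to the matching Data Pump directory object:
# 1 -> transaction DB (TXDB_DPUMP_DIR), 2 -> data mart (ORDB_DPUMP_DIR).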
if [ "${v_scma_typ}" = "1" ]
then
export v_dpdir_name="TXDB_DPUMP_DIR"
elif [ "${v_scma_typ}" -eq "2" ]
then
export v_dpdir_name="ORDB_DPUMP_DIR"
else
export v_dpdir_name=""
fi
##if [ "${V_SID}" ="POSS01" ]
##then
## export v_dpdir_name="TXDB_DPUMP_DIR"
##else [ "${V_SID}"= "POODS01" ]
##export v_dpdir_name="ORDB_DPUMP_DIR"
##fi
if [ "${v_dpdir_name}" = "" ]
then
echo " "
echo "`f_lGetDT`ERROR: in finding the schema type."
echo "`f_lGetDT`ERROR: or invalid schema code. "
return 1
fi
echo " "
echo "`f_lGetDT`${V_SCMA}@${V_SID} Schema type code is ${v_scma_typ} (1=TX, 2=DM)"
}
### End of f_Get_Stype_Pwd function.
### ----------------------------------------------------------------------------
### The main routine starts executing from here.
export RsCeIsDgsB=$$
export V_SEVERITY=MAJOR
export TNS_ADMIN="${DBOHOME}/bin/network"
### Checking the number of arguments supplied to this program.
if [ $# -lt 2 ]
then
f_help
else
typeset -u V_SID=$1
typeset -u V_SCMA=$2
typeset -i -x DPUMP_MAX_ZIP=${DPUMP_MAX_ZIP:-24}
typeset -i -x DPUMP_MAX_SCP=${DPUMP_MAX_SCP:-10}
typeset -i -x DPUMP_MAX_SZ=${DPUMP_MAX_SZ:-24}
fi
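### DPUMP_MAX_ZIP caps the concurrent gzip processes, DPUMP_MAX_SCP is the
### corresponding cap for the (currently disabled) scp step, and DPUMP_MAX_SZ
### is the Data Pump FILESIZE in GB; each can be overridden from the calling
### environment.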
### Initializing all the variables. Some of this could later be
### moved to a configuration file.
export UNIQ=$(date +%Y%m%d%H%M%S) # Unique value based on the date, used in log file names.
export PRFX="${V_SID}_${V_SCMA}"
export v_bdir="$HOME/stage/RC_WORK_DIR" # base directory for the temp working directory.
export v_wdir="${v_bdir}/${V_SID}_${V_SCMA}_expdp_${UNIQ}" # Temporary working directory.
export V_HOST="${V_SID}" # Host Name for the EMM Alert.
export V_KEY="${V_SID}_PROD_DATA_PUMP" # EMM Alert Key
export V_SUBJECT="Data Pump backup of ${V_SCMA}@${V_SID}" # eMail subject.
export v_log_file="${PRFX}_Data_Pump_Backup_${UNIQ}.log" # Log file name.
export t_log_file="${PRFX}_Data_Pump_Backup_${UNIQ}.tmp" # Temporary log file name.
#export v_autosys_inst="PA1" # AutoSys instance name for the production.
#export v_AutoSys_MN_box="OL#box#DSCRUB_pu01" # this is the main box job by unix.
#export v_AutoSys_DB_box="OL#box#DSCRUB_dbstart" # this is box job to start database and listener.
##export v_AutoSys_BCV_cmd_TX="SAN#cmd#POSS01B_CSplit" # AutoSys JOB for TXDB BCV Split.
#export v_AutoSys_BCV_cmd_TX="UX#box#POSS01B_Snap" # AutoSys JOB for TXDB BCV Split.
###export v_AutoSys_BCV_cmd_DM="SAN#cmd#POODS01B_CSplit" # AutoSys JOB for ORDB BCV Split.
#export v_AutoSys_BCV_cmd_DM="SAN#box#POODS01B_Snap" # AutoSys JOB for ORDB BCV Split.
#export v_autosys_env_file="/export/apps/sched/autouser/autosys.bash.${v_autosys_inst}"
# AutoSys environment source file.
export v_src_host="tlp-ze-bkubcv02" # Source host name where the data pump is supposed to run.
export v_tx_sid="TOSSDP01" # Transaction database name.
export v_dm_sid="TOODSDP1" # Data Mart database name.
##export v_scp_target_host="vcore04-doma" # host name where dump files need to be SCPd.
#export v_scp_target_host="alp-ze-d001" # host name where dump files need to be SCPd.
#export v_scp_target_user="zjdbov" # User name on the target host.
export v_thold_fs_size=85 # Threshold size to keep the EMM blocker.
export ERRCODE=0 # ERRCODE for all the failures.
export EMMERRCODE=0 # ERRCODE only for EMM blocker failures.
echo " " > /tmp/${t_log_file}
echo "`f_lGetDT`This log file name is ${v_wdir}/${v_log_file}">> /tmp/${t_log_file}
f_wdir_chk >> /tmp/${t_log_file}
cd ${v_wdir}
if [ $? -ne 0 ]
then
echo " "
echo "`f_lGetDT`ERROR: in changing the directory ${v_wdir}"
ERRCODE=1
else
cat /tmp/${t_log_file} > ${v_log_file}
rm -f /tmp/${t_log_file}
fi
#if [ ${ERRCODE} -eq 0 ]; then
# f_Set_AutoSys_Env >> ${v_log_file}
# if [ $? -ne 0 ]; then
# ERRCODE=1
# fi
#fi
#
##if [ ${ERRCODE} -eq 0 ]; then
## f_Check_BCV_Split >> ${v_log_file}
## if [ $? -ne 0 ]; then
## V_MSG="BCV Split Check"
## ERRCODE=1
## fi
##fi
#- Source ${SOURCEFILE}; the databases are expected to be available.
#- Since ERRCODE gets reset in the SOURCEFILE, the temporary work-around is
#- to capture the ERRCODE value and set it back after sourcing SOURCEFILE.
typeset l_errcode=${ERRCODE}
echo "`f_lGetDT`Sourcing the env. script files, errcode before=${ERRCODE} ..." >> ${v_log_file}
. ${SOURCEFILE}
ERRCODE=${l_errcode}
echo "`f_lGetDT`completed sourcing the script file, errcode after=${ERRCODE} ..." >> ${v_log_file}
echo "`f_lGetDT`TNS_ADMIN=${TNS_ADMIN} ..." >> ${v_log_file}
if [ ${ERRCODE} -eq 0 ]; then
f_Get_Stype_Pwd >> ${v_log_file}
if [ $? -ne 0 ]; then
V_MSG="Password and user type check"
ERRCODE=1
else
# data pump path in the target host.
export v_scp_target_path="/export/appl/datapump/`echo ${v_dpdir_name}|cut -c1-4`"
fi
fi
if [ ${ERRCODE} -eq 0 ]; then
f_Check_Env_DB >> ${v_log_file}
if [ $? -ne 0 ]; then
V_MSG="DB Environment Check"
ERRCODE=1
fi
fi
#if [ ${ERRCODE} -eq 0 ]; then
# f_NPI_Scrub >> ${v_log_file}
# if [ $? -ne 0 ]; then
# V_MSG="NPI Scrub"
# ERRCODE=1
# fi
#fi
#
#if [ ${ERRCODE} -eq 0 ]; then
# f_EMM_Blocker "BLOCK" ${v_src_host} >> ${v_log_file}
# if [ $? -ne 0 ]; then
# V_MSG="EMM Blocker for ${v_src_host}"
# EMMERRCODE=1
# fi
#fi
if [ ${ERRCODE} -eq 0 ]; then
f_Exp_Stats >> ${v_log_file}
if [ $? -ne 0 ]; then
V_MSG="Statistics Export"
ERRCODE=1
fi
fi
if [ ${ERRCODE} -eq 0 ]; then
f_data_pump >> ${v_log_file}
if [ $? -ne 0 ]; then
V_MSG="Data Pump"
ERRCODE=1
fi
fi
#if [ ${ERRCODE} -eq 0 ]; then
# f_Mount_Unmount_Inst "SHUTDOWN" >> ${v_log_file}
# if [ $? -ne 0 ]; then
# V_MSG="UnMount Data Base"
# ERRCODE=1
# fi
#fi
#
#if [ ${ERRCODE} -eq 0 ]; then
# f_gzip_files >> ${v_log_file}
# if [ $? -ne 0 ]; then
# export V_SEVERITY=MINOR
# V_MSG="gzip dump file"
# ERRCODE=1
# fi
#fi
#if [ ${ERRCODE} -eq 0 ]; then
# f_Check_SSH >> ${v_log_file}
# if [ $? -ne 0 ]; then
# export V_SEVERITY=MINOR
# V_MSG="SSH Connectivity"
# ERRCODE=1
# fi
#fi
#if [ ${ERRCODE} -eq 0 ]; then
# f_EMM_Blocker "UNBLOCK" ${v_src_host} >> ${v_log_file}
# if [ $? -ne 0 ]; then
# V_MSG="EMM UnBlocker for ${v_src_host}"
# export V_SEVERITY=MINOR
# EMMERRCODE=1
# fi
#fi
#
#if [ ${ERRCODE} -eq 0 ]; then
# f_EMM_Blocker "BLOCK" ${v_scp_target_host} >> ${v_log_file}
# if [ $? -ne 0 ]; then
# V_MSG="EMM Blocker for ${v_src_host}"
# export V_SEVERITY=MINOR
# EMMERRCODE=1
# fi
#fi
#
#typeset SCPERRCODE=0
#if [ ${ERRCODE} -eq 0 ]; then
# f_scp_files >> ${v_log_file}
# SCPERRCODE=$?
# #- SCPERRCODE=1 - scp error, SCPERRCODE=2 - scp WARNING
# if [ ${SCPERRCODE} -ne 0 ]; then
# V_SEVERITY=MINOR
# ERRCODE=1
# case ${SCPERRCODE} in
# 2) V_MSG="SCP dump files, file counts are not the same between source and target hosts"
# ;;
# 3) V_MSG="SCP dump files, byte counts are not the same between source and target hosts"
# ;;
# *)
# V_MSG="SCP dump files, check the log for more details"
# ;;
# esac
# fi
#fi
#if [ ${ERRCODE} -eq 0 ]; then
# f_EMM_Blocker "UNBLOCK" ${v_scp_target_host} >> ${v_log_file}
# if [ $? -ne 0 ]; then
# V_MSG="EMM UnBlocker for ${v_scp_target_host}"
# export V_SEVERITY=MINOR
# EMMERRCODE=1
# fi
#fi
echo " " >> ${v_log_file}
if [ ${ERRCODE} -eq 1 -o ${EMMERRCODE} -eq 1 ]
then
v_pager_flag="Y"
if [ "${V_SEVERITY}" = "MINOR" ]
then
V_SUBJECT="WARNING: ${V_SUBJECT} (Fail at ${V_MSG})"
else
V_SUBJECT="ERROR: ${V_SUBJECT} (Fail at ${V_MSG})"
fi
banner ERROR >> ${v_log_file}
else
v_pager_flag="N"
V_SUBJECT="SUCCESS: ${V_SUBJECT}"
banner SUCCESSFUL >> ${v_log_file}
fi
cp ${v_log_file} ${LOGDIR}
f_emm_alert ${v_pager_flag} Y ${v_log_file}
exit ${ERRCODE}
### End of the Script