############################################################################
# File Name SingleDataset.sh
# Description : Load a single MBIS dataset for a single day from file into Oracle.
#
# YYYYMMDD Name Description
# ------------------------------------------------------------------------ #
# 20180124 Yiting Nan Created
# 20180201 Yiting Nan Update with metadata changes
############################################################################
# Source the environment file if APP_NAME is not already set
# (config/MBIS.env is expected to export it).
if [ -z "${APP_NAME}" ]
then
source config/MBIS.env
fi
# source library
source ${WORK_DIR}/mbis_lib.sh
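# Note: the control files and unzipped csv files below are created in the
# current working directory; this script assumes it is launched from ${WORK_DIR}.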
if [ $# -lt 2 ] || [ "${1}" = "?" ];then printf "
Error: Invalid number of parameters
Usage: SingleDataset.sh [SOURCEDATA] [FILEDATE] [DAYORHOUR]
SOURCEDATA -- quotes | peer_index | curve
FILEDATE -- YYYYMMDD format
DAYORHOUR -- daily | hourly
All parameters are case-insensitive
Example: SingleDataset.sh quotes 20180108
or SingleDataset.sh peer_index 20180108
or SingleDataset.sh curve 20180108 daily
";exit 1;fi
####################################
##### Initializing
####################################
#Turn all to lowercase
SOURCEDATA="${1,,}"
if [ "${SOURCEDATA}" != "quotes" ] && [ "${SOURCEDATA}" != "peer_index" ] && [ "${SOURCEDATA}" != "curve" ];then printf "
Error: Invalid SOURCEDATA
Usage: SingleDataset.sh [SOURCEDATA] [FILEDATE] [DAYORHOUR]
SOURCEDATA -- quotes | peer_index | curve
FILEDATE -- YYYYMMDD format
DAYORHOUR -- daily | hourly
Example: SingleDataset.sh quotes 20180108
or SingleDataset.sh peer_index 20180108
or SingleDataset.sh curve 20180108 daily
";exit 1;fi
FILEDATE="${2,,}"
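# Sanity check (a minimal added guard): FILEDATE must look like YYYYMMDD
# before it is compared against the *_STARTDATE variables below.
if ! [[ ${FILEDATE} =~ ^[0-9]{8}$ ]];then printf "
Error: Invalid FILEDATE
FILEDATE -- YYYYMMDD format
";exit 1;fi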
if [ "${SOURCEDATA}" = "curve" ]
then
if [ $# -lt 3 ] || [ "${1}" = "?" ];
then printf "
Error: Invalid number of parameters
Please specify whether the curve load is daily or hourly
Usage: SingleDataset.sh [SOURCEDATA] [FILEDATE] [DAYORHOUR]
SOURCEDATA -- quotes | peer_index | curve
FILEDATE -- YYYYMMDD format
DAYORHOUR -- daily | hourly
Example: SingleDataset.sh quotes 20180108
or SingleDataset.sh peer_index 20180108
or SingleDataset.sh curve 20180108 daily
";exit 1;fi
DAYORHOUR="${3,,}"
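# Guard (an added sketch): any value other than daily would silently fall
# into the hourly branch below, so reject unknown values explicitly.
if [ "${DAYORHOUR}" != "daily" ] && [ "${DAYORHOUR}" != "hourly" ];then printf "
Error: Invalid DAYORHOUR
DAYORHOUR -- daily | hourly
";exit 1;fi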
if [[ ${CURVE_STARTDATE} > ${FILEDATE} ]]
then printf "
Error: Invalid date
Curve data are available back to ${CURVE_STARTDATE}
";exit 1;fi
#999 -- TODO: update MAXFILE from metadata. MAXFILE is the number of files to iterate over.
declare -a DATAFILE_PREFIX=(ap_curve. ap_curve_stats. ap_credit_curve. ap_credit_curve_stats. ap_pricing.)
declare -a TIMEDELIM=("''")
FILE_PATH=${DATA_DIR}/${SOURCEDATA}/${FILEDATE}
if [ "${DAYORHOUR}" = "daily" ]
then
declare -a DATEDELIM=("'-'" "''" "'-'" "''" "''")
declare -a TABLENAME=(MBIS_STG_DAILY_AP_CURVE MBIS_STG_DAILY_AP_CURVE_STATS MBIS_STG_DAILY_AP_CURVE MBIS_STG_DAILY_AP_CURVE_STATS MBIS_STG_DAILY_AP_PRICING)
MAXFILE=${FILE_IN_SET3}
DATASET=${CURVE_DAILY_DATASET}
LOADPHASE=("MBIS_CURVE_FILE_TO_STAGING" "MBIS_CURVE_STAGING_TO_TARGET")
else
declare -a DATEDELIM=("''")
declare -a TABLENAME=(MBIS_STG_HOURLY_AP_CURVE MBIS_STG_HOURLY_AP_CURVE_STATS MBIS_STG_HOURLY_AP_CURVE MBIS_STG_HOURLY_AP_CURVE_STATS MBIS_STG_HOURLY_AP_PRICING)
declare -a HOURS=(0900 1000 1100 1200 1300 1400 1500 1600 1700 1800)
MAXFILE=${FILE_IN_SET4}
DATASET=${CURVE_HOURLY_DATASET}
LOADPHASE=("MBIS_CURVE_HOURLY_FILE_TO_STAGING" "MBIS_CURVE_HOURLY_STAGING_TO_TARGET")
fi
else
declare -a DATEDELIM=("''")
declare -a TIMEDELIM=("':'")
DAYORHOUR=daily
if [ "${SOURCEDATA}" = "quotes" ]
then
if [[ ${QUOTES_STARTDATE} > ${FILEDATE} ]]
then printf "
Error: Invalid date
Quotes data are available back to ${QUOTES_STARTDATE}
";exit 1;fi
declare -a TABLENAME=(MBIS_STG_DAILY_QUOTES)
DATAFILE_PREFIX=()
MAXFILE=${FILE_IN_SET1}
FILE_PATH=${DATA_DIR}/quote/${FILEDATE::4}
DATASET=${QUOTES_DATASET}
LOADPHASE=("MBIS_QUOTE_FILE_TO_STAGING" "MBIS_QUOTE_STAGING_TO_TARGET")
else
if [ "${SOURCEDATA}" = "peer_index" ]
then
if [[ ${PEER_INDEX_STARTDATE} > ${FILEDATE} ]]
then printf "
Error: Invalid date
Peer_index data are available back to ${PEER_INDEX_STARTDATE}
";exit 1;fi
declare -a TABLENAME=(MBIS_STG_DAILY_PEER_INDEX)
DATAFILE_PREFIX=(${SOURCEDATA}_)
MAXFILE=${FILE_IN_SET2}
FILE_PATH=${DATA_DIR}/${SOURCEDATA}/${FILEDATE::4}
DATASET=${PEER_INDEX_DATASET}
LOADPHASE=("MBIS_PEER_INDEX_FILE_TO_STAGING" "MBIS_PEER_INDEX_STAGING_TO_TARGET")
else
printf "Error: Invalid SOURCEDATA
Please specify the correct SOURCEDATA: quotes | peer_index | curve
Usage: SingleDataset.sh [SOURCEDATA] [FILEDATE] [DAYORHOUR]";
exit 2
fi
fi
fi
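# Trace the resolved configuration (an added debugging aid; LOG_FILE is set
# just below, so this line goes to stdout).
echo "Resolved: SOURCEDATA=${SOURCEDATA} FILEDATE=${FILEDATE} DAYORHOUR=${DAYORHOUR} DATASET=${DATASET} MAXFILE=${MAXFILE}"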
LOG_FILE="${LOG_DIR}/${SOURCEDATA}/${SOURCEDATA}_${FILEDATE}.log"
export LOG_FILE
####################################
#
# Initializing metadata
# If this one already loaded or loading now, log and exit
# If never loaded before, get the ETL_JOB_TRACK_SK for current loading.
#
#####################################
create_etl_job_track ${DATASET} ${FILEDATE}
create_etl_job_track_phase ${LOADPHASE[0]}
echo "Creating a record in metadata. JOB_SK is ${ETL_JOB_TRACK_SK} with phase id: ${ETL_JOB_TRACK_PHASE_SK}"
# The log may be created by a different user than the owner of SingleDataset.sh,
# e.g. in the Development environment the owner is the developer but the runner is dera.service.
# chmod 666 ${LOG_FILE}
####################################
##### Loading to staging tables
####################################
for ((i=0; i<$MAXFILE; ++i))
do
##### Loading to staging tables
echo `date` " Start loading ${SOURCEDATA} file of ${FILEDATE} into staging tables. " >> ${LOG_FILE}
if [ "${DAYORHOUR}" = "daily" ]
then
SOURCE_CTL=${CONFIG_DIR}/nz_controlfile_template.ctl
echo `date` " Using control file template ${SOURCE_CTL} " >> ${LOG_FILE}
DATEFILENAME=${DATAFILE_PREFIX[i]}${FILEDATE}
# check whether the data file is missing
if [ ! -f ${FILE_PATH}/${DATEFILENAME}.csv.gz ]
then
echo `date` " ${FILE_PATH}/${DATEFILENAME}.csv.gz is missing. Please contact the vendor. " >> ${LOG_FILE}
#mail -s "missing file: ${FILE_PATH}/${DATEFILENAME}.csv.gz" -a ${LOG_FILE} ${EMAIL_LIST} < /dev/null
subject="Missing data file"
msg=" Missing data file ${FILE_PATH}/${DATEFILENAME}.csv.gz \n"
send_email "${subject}" "${msg}"
#clean up before exit
cleanup ${SOURCEDATA} ${DAYORHOUR}
###########Start with Metadata #####################
error_etl_job_track_phase
error_etl_job_track
########### End with Metadata #####################
exit 3
fi
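# Build a per-file control file from the template: substitute in the data
# file name, the target staging table, the bad-file name and the date/time
# delimiters expected by this dataset.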
CONTROL_FILE=ctl.${DATAFILE_PREFIX[i]}ctl
cp ${SOURCE_CTL} ${CONTROL_FILE}
sed -i "s/FILENAME/${DATEFILENAME}/g" ${CONTROL_FILE}
sed -i "s/TABLENAME/${TABLENAME[i]}/g" ${CONTROL_FILE}
sed -i "s/BADFILENAME/${DATEFILENAME}.csv.bad/g" ${CONTROL_FILE}
sed -i "s/DATEDELIMITOR/${DATEDELIM[i]}/g" ${CONTROL_FILE}
sed -i "s/TIMEDELIMITOR/${TIMEDELIM}/g" ${CONTROL_FILE}
###########Start with Metadata #####################
create_etl_job_track_file ${DATASET} ${FILE_PATH} ${DATEFILENAME} ${FILEDATE}
########### End with Metadata #####################
cp ${FILE_PATH}/${DATEFILENAME}.csv.gz ${WORK_DIR}
gunzip ${DATEFILENAME}.csv.gz
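# Guard (an added sketch; the same check would apply to the hourly branch
# below): if the copy or gunzip failed there is no csv to load, so fail the
# job early instead of handing nzload a missing file.
if [ ! -f ${DATEFILENAME}.csv ]
then
echo `date` " Failed to extract ${DATEFILENAME}.csv.gz in ${WORK_DIR}. " >> ${LOG_FILE}
cleanup ${SOURCEDATA} ${DAYORHOUR}
error_etl_job_track_phase
error_etl_job_track
exit 3
fi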
nzloader_logfile=${LOG_DIR}/${SOURCEDATA}/${FILEDATE}${DATAFILE_PREFIX[i]}nzloader.log
echo "nnnnnnn: nzload ${NZ_COMMON_CONNECTION} -cf ${CONTROL_FILE} -lf ${nzloader_logfile} "
rev=$(nzload ${NZ_COMMON_CONNECTION} -cf ${CONTROL_FILE} -lf ${nzloader_logfile} )
check_nzloader_log_file "${rev}" ${nzloader_logfile}
else
for j in ${HOURS[@]}
do
SOURCE_CTL=${CONFIG_DIR}/nz_controlfile_template.ctl
echo `date` " Using control file template ${SOURCE_CTL} " >> ${LOG_FILE}
echo " hour " $j >> ${LOG_FILE}
DATEFILENAME=${DATAFILE_PREFIX[i]}${FILEDATE}.$j
# check whether the data file is missing
if [ ! -f ${FILE_PATH}/${DATEFILENAME}.csv.gz ]
then
echo `date` " ${FILE_PATH}/${DATEFILENAME}.csv.gz is missing. Please contact the vendor. " >> ${LOG_FILE}
subject="Missing data file"
msg=" Missing data file ${FILE_PATH}/${DATEFILENAME}.csv.gz \n"
send_email "${subject}" "${msg}"
#clean up before exit
cleanup ${SOURCEDATA} ${DAYORHOUR}
###########Start with Metadata #####################
error_etl_job_track_phase
error_etl_job_track
########### End with Metadata #####################
exit 3
fi
CONTROL_FILE=ctl.${DATAFILE_PREFIX[i]}$j.ctl
cp ${SOURCE_CTL} ${CONTROL_FILE}
sed -i "s/FILENAME/${DATEFILENAME}/g" ${CONTROL_FILE}
sed -i "s/TABLENAME/${TABLENAME[i]}/g" ${CONTROL_FILE}
sed -i "s/BADFILENAME/${DATEFILENAME}.csv.bad/g" ${CONTROL_FILE}
sed -i "s/DATEDELIMITOR/${DATEDELIM}/g" ${CONTROL_FILE}
sed -i "s/TIMEDELIMITOR/${TIMEDELIM}/g" ${CONTROL_FILE}
###########Start with Metadata #####################
create_etl_job_track_file ${DATASET} ${FILE_PATH} ${DATEFILENAME} ${FILEDATE}
########### End with Metadata #####################
cp ${FILE_PATH}/${DATEFILENAME}.csv.gz ${WORK_DIR}
gunzip ${DATEFILENAME}.csv.gz
nzloader_logfile=${LOG_DIR}/${SOURCEDATA}/${FILEDATE}${DATAFILE_PREFIX[i]}$j.nzloader.log
rev=$(nzload ${NZ_COMMON_CONNECTION} -cf ${CONTROL_FILE} -lf ${nzloader_logfile} )
check_nzloader_log_file "${rev}" ${nzloader_logfile}
done
fi
done
########### Duplicate checking #####################
########### Transform if it is Quotes###############
###########Start with Metadata #####################
complete_etl_job_track_phase
create_etl_job_track_phase ${LOADPHASE[1]}
########### End with Metadata #####################
echo `date` " Finished loading ${SOURCEDATA} files of ${FILEDATE} into staging tables. " >> ${LOG_FILE}
####################################
##### Loading to permanent tables
####################################
echo "Creating a record in the MD_ETL_JOB_TRACK_PHASE -- ${LOADPHASE[1]} was successful: " ${etl_job_phase_sk} >> ${LOG_FILE}
echo `date` " Start loading ${SOURCEDATA} from Oracle stage table to permanent table. " >> ${LOG_FILE}
STAGE_SQL_FILE=stage_to_target_${SOURCEDATA}_${DAYORHOUR}.sql
if [ "${DAYORHOUR}" = "hourly" ]
then
cp ${SQL_DIR}/stage_to_target_${SOURCEDATA}_${DAYORHOUR}.sql ${STAGE_SQL_FILE}
else
cp ${SQL_DIR}/stage_to_target_${SOURCEDATA}.sql ${STAGE_SQL_FILE}
fi
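# Substitute the job key, the schema owner and (for quotes/peer_index) the
# file date into the stage-to-target SQL before running it.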
sed -i "s/ETL_JOB_TRACK_SK_REPLACEME/${ETL_JOB_TRACK_SK}/g" ${STAGE_SQL_FILE}
sed -i "s/DB_OWNER_REPLACEME/${DB_OWNER}/g" ${STAGE_SQL_FILE}
if [ "${SOURCEDATA}" != "curve" ]
then
sed -i "s/DATEINFILE_REPLACEME/${FILEDATE}/g" ${STAGE_SQL_FILE}
fi
rev=$(nzsql ${NZ_COMMON_CONNECTION} -f ${STAGE_SQL_FILE} )
############ Start update metadata that permanent table update error
if [[ ${rev} =~ "ERROR" ]] || [[ ${rev} =~ "ORA" ]];
then
echo "ORA-Error when loading to the DB for ETL_JOB_TRACK_SK: ${ETL_JOB_TRACK_SK}" >> ${LOG_FILE}
error_etl_job_track_phase
error_etl_job_track
send_email "Failed on JOB_ID: ${ETL_JOB_TRACK_FILE_SK}, PHASE_ID: ${ETL_JOB_TRACK_PHASE_SK}, FILE_ID: ${ETL_JOB_TRACK_FILE_SK}" "ORA MBIS Data Load Status: ORA-Error trying to update record in the MD_ETL_JOB_TRACK table for ${ETL_JOB_TRACK_SK}!"
exit 1
else
echo "Successfully loading to the DB for ETL_JOB_TRACK_SK: ${ETL_JOB_TRACK_SK}" >> ${LOG_FILE}
complete_etl_job_track_phase
complete_etl_job_track
fi
############ End update metadata that permanent table update successfully
#clean up
cleanup ${SOURCEDATA} ${DAYORHOUR}
echo `date` " Finished loading ${SOURCEDATA} from Oracle stage table to permanent table. " >> ${LOG_FILE}