#!/bin/bash
# upgrade system
# $Id: upgrade.bash,v 1.7.1450.1 2007/08/03 12:28:58 didier.leymarie Exp $
#
# Retrieving:
#   - download an initial job file from the URL given in parameters
#   - check this file
#   - uncompress it in a working directory
#   - check the decompressed data
#   - launch the downloaded script (fsm.bash)

# program name
declare g_this_program="$0"
# URL given in parameter ("url=..." on the command line)
declare p_url=""
# complete remote URL for initial job file
declare r_initial_url=""
# directory where data will be downloaded from, it is a URL
declare g_srcdir=""
# extra "name=value" URL parameters, stored as shell assignments
declare -a g_url_parameters
# current baseline file
declare g_baseline_file="/opt/baseline"
# working directory
declare g_workingdir="/opt/upgrade"
# packages cache directory
declare g_packagesdir="/opt/packages"
# remote initial job file name
declare g_remote_initial_job=""
# local initial job file name
declare g_local_initial_job="initialjob.tar.gz"
# log file
declare g_log_file="${g_workingdir}/Upgrade.log"
# new packages baseline file
declare g_new_packages_baseline_file="${g_workingdir}/baseline"
# CURL options for retrieving initial job (expanded unquoted on purpose)
declare g_curl_retrieving_options="-q --fail --connect-timeout 30 --max-time 300 --silent"
# source file: contains the URL data will be downloaded from
declare g_source_config="source.cfg"
# configuration of upgrade operations
declare g_upgrade_config="upgrade.cfg"
# lock file
declare g_lock_file="${g_workingdir}/Verrou"

# Raise an alarm named $1 (also run create_alarm.bash when available).
# NOTE(review): g_alarm_file is never defined anywhere in this file; the
# original unguarded redirect failed with "ambiguous redirect" whenever it
# was unset.  Guard the write so the alarm command still runs.
# TODO: confirm where g_alarm_file is supposed to come from.
send_alarm()
{
    [ -n "${g_alarm_file:-}" ] && echo "$1" > "${g_alarm_file}"
    # only invoke create_alarm.bash when it exists in PATH
    if command -v create_alarm.bash >/dev/null 2>&1
    then
        create_alarm.bash -n "$1" -o Application -l info -t point
    fi
}

# Record our PID in the lock file.
set_lock()
{
    echo $$ > "${g_lock_file}"
}

# Drop the lock file.
remove_lock()
{
    rm -f "${g_lock_file}"
}

# exit code
declare -i g_exitcode=0
# command line options
declare -i do_verbose=0
declare -i do_help=0
# options forwarded to fsm.bash (expanded unquoted on purpose)
declare g_options=""

# Append a timestamped line "<date> Upgrading: <args>" to the log file.
message()
{
    printf "%s Upgrading: %s\n" "$(date +"%04Y.%02m.%02d-%02H:%02M:%02S-%Z")" "$*" >> "${g_log_file}"
}

# Download the initial job file from ${r_initial_url} into
# ${g_local_initial_job}.
# Returns: 0 on success, -2 (254) on failure (UpgradeAborted alarm raised).
downloading_initial_job()
{
    local -i l_ret=0
    local -i l_exit=0
    (( do_verbose )) && printf "Downloading %s\n" "${r_initial_url}" >>"${g_log_file}"
    # g_curl_retrieving_options intentionally unquoted: it holds several options
    curl ${g_curl_retrieving_options} --output "${g_local_initial_job}" --url "${r_initial_url}"
    l_exit=$?
    if [ ${l_exit} -eq 0 ]
    then
        (( do_verbose )) && printf "Downloaded %s %d bytes\n" \
            "$(stat -c '%n' "${g_local_initial_job}")" \
            "$(stat -c '%s' "${g_local_initial_job}")" >>"${g_log_file}"
    else
        (( do_verbose )) && printf "Fail to download initial job file from %s (%d).\n" \
            "${r_initial_url}" ${l_exit} >>"${g_log_file}"
        l_ret=-2
    fi
    (( l_ret != 0 )) && send_alarm UpgradeAborted
    return ${l_ret}
}

# Test the gzip integrity of file $1.
# Returns: 0 = OK, 1 = integrity check failed, 2 = file not found.
test_zipfile()
{
    local -i status=2
    local ret
    if [ -f "$1" ]
    then
        status=1
        # "gunzip -tv" reports "<name>:<TAB>OK" on success; keep field 2
        ret=$(/bin/gunzip -tv "$1" 2>&1 | cut -f2 )
        if [ "$ret" = "OK" ]
        then
            status=0
        fi
    fi
    return $status
}

# Check the integrity of the downloaded initial job archive.
# Returns: 0 on success, -3 (253) on failure (UpgradeAborted alarm raised).
checking_initial_job()
{
    local -i l_ret=0
    local -i l_exit=0
    (( do_verbose )) && printf "Checking %s " "${g_local_initial_job}" >>"${g_log_file}"
    test_zipfile "${g_local_initial_job}"
    l_exit=$?
    if [ ${l_exit} -ne 0 ]
    then
        l_ret=-3
        (( do_verbose )) && printf "Fail.\n" >>"${g_log_file}"
    else
        (( do_verbose )) && printf "OK.\n" >>"${g_log_file}"
    fi
    # single alarm on failure (the original raised UpgradeAborted twice here)
    (( l_ret != 0 )) && send_alarm UpgradeAborted
    return ${l_ret}
}

# Unpack the initial job archive into the current directory and verify its
# content against the "control" manifest (one "file;size;md5sum" per line).
# Also requires non-empty "baseline" and "list" files to be present.
# Returns: 0 on success, -4 no/empty control file, -5 missing file(s),
#          -6 size or checksum mismatch (UpgradeAborted alarm raised on error).
unzipping_initial_job()
{
    local -i l_ret=0
    local -a files
    local -a sizes
    local -a sums
    local -i index=0
    local -i l_size=0
    local l_sum
    local line f s c
    (( do_verbose )) && printf "Unzipping %s " "${g_local_initial_job}" >>"${g_log_file}"
    # NB: the original used "2>&1 >/dev/null", which left tar's stderr on the
    # terminal; discard both streams as intended
    /bin/gunzip -c "${g_local_initial_job}" | /bin/tar xf - >/dev/null 2>&1
    if [ -s control ]
    then
        # load the manifest: field 1 = file name, 2 = size, 3 = md5 checksum
        while read -r line
        do
            files[index]=$(echo "${line}" | cut -d';' -f1)
            sizes[index]=$(echo "${line}" | cut -d';' -f2)
            sums[index]=$(echo "${line}" | cut -d';' -f3)
            (( index++ ))
        done < control
        # stop at the first invalid or missing file
        for (( index=0; index < ${#files[@]} && l_ret == 0; index++ ))
        do
            f=${files[index]}
            s=${sizes[index]}
            c=${sums[index]}
            if [ -s "$f" ]
            then
                l_size=$(stat -c"%s" "$f")
                l_sum=$(md5sum "$f" | cut -d' ' -f1)
                if [ "${l_size}" != "${s}" ] || [ "${l_sum}" != "${c}" ]
                then
                    l_ret=-6
                    (( do_verbose )) && printf "\nFile %s is invalid." "${f}" >>"${g_log_file}"
                fi
            else
                l_ret=-5
                (( do_verbose )) && printf "\nFile %s does not exist." "${f}" >>"${g_log_file}"
            fi
        done
        if [ ! -s baseline ] || [ ! -s list ]
        then
            l_ret=-5
            (( do_verbose )) && printf "\nFiles baseline or/and list do not exist." >>"${g_log_file}"
        fi
        (( do_verbose && l_ret == 0 )) && printf "OK.\n" >>"${g_log_file}"
        (( do_verbose && l_ret != 0 )) && printf "\nUnzipping %s Fail.\n" "${g_local_initial_job}" >>"${g_log_file}"
    else
        l_ret=-4
        (( do_verbose )) && printf "Fail.\n"
    fi
    (( l_ret != 0 )) && send_alarm UpgradeAborted
    return ${l_ret}
}

# Run the downloaded job script with the forwarded options.
# Returns: the script's exit status, or -7 (249) when
# ${g_workingdir}/fsm.bash is missing or not executable.
running_job()
{
    local -i l_ret=-7
    if [ -x "${g_workingdir}/fsm.bash" ]
    then
        # g_options intentionally unquoted: it forwards several options
        "${g_workingdir}/fsm.bash" ${g_options}
        l_ret=$?
    fi
    return ${l_ret}
}

# Print command-line usage.
usage()
{
    printf "Usage: %s [options] URL\n\tOptions:\n\t\t-v\tverbose\n\t\t-q\tquick\n\t\t-n\tno stop application\n\t\t-z\tno reboot\n" "${g_this_program}"
}

# main code
# process command line
while getopts "vqnzh?" c
do
    case ${c} in
        v)
            do_verbose=1
            g_options="${g_options} -v"
            ;;
        q|n|z)
            g_options="${g_options} -${c}"
            ;;
        h|?)
            do_help=1
            ;;
    esac
done

# retrieve all non-option arguments in an array (quoted: keep spaces intact)
declare -a args=("${@:OPTIND}")
declare -i is_url_defined=0
if (( ${#args[@]} > 0 && do_help == 0 ))
then
    j=0
    for (( i=0; i<${#args[@]}; i++ ))
    do
        # arguments have the form "name=value"; names are case-insensitive
        l_name=$(echo "${args[i]}" | cut -d'=' -f 1 | tr '[A-Z]' '[a-z]')
        l_value=$(echo "${args[i]}" | cut -d'=' -f 2)
        case ${l_name} in
            url)
                if [ -n "${l_value}" ]
                then
                    p_url=${l_value}
                    is_url_defined=1
                fi
                ;;
            *)
                # any other parameter becomes a "g_urlpar_<name>" assignment
                # written to the upgrade configuration file
                g_url_parameters[j]="g_urlpar_${l_name}=\"${l_value}\""
                (( j++ ))
                ;;
        esac
    done
fi

if (( is_url_defined != 0 && do_help == 0 ))
then
    g_srcdir=$(dirname "${p_url}")
    g_remote_initial_job=$(basename "${p_url}")
    r_initial_url="${g_srcdir}/${g_remote_initial_job}"
    if [ ! -d "${g_workingdir}" ]
    then
        mkdir "${g_workingdir}"
        chmod a+rwx "${g_workingdir}"
    fi
    if [ ! -d "${g_packagesdir}" ]
    then
        mkdir "${g_packagesdir}"
        chmod a+rwx "${g_packagesdir}"
    fi
    # abort rather than run the "rm -f" below in the wrong directory
    cd "${g_workingdir}" || exit 1
    rm -f baseline version
    echo "${g_srcdir}" > "${g_source_config}"
    rm -f "${g_upgrade_config}"
    if (( ${#g_url_parameters[@]} > 0 ))
    then
        printf "%s\n" "${g_url_parameters[@]}" > "${g_upgrade_config}"
    else
        touch "${g_upgrade_config}"
    fi
    printf "\n\n##################################### START (%d)\n" $$ >> "${g_log_file}"
    set_lock
    lsof -p $$ >> "${g_log_file}"
    # detach (nohup-like): ignore SIGHUP and redirect the standard streams
    trap "" 1
    exec >>/var/log/Upgrade.log
    exec 2>&1
    # NB: the original had "exec 0> ${g_log_file}", which opened stdin for
    # WRITING and truncated the log just written to; read from /dev/null instead
    exec 0</dev/null
    # Need to extend watchdog for QT-200 Mark 1 NOR flash as
    # deleting files and/or writing to flash causes watchdog reset (SUP-122 & QTT-4387)
    # This is needed in the existing newer baseline for a downgrade to an old baseline
    message "Restart watchdog with maximum margin and no QT application check, existing baseline"
    rc.watchdog -m -t restart >> "${g_log_file}"
    # run the upgrade pipeline; stop at the first failing step
    for g_step in downloading checking unzipping running
    do
        message "${g_step}"
        case ${g_step} in
            downloading) downloading_initial_job ;;
            checking)    checking_initial_job ;;
            unzipping)   unzipping_initial_job ;;
            running)     running_job ;;
        esac
        l_retour=$?
        if [ ${l_retour} -ne 0 ]
        then
            g_exitcode=${l_retour}
            break
        fi
    done
    remove_lock
else
    usage
    g_exitcode=-1
fi
exit ${g_exitcode}