#!/usr/bin/sh
#----------------------------------------------------------------------------------
# /var/install/bin/certs-update-crl - update crl files
#
# Copyright (c) 2001-2025 The Eisfair Team, team(at)eisfair(dot)org
#
# Creation:    2007-05-29 jed
# Last Update: $Id$
#
# Usage:
#
# certs-update-crl [--quiet] --checkall          - check if all CRL files exist
#
# certs-update-crl [--quiet] --all               - force update of all CRL files
# certs-update-crl [--quiet] --single            - force update of a single CRL file
#
# certs-update-crl [--quiet] --grepsingleuri     - grep CRL URI from a single
#                                                  certificate and continue with
#                                                  check of at-jobs
# certs-update-crl [--quiet] --grepuri           - grep CRL URIs from certificates
#                                                  and continue with check of
#                                                  at-jobs
#
# certs-update-crl [--quiet] --grepsingleurionly - grep CRL URI from a single
#                                                  certificate only
# certs-update-crl [--quiet] --grepurionly       - grep CRL URIs from
#                                                  certificates only
#
# certs-update-crl [--quiet] --showsingleuri     - show URI of a single certificate
#
# certs-update-crl [--quiet] --createjobs        - create initial at-jobs based on
#                                                  information fetched from CRLs
# certs-update-crl [--quiet] --deletejobs        - delete at-jobs based on job list file
# certs-update-crl [--quiet] --checkjobs         - check if all at-jobs exist
# certs-update-crl [--quiet] --listjobs          - list all active at-jobs
# certs-update-crl [--quiet] --listnextjob       - list next scheduled at-job
# certs-update-crl [--quiet] --jobdetails        - show details of a job
#
# certs-update-crl [--quiet] --searchsingleuri   - search certificates which are
#                                                  referring to given CRL URI
# certs-update-crl [--quiet] --searchalluris     - search certificates which are
#                                                  referring to CRL on the active
#                                                  CRL list
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#----------------------------------------------------------------------------------

# include eislib etc.
. /var/install/include/eislib
. /var/install/include/jedlib

# activate debug output
#debug_certs=true

if ${debug_certs:-false}
then
    case $1 in
        *-[acdgljs]* )
            # add function call to trace file name
            cmd_name=`echo "$1" | sed 's/^-*//'`
            exec 2>/tmp/$(basename ${0})-config-${cmd_name}-$$.log
            ;;
        * )
            exec 2>/tmp/$(basename ${0})-config-$$.log
            ;;
    esac
    set -x
    ask_debug=true
    export ask_debug
fi

EXEC_TIMESTAMP="`date +"%Y-%m-%d %H:%M:%S"`"
EXEC_CMD_LINE="$0 $*"
EXEC_INSTANCE=$$

pgmname=`basename $0 .sh`
logdir=/var/log
rundir=/var/run
tmpdir=/tmp
ssldir=/usr/local/ssl
vardir=/var/certs/ssl
certdir=${ssldir}/certs
crldir=${ssldir}/crl
joblist=${ssldir}/certs-update-crl-joblist
joblist_lock=${rundir}/certs-update-crl-joblist.lock
urllist=${vardir}/certs-update-crl-list
urllist_lock=${rundir}/certs-update-crl-urllist.lock
crllog=${logdir}/certs-update-crl.log

crl_out_of_date=7776000         # 3 months (3*30*24*60*60)
atjob_delay=180                 # 3 min (3*60)
atjob_run_cmd="/var/install/bin/certs-update-crl --quiet --single"
max_lock_wait=30                # wait max. 30s

wget_bin=/usr/bin/wget.sh
wget_agent_1="Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:53.0) Gecko/20100101 Firefox/53.0"
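
#----------------------------------------------------------------------------------
# List file formats, as read and written by the functions below (the example
# values are illustrative only, not taken from a real installation):
#
#   ${urllist} - one entry per CRL/OCSP source, comment and empty lines allowed:
#       CRL|<download url>|<local CRL file in PEM format>
#       OCSP|<responder url>!<certificate file>|<local .ocsp file>
#     e.g. CRL|http://crl.example.org/root-ca.crl|crl.example.org-root-ca.crl.pem
#
#   ${joblist} - one entry per scheduled at-job:
#       <at-job number>|<execution date>|<lastUpdate date>|<download url>
#     e.g. 968|2013-12-25 10:04|2013-11-09 13:50|http://crl.example.org/root-ca.crl
#
# Each at-job simply re-runs this script for a single URL, e.g.:
#   /var/install/bin/certs-update-crl --quiet --single 'http://crl.example.org/root-ca.crl'
#----------------------------------------------------------------------------------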
#----------------------------------------------------------------------------------
# my own echo
#----------------------------------------------------------------------------------
myecho ()
{
    EXEC_FUNCTION='===begin:myecho==='
    local _me_force_logging=0
    local _me_label=''
    local _me_outstr
    local _me_switch

    if [ "$1" = "--log" ]
    then
        if [ ${force_logging} -eq 0 ]
        then
            _me_force_logging=1
        fi
        shift
    fi

    case $1 in
        *-std )
            _me_switch=$1
            shift
            _me_label=''
            _me_outstr="`echo "$*" | sed -r 's/^(-)?-std //g'`"
            ;;
        *-info )
            _me_switch=$1
            shift
            # _me_label="Information: "
            _me_outstr="`echo "$*" | sed -r 's/^(-)?-info //g'`"
            ;;
        *-warn )
            _me_switch=$1
            shift
            # _me_label="Warning: "
            _me_outstr="`echo "$*" | sed -r 's/^(-)?-warn //g'`"
            ;;
        *-error )
            _me_switch=$1
            shift
            # _me_label="Error: "
            _me_outstr="`echo "$*" | sed -r 's/^(-)?-error //g'`"
            ;;
        * )
            _me_switch=''
            _me_label=''
            _me_outstr="$*"
            ;;
    esac

    loglabel="`date +"%b %e %T"` `hostname` `basename $0`[${EXEC_INSTANCE}]:"

    if [ ${force_quiet_run} -eq 1 ]
    then
        # silent mode
        # Aug 7 13:01:34 hostname scriptname[PID]:
        echo "${loglabel} ${_me_label}${_me_outstr}" >> ${crllog}
    else
        # be verbose
        mecho ${_me_switch} "${_me_outstr}"

        if [ ${force_logging} -eq 1 -o ${_me_force_logging} -eq 1 ]
        then
            # ...and log to file
            # Aug 7 13:01:34 hostname scriptname[PID]:
            echo "${loglabel} ${_me_label}${_me_outstr}" >> ${crllog}
        fi
    fi
    EXEC_FUNCTION='===end:myecho==='
}

#----------------------------------------------------------------------------------
# generate uniq job execution time
#
# input : $1 - date/time stamp
# return: uniq exec time
#----------------------------------------------------------------------------------
uniq_job_exec_time ()
{
    EXEC_FUNCTION='===begin:uniq_job_exec_time==='
    local _ujet_date="$1"
    local _ujet_nomatch
    local _ujet_numeric
    local _ujet_ret
    local _ujet_tmpdate
    local _ujet_transfile

    if [ -f ${joblist} ]
    then
        # job list exists - check time stamp
        _ujet_transfile=`mktemp -p ${tmpdir}`

        _ujet_numeric=0
        if is_numeric "${_ujet_date}"
        then
            # numeric date, convert it
            _ujet_numeric=1
            _ujet_date=`date -d @${_ujet_date} +"%Y-%m-%d %H:%M"`
        fi

        _ujet_nomatch=0
        while [ ${_ujet_nomatch} -eq 0 ]
        do
            {
                flock -s -w ${max_lock_wait} 200
                cut -d'|' -f2 ${joblist} | grep -q "${_ujet_date}"
                _ujet_ret=$?

                # write output to transfer file
                echo "_ujet_ret=\"${_ujet_ret}\"" > ${_ujet_transfile}
            } 200> ${joblist_lock}

            # read data from transfer file
            . ${_ujet_transfile}
            rm -f ${_ujet_transfile}

            if [ ${_ujet_ret} -eq 0 ]
            then
                # time stamp not uniq - increase time by $atjob_delay
                _ujet_tmpdate=`date -d "${_ujet_date}" +"%s"`
                _ujet_tmpdate=`expr ${_ujet_tmpdate} + ${atjob_delay}`
                _ujet_date=`date -d @${_ujet_tmpdate} +"%Y-%m-%d %H:%M"`
            else
                # uniq time stamp found
                _ujet_nomatch=1
                break
            fi
        done
        rm -f ${_ujet_transfile}

        if [ ${_ujet_numeric} -eq 1 ]
        then
            # convert date to numeric value
            _ujet_date=`date -d "${_ujet_date}" +"%s"`
        fi
    fi
    echo "${_ujet_date}"
    EXEC_FUNCTION='===end:uniq_job_exec_time==='
}
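
# uniq_job_exec_time - illustrative example (values are made up): if the job list
# already contains an entry for "2025-01-10 09:30", a job requested for the same
# minute is moved to 09:33, then 09:36, ... until a free minute is found
# (step = atjob_delay seconds).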

#----------------------------------------------------------------------------------
# extract crl uri from certificate file
#
# input : $1 - certificate file (full path)
# return: crl uri or empty
#----------------------------------------------------------------------------------
extract_crl_uri ()
{
    EXEC_FUNCTION='===begin:extract_crl_uri==='
    local _ecu_crl_found=0
    local _ecu_file="$1"
    local _ecu_ocsp_found=0
    local _ecu_ocsp_uri=''
    local _ecu_oldifs
    local _ecu_ret
    local _ecu_smin
    local _ecu_sstr
    local _ecu_snum
    local _ecu_transfile
    local _ecu_uri=''

    ### check if a crl uri exists ###
    # example:
    #
    #   X509v3 CRL Distribution Points:
    #
    #       Full Name:
    #         URI:ldap://directory.d-trust.net/CN=D-TRUST%20Root%20Class%203%20CA%202%202009,O=D-Trust%20GmbH,C=DE?certificaterevocationlist
    #
    #       Full Name:
    #         URI:http://www.d-trust.net/crl/d-trust_root_class_3_ca_2_2009.crl
    #
    #   Signature Algorithm: sha256WithRSAEncryption
    #        7f:97:db:30:c8:df:a4:9c:7d:21:7a:80:70:ce:14:12:69:88:
    #        14:95:60:44:01:ac:b2:e9:30:4f:9b:50:c2:66:d8:7e:8d:30:
    #        b5:70:31:e9:e2:...
    _ecu_oldifs=$IFS
    IFS=''

    # 1. output certificate details
    # 2. keep only part from 'X509v3 CRL Distribution Points:' to end of file
    # 3. remove all empty lines
    # 4. check output line by line
    _ecu_smin=0
    _ecu_transfile=`mktemp -p ${tmpdir}`
    ${openssl_bin} x509 -in "${_ecu_file}" -noout -text | sed -n '/X509v3 CRL Distribution Points:/,$p' | grep -v "^[[:space:]]*$" > ${_ecu_transfile}

    while read line
    do
        # get number of leading spaces
        _ecu_sstr="`echo "${line}" | sed 's/^\( *\).*$/\1/g'`"
        _ecu_snum=`expr length "${_ecu_sstr}"`
        # echo ":${_ecu_snum}:${_ecu_sstr}:"

        if [ ${_ecu_smin} -eq 0 ]
        then
            # check if 'X509v3 CRL Distribution Points:' block starts
            echo "${line}" | grep -i -q "X509v3 CRL Distribution Points:"
            if [ $? -eq 0 ]
            then
                # if yes, keep number of leading spaces
                _ecu_smin=${_ecu_snum}
            fi
        else
            # check content of 'X509v3 CRL Distribution Points:' block
            if [ ${_ecu_snum} -le ${_ecu_smin} ]
            then
                # exit loop if the number of leading spaces is less than or
                # equal to the number from the start line
                break
            else
                # check if a http-uri exists and skip e.g. ldap-uri
                echo "${line}" | grep -i -q "URI:http"
                if [ $? -eq 0 ]
                then
                    # if yes, output the uri address and exit loop
                    # 1. remove leading URI label
                    # 2. remove spaces
                    # 3. convert the hostname part of the URI to lowercase
                    _ecu_uri="CRL|`echo "${line}" | sed -e 's/.*URI://g' -e 's/ *//g' -e 's#://.*/#\L&#g'`"
                    _ecu_crl_found=1
                    break
                fi
            fi
        fi
    done < ${_ecu_transfile}
    rm -f ${_ecu_transfile}
    IFS=${_ecu_oldifs}
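
    # Precedence between the two sources (as implemented below): if both a CRL
    # distribution point and an OCSP responder URI are found, the CRL URI is kept
    # unless ${urllist} exists and does not yet contain it - in that case the
    # OCSP URI is used instead. Typical return values (illustrative):
    #   CRL|http://www.d-trust.net/crl/d-trust_root_class_3_ca_2_2009.crl
    #   OCSP|http://ocsp.example.org/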

    ### check if an ocsp uri exists ###
    # 1. append a trailing '/' if it is missing
    # 2. convert the URI to lowercase
    _ecu_ocsp_uri=`${openssl_bin} x509 -noout -in "${_ecu_file}" -ocsp_uri | sed 's#\([^/]\)$#\1/#' | tr '[:upper:]' '[:lower:]'`
    if [ -n "${_ecu_ocsp_uri}" ]
    then
        _ecu_ocsp_uri="OCSP|${_ecu_ocsp_uri}"
        _ecu_ocsp_found=1
    fi

    if [ ${_ecu_ocsp_found} -eq 1 ]
    then
        if [ ${_ecu_crl_found} -eq 1 ]
        then
            # ocsp uri found, check if a crl uri already exists in urllist
            if [ -f ${urllist} ]
            then
                _ecu_ret=0
                {
                    flock -s -w ${max_lock_wait} 200
                    grep -q "^${_ecu_uri}" ${urllist}
                    _ecu_ret=$?

                    # write output to transfer file
                    echo "_ecu_ret=\"${_ecu_ret}\"" > ${_ecu_transfile}
                } 200> ${urllist_lock}

                # read data from transfer file
                . ${_ecu_transfile}
                rm -f ${_ecu_transfile}

                if [ ${_ecu_ret} -ne 0 ]
                then
                    # crl uri not found in urllist, use ocsp uri
                    _ecu_uri="${_ecu_ocsp_uri}"
                fi
            fi
        else
            # only ocsp uri exists, use it
            _ecu_uri="${_ecu_ocsp_uri}"
        fi
    fi
    echo "${_ecu_uri}"
    EXEC_FUNCTION='===end:extract_crl_uri==='
}

#----------------------------------------------------------------------------------
# grep crl uri from single certificate
#
# input : $1 - certificate file (without path)
#----------------------------------------------------------------------------------
grep_crl_uri_from_single_cert ()
{
    EXEC_FUNCTION='===begin:grep_crl_uri_from_single_cert==='
    # make sure that .pem extension is not doubled
    local _gcufsc_certfile="${certdir}/`basename "${1}" '.pem'`.pem"
    local _gcufsc_short_certfile
    local _gcufsc_transfile
    local _gcufsc_crluri
    local _gcufsc_type
    local _gcufsc_search_uri
    local _gcufsc_ret

    if [ -f ${_gcufsc_certfile} ]
    then
        # certificate found, go on ...
        _gcufsc_short_certfile=`basename "${_gcufsc_certfile}"`
        _gcufsc_transfile=`mktemp -p ${tmpdir}`

        # get the crl uri
        _gcufsc_crluri=`extract_crl_uri "${_gcufsc_certfile}"`

        if [ -n "${_gcufsc_crluri}" ]
        then
            # crl uri found
            _gcufsc_type=`echo "${_gcufsc_crluri}" | cut -d'|' -f1 | tr '[:upper:]' '[:lower:]'`

            # remove identifier
            _gcufsc_crluri=`echo "${_gcufsc_crluri}" | cut -d'|' -f2`

            myecho "- file ${_gcufsc_short_certfile} ..."

            # process individual uri
            # change 'http://' and 'https://' to 'http[s]?://' to match both protocols
            _gcufsc_search_uri=`echo "${_gcufsc_crluri}" | sed 's#^http[s]*://#http[s]?://#'`

            _gcufsc_ret=1
            if [ -f ${urllist} ]
            then
                {
                    flock -s -w ${max_lock_wait} 200
                    if [ "${_gcufsc_type}" = 'ocsp' ]
                    then
                        # concatenate ocsp uri with cert name to make it uniq
                        grep -q -E "\|${_gcufsc_search_uri}!${_gcufsc_short_certfile}" ${urllist}
                    else
                        grep -q -E "\|${_gcufsc_search_uri}" ${urllist}
                    fi
                    _gcufsc_ret=$?

                    # write output to transfer file
                    echo "_gcufsc_ret=\"${_gcufsc_ret}\"" > ${_gcufsc_transfile}
                } 200> ${urllist_lock}

                # read data from transfer file
                . ${_gcufsc_transfile}
                rm -f ${_gcufsc_transfile}
            fi

            if [ ${_gcufsc_ret} -ne 0 ]
            then
                # URL doesn't exist in URL list, add it ...
                _gcufsc_no_update=0
                if host_exists "${_gcufsc_crluri}"
                then
                    # host exists
                    if [ ${_gcufsc_no_update} -eq 0 ]
                    then
                        if [ -f ${urllist} ]
                        then
                            # backup existing file
                            {
                                flock -s -w ${max_lock_wait} 200
                                /var/install/bin/backup-file --quiet ${urllist}
                            } 200> ${urllist_lock}
                        fi

                        # add header
                        {
                            flock -e -w ${max_lock_wait} 200
                            {
                                echo
                                echo "# automatically added on `date +"%Y-%m-%d %H:%M:%S"` (source: ${_gcufsc_short_certfile})"
                            } >> ${urllist}
                        } 200> ${urllist_lock}
                        _gcufsc_no_update=1
                    fi

                    # add new uri
                    if [ "${_gcufsc_type}" = 'ocsp' ]
                    then
                        myecho --log " url '${_gcufsc_crluri}!${_gcufsc_short_certfile}' added to CRL list."
                    else
                        myecho --log " url '${_gcufsc_crluri}' added to CRL list."
fi { flock -e -w ${max_lock_wait} 200 if [ "${_gcufsc_type}" = 'ocsp' ] then # concatenate ocsp uri with cert name to make it uniq echo "OCSP|${_gcufsc_crluri}!${_gcufsc_short_certfile}" >> ${urllist} else echo "CRL|${_gcufsc_crluri}" >> ${urllist} fi } 200> ${urllist_lock} fi else if [ "${_gcufsc_type}" = 'ocsp' ] then myecho --info " url '${_gcufsc_search_uri}!${_gcufsc_short_certfile}' already in CRL list." else myecho --info " url '${_gcufsc_search_uri}' already in CRL list." fi fi fi rm -f ${_gcufsc_transfile} else myecho --error "- file '${_gcufsc_certfile}' doesn't exist!" fi EXEC_FUNCTION='===end:grep_crl_uri_from_single_cert===' } #---------------------------------------------------------------------------------- # grep crl uri from cetificate(s) #---------------------------------------------------------------------------------- grep_crl_uri_from_certs () { EXEC_FUNCTION='===begin:grep_crl_uri_from_certs===' local _gcufc_oldifs local _gcufc_filelist local _gcufc_idx local _gcufc_no_update if [ -n "`find ${certdir} -maxdepth 1 -type f -name "*.pem" -printf '%p\n'`" ] then # certificates found, go on ... myecho --log "fetching CRL URLs from certificates ..." # separator set to newline (\n) to handle file names which contain spaces correctly _gcufc_oldifs="$IFS" IFS=$'\n' _gcufc_filelist=`find ${certdir} -maxdepth 1 -type f -name "*.pem" -printf '%f\n' | sort` _gcufc_idx=1 _gcufc_no_update=0 for CNAME in ${_gcufc_filelist} do # process each certificate grep_crl_uri_from_single_cert "${CNAME}" _gcufc_idx=`expr ${_gcufc_idx} + 1` done # myecho --log "done." IFS="${_gcufc_oldifs}" fi EXEC_FUNCTION='===end:grep_crl_uri_from_certs===' } #---------------------------------------------------------------------------------- # search certificates which are referring to given CRL URI # # input : $1 - CRL download URL # $2 - suppress search header # return: list of referring certificates #---------------------------------------------------------------------------------- search_crl_uri () { EXEC_FUNCTION='===begin:search_crl_uri===' local _scu_search_url="$1" local _scu_no_header=0 local _scu_crl_file local _scu_idx local _scu_no_update local _scu_tmpfile local _scu_certfile local _scu_short_certfile local _scu_crluri local _scu_type local _scu_tmpurl local _scu_certlist if [ "$2" = 'noheader' ] then _scu_no_header=1 fi if [ -n "${_scu_search_url}" ] then if [ -n "`find ${certdir} -maxdepth 1 -type f -name "*.pem" -printf '%p\n' -or -name "*.ocsp" -printf '%p\n'`" ] then _scu_crl_file='' grep -E -q "\|${_scu_search_url}\|" ${urllist} if [ $? -eq 0 ] then _scu_crl_file=`grep -E "\|${_scu_search_url}\|" ${urllist} | cut -d'|' -f3` fi if [ ${_scu_no_header} -eq 0 ] then myecho "searching referring certificates ..." fi myecho "- CRL file: ${_scu_crl_file}" myecho " CRL URI : ${_scu_search_url}" _scu_filelist=`find ${certdir} -maxdepth 1 -type f -name "*.pem" -printf '%p\n' -or -name "*.ocsp" -printf '%p\n'` _scu_idx=1 _scu_no_update=0 _scu_tmpfile=`mktemp -p ${tmpdir}` for CNAME in ${_scu_filelist} do # process each certificate _scu_certfile="${CNAME}" _scu_short_certfile=`basename "${_scu_certfile}"` _scu_crluri=`extract_crl_uri "${_scu_certfile}"` echo "${_scu_crluri}" | grep -E -i -q "^OCSP\|" if [ $? 
-eq 0 ] then _scu_type='ocsp' else _scu_type='crl' fi # remove identifier _scu_crluri=`echo "${_scu_crluri}" | sed -e 's/^OCSP|//' -e 's/CRL|//'` if [ -n "${_scu_crluri}" ] then # write match to temporary file if [ "${_scu_type}" = 'ocsp' ] then echo "${_scu_certfile}|${_scu_crluri}!${_scu_short_certfile}" >> ${_scu_tmpfile} else echo "${_scu_certfile}|${_scu_crluri}" >> ${_scu_tmpfile} fi fi done _scu_tmpurl=`echo "${_scu_search_url}" | sed -E 's#^http[s]?:#http[s]?:#'` _scu_certlist=`grep -E "\\|${_scu_tmpurl}" ${_scu_tmpfile}` if [ $? -eq 0 ] then # read match from temporary file for LINE in `echo "${_scu_certlist}" | sort` do _scu_certfile=`echo "${LINE}" | cut -d'|' -f1` _scu_certfile=`basename "${_scu_certfile}"` myecho --info " -> certificate '${_scu_certfile}' found." done else myecho --warn " -> no referring certificates found!" fi rm -f ${_scu_tmpfile} else myecho --warn "no certificates found in '${certdir}'!" fi else myecho --warn "no search URL given!" fi EXEC_FUNCTION='===end:search_crl_uri===' } #---------------------------------------------------------------------------------- # add CRL filename to list # # input : $1 - type # $2 - download URL # $3 - file in PEM format (without path) #---------------------------------------------------------------------------------- add_crl_filename_to_list () { EXEC_FUNCTION='===begin:add_crl_filename_to_list===' local _adf_type=`echo "$1" | tr '[:upper:]' '[:lower:]'` local _adf_crl_url="$2" local _adf_crl_pem_file="`basename "$3"`" local _adf_transfile=`mktemp -p ${tmpdir}` local _adf_ret=1 local _adf_no_update if [ -f ${urllist} ] then { flock -s -w ${max_lock_wait} 200 grep -E -q "\|${_adf_crl_url}" ${urllist} _adf_ret=$? # write output to transfer file echo "_adf_ret=\"${_adf_ret}\"" > ${_adf_transfile} } 200> ${urllist_lock} # read data from transfer file . ${_adf_transfile} rm -f ${_adf_transfile} fi if [ ${_adf_ret} -eq 0 ] then # download URL already exists in list file myecho "- updating CRL list ..." _adf_tmpfile=`mktemp -p ${tmpdir}` { flock -e -w ${max_lock_wait} 200 cp ${urllist} ${_adf_tmpfile} sed "s#|${_adf_crl_url}.*#|${_adf_crl_url}|${_adf_crl_pem_file}#g" ${_adf_tmpfile} > ${urllist} } 200> ${urllist_lock} rm -f ${_adf_tmpfile} else # download URL doesn't exist in list file, add it ... _adf_no_update=0 if host_exists "${_adf_crl_url}" then # host exists if [ ${_adf_no_update} -eq 0 ] then if [ -f ${urllist} ] then # backup existing file { flock -s -w ${max_lock_wait} 200 /var/install/bin/backup-file --quiet ${urllist} } 200> ${urllist_lock} fi # add header { flock -e -w ${max_lock_wait} 200 { echo echo "# automatically added on `date +"%Y-%m-%d %H:%M:%S"`" } >> ${urllist} } 200> ${urllist_lock} _adf_no_update=1 fi # add new uri myecho --log "- url '${_adf_crl_url}' added to CRL list." { flock -e -w ${max_lock_wait} 200 if [ "${_adf_type}" = 'ocsp' ] then echo "OCSP|${_adf_crl_url}" >> ${urllist} else echo "CRL|${_adf_crl_url}" >> ${urllist} fi } 200> ${urllist_lock} fi fi EXEC_FUNCTION='===end:add_crl_filename_to_list===' } #---------------------------------------------------------------------------------- # check if CRL files exist #---------------------------------------------------------------------------------- check_if_crl_files_exist () { EXEC_FUNCTION='===begin:check_if_crl_files_exist===' local _ccf_transfile local _ccf_type local _ccf_url local _ccf_tmpfile local _ccf_file if [ -f ${urllist} ] then myecho "checking if CRL files exist ..." 
_ccf_transfile=`mktemp -p ${tmpdir}` # create cleaned url list, without comments, ldap[s]:// and empty lines { flock -s -w ${max_lock_wait} 200 grep -E -v "^#|^ *$|\|ldap[s]?:" ${urllist} > ${_ccf_transfile} } 200> ${urllist_lock} for LINE in `cat ${_ccf_transfile}` do _ccf_type=`echo "${LINE}" | cut -d'|' -f1 | tr '[:upper:]' '[:lower:]'` _ccf_url=`echo "${LINE}" | cut -d'|' -f2` _ccf_tmpfile=`echo "${LINE}" | cut -d'|' -f3` _ccf_file="`basename "${_ccf_tmpfile}"`" if [ -z "${_ccf_file}" ] || [ ! -f ${crldir}/${_ccf_file} ] then # CRL file not known or file doesn't exist # download CRL and add file to list if process_single_crl "${_ccf_type}" "${_ccf_url}" then force_crl_rehash=1 fi else # CRL file exists, make sure that an at-job exists create_single_at_job "${_ccf_type}" "${_ccf_url}" "${_ccf_file}" fi done rm -f ${_ccf_transfile} fi EXEC_FUNCTION='===end:check_if_crl_files_exist===' } #---------------------------------------------------------------------------------- # check if crl is valid # # input : $1 - name of CRL file # return: 0 - valid # 1 - not valid #---------------------------------------------------------------------------------- is_valid_crl () { EXEC_FUNCTION='===begin:is_valid_crl===' local _ivc_crl_file="$1" ${openssl_bin} crl -CApath ${certdir} -in ${_ivc_crl_file} -noout } #---------------------------------------------------------------------------------- # download crl from url # # input : $1 - type # $2 - download URL # $3 - temporary directory # return: 0 - successful # 1 - not successful #---------------------------------------------------------------------------------- download_crl () { EXEC_FUNCTION='===begin:download_crl===' local _dc_type=`echo "$1" | tr '[:upper:]' '[:lower:]'` local _dc_url="$2" local _dc_crltmpdir="$3" local _dc_cert local _dc_result local _dc_ret local _dc_uagent local _dc_wget_options local _dc_crlhost local _dc_crlname local _dc_crldestname if [ -n "${_dc_crltmpdir}" -a -d ${_dc_crltmpdir} ] then # temporary directory given - check if crl file exists myecho "- downloading '${_dc_url}' ..." if [ "${_dc_type}" = 'ocsp' ] then ### OCSP ### # http://ocsp.int-x3.letsencrypt.org/!eis.example.com.pem # -> 'eis.example.com.pem' _dc_cert=`echo "${_dc_url}" | sed 's/^.*\!//'` _dc_result=`/var/install/bin/certs-show-chain --nogui --tableview "${_dc_cert}" | sed 's/^.*:\([A-Za-z]*\):/\1/'` if [ "${_dc_result}" = 'OK' ] then _dc_ret=0 else # error myecho --warn " file '${_dc_url}' download failed!" _dc_ret=1 fi else ### CRL ### _dc_uagent='' for UAGENT in '' "${wget_agent_1}" do if [ -n "${UAGENT}" ] then ${wget_bin} --user-agent="${UAGENT}" --quiet --spider --timeout=30 --tries=1 ${_dc_url} _dc_uagent="${UAGENT}" else ${wget_bin} --quiet --spider --timeout=30 --tries=1 ${_dc_url} fi _dc_ret=$? if [ ${_dc_ret} -eq 0 ] then break fi done if [ ${_dc_ret} -eq 0 ] then # crl file exists, download it if [ ${force_quiet_run} -eq 0 ] then _dc_wget_options='--show-progress' else _dc_wget_options='' fi # use user-agent based on previous crl check if [ -n "${_dc_uagent}" ] then ${wget_bin} ${_dc_wget_options} --user-agent="${_dc_uagent}" --quiet --timeout=300 --tries=1 -P ${_dc_crltmpdir} ${_dc_url} else ${wget_bin} ${_dc_wget_options} --quiet --timeout=300 --tries=1 -P ${_dc_crltmpdir} ${_dc_url} fi _dc_ret=$? 
if [ ${_dc_ret} -eq 0 ] then # add server name to crl-file _dc_crlhost=`echo "${_dc_url}" | sed -e 's#^.*:\/\/##g' -e 's#\/.*$##g'` _dc_crlname=`find ${_dc_crltmpdir} -maxdepth 1 -type f -printf '%f\n' | head -1` # remove unwanted characters and spaces from file name _dc_crldestname=`echo "${_dc_crlname}" | sed 's#[()]#-#g' | tr -s ' ' '_'` # rename crl-file if [ -f "${_dc_crltmpdir}/${_dc_crlname}" ] then mv "${_dc_crltmpdir}/${_dc_crlname}" ${_dc_crltmpdir}/${_dc_crlhost}-${_dc_crldestname} fi else # error myecho --warn " file '${_dc_url}' download failed!" fi else # error myecho --warn " file '${_dc_url}' doesn't exist!" fi fi else # error myecho --error " file '${_dc_url}' download impossible because the temporary directory doesn't exist!" _dc_ret=1 fi EXEC_FUNCTION='===end:download_crl===' return ${_dc_ret} } #---------------------------------------------------------------------------------- # convert DER to PEM certificate # # input : $1 - file name of CRL in DER format (with path) # return: 0 - successful # 1 - not successful #---------------------------------------------------------------------------------- convert_der_to_pem_cert () { EXEC_FUNCTION='===begin:convert_der_to_pem_cert===' local _cdtpc_der_cert="$1" local _cdtpc_ret=1 # check file type local _cdtpc_ftype=`/usr/bin/file "${_cdtpc_der_cert}" | cut -d: -f2 | sed 's/ *//g'` local _cdtpc_pem_cert="${crldir}/`basename "${_cdtpc_der_cert}" ".pem"`.pem" case ${_cdtpc_ftype} in 'data' ) case "${_cdtpc_der_cert}" in *.cer ) # ignore certifikates myecho --warn " not a CRL file, will be ignored." ;; * ) # DER format, convert it to PEM format myecho " converting CRL file to PEM format ..." ${openssl_bin} crl -inform DER -outform PEM -in "${_cdtpc_der_cert}" -out "${_cdtpc_pem_cert}" _cdtpc_ret=0 ;; esac ;; 'ASCIItext' ) # already in PEM format, only copy it myecho --info "- CRL file already in PEM format." cp "${_cdtpc_der_cert}" "${_cdtpc_pem_cert}" _cdtpc_ret=0 ;; 'ASCIItext,withCRLFlineterminators' ) # already in PEM format, but in DOS file format myecho --info "- CRL file already in PEM format (dos)." /usr/bin/dtou "${_cdtpc_der_cert}" cp "${_cdtpc_der_cert}" "${_cdtpc_pem_cert}" _cdtpc_ret=0 ;; * ) myecho --error "- unknown CRL file format '${_cdtpc_ftype}'." ;; esac EXEC_FUNCTION='===end:convert_der_to_pem_cert===' return ${_cdtpc_ret} } #---------------------------------------------------------------------------------- # show single URL from certificate # # input : $1 - certificate file (without path) # return: CRL type|CRL URI #---------------------------------------------------------------------------------- show_single_uri () { EXEC_FUNCTION='===begin:show_single_uri===' # make sure that .pem extension is not doubled local _ssu_certfile="${certdir}/`basename "${1}" '.pem'`.pem" local _ssu_crluri if [ -f ${_ssu_certfile} ] then # certificates found, go on ... _ssu_crluri=`extract_crl_uri "${_ssu_certfile}"` fi echo "${_ssu_crluri}" EXEC_FUNCTION='===end:show_single_uri===' } #---------------------------------------------------------------------------------- # process all CRLs # # return: 0 - successful # 1 - not successful #---------------------------------------------------------------------------------- process_all_crls () { EXEC_FUNCTION='===begin:process_all_crls===' local _pac_ret=0 local _pac_transfile local _pac_type local _pac_url if [ -f ${urllist} ] then myecho "update all CRL files ..." 
# read cleaned url list and download given crls _pac_transfile=`mktemp -p ${tmpdir}` { flock -s -w ${max_lock_wait} 200 grep -E -v "^#|^ *$" ${urllist} | cut -d'|' -f1,2 > ${_pac_transfile} } 200> ${urllist_lock} for LINE in `cat ${_pac_transfile}` do _pac_type=`echo "${LINE}" | cut -d'|' -f1 | tr '[:upper:]' '[:lower:]'` _pac_url=`echo "${LINE}" | cut -d'|' -f2` if process_single_crl "${_pac_type}" "${_pac_url}" then force_crl_rehash=1 else # error _pac_ret=1 fi done rm -f ${_pac_transfile} else # error myecho --error "list file '${urllist}' doesn't exist - script aborted!" fi EXEC_FUNCTION='===end:process_all_crls===' return ${_pac_ret} } #---------------------------------------------------------------------------------- # process a single CRL # # input : $1 - type # $2 - download URL # return: 0 - successful # 1 - not successful #---------------------------------------------------------------------------------- process_single_crl () { EXEC_FUNCTION='===begin:process_single_crl===' local _psc_type=`echo "$1" | tr '[:upper:]' '[:lower:]'` local _psc_url="$2" local _psc_ret=1 local _psc_crltmpdir local _psc_pem_crl local _psc_der_crl local _psc_tmpfile if [ -n "${_psc_url}" ] then # create temporary directory _psc_crltmpdir=`mktemp -d -p ${tmpdir}` # update single CRL file if download_crl "${_psc_type}" "${_psc_url}" "${_psc_crltmpdir}" then # CRL download successful if [ "${_psc_type}" = 'ocsp' ] then ### OCSP ### # only OCSP responses in DER format are supported # derive name of OCSP response file from certificate file name _psc_pem_crl=`echo "${_psc_url}" | sed 's/^.*!\(.*\).pem/\1.ocsp/'` add_crl_filename_to_list "${_psc_type}" "${_psc_url}" "${_psc_pem_crl}" create_single_at_job "${_psc_type}" "${_psc_url}" "${_psc_pem_crl}" else ### CRL ### # convert certificates from DER to PEM format _psc_der_crl=`find ${_psc_crltmpdir} -maxdepth 1 -type f -printf '%p\n' | head -1` if [ -n "${_psc_der_crl}" -a -s "${_psc_der_crl}" ] then # downloaded file found, make sure that .pem extension is not doubled _psc_pem_crl="`basename "${_psc_der_crl}" ".pem"`.pem" if convert_der_to_pem_cert "${_psc_der_crl}" then add_crl_filename_to_list "${_psc_type}" "${_psc_url}" "${_psc_pem_crl}" create_single_at_job "${_psc_type}" "${_psc_url}" "${_psc_pem_crl}" fi fi fi _psc_ret=0 else EXEC_FUNCTION='===process_single_crl:download_failed-reschedule_job===' if [ -f ${urllist} ] then grep -q "^${_psc_url}" ${urllist} if [ $? -eq 0 ] then if [ "${_psc_type}" = 'ocsp' ] then ### OCSP ### # derive name of OCSP response file from certificate file name _psc_pem_crl=`echo "${_psc_url}" | sed 's/^.*!\(.*\).pem/\1.ocsp/'` else ### CRL ### _psc_tmpfile=`grep "^${_psc_url}" ${urllist} | cut -d'|' -f2` _psc_pem_crl="`basename "${_psc_tmpfile}"`" fi create_single_at_job "${_psc_type}" "${_psc_url}" "${_psc_pem_crl}" fi fi fi # remove temporary directory rm -fr ${_psc_crltmpdir} else myecho --warn "no download URL given!" 
fi EXEC_FUNCTION='===end:process_single_crl===' return ${_psc_ret} } #---------------------------------------------------------------------------------- # create single at-job to download/update CRL # # input : $1 - type: 'ocsp' or 'crl' # $2 - download URL # $3 - file name in PEM format (without path) or '' (empty) # return: 0 - successful # 1 - not successful #---------------------------------------------------------------------------------- create_single_at_job () { EXEC_FUNCTION='===begin:create_single_at_job===' # create/update url list of jobs if [ -f ${urllist} ] then _csj_type=`echo "$1" | tr '[:upper:]' '[:lower:]'` _csj_url="$2" _csj_file="`basename "$3"`" _csj_mflag=0 # flag to indicate date move _csj_invalid_nextdate=0 # flag to indicate invalid nextUpdate date _csj_ret=1 # check how many matches exist in job list _csj_matches=0 if [ -f ${joblist} ] then { flock -s -w ${max_lock_wait} 200 _csj_matches=`grep -E "\|${_csj_url}" ${joblist} | wc -l` } 200> ${joblist_lock} if [ ${_csj_matches} -gt 1 ] then # more than one entry exist, keep only newest one _csj_del_count=`expr ${_csj_matches} - 1` { flock -s -w ${max_lock_wait} 200 _csj_del_jobs=`grep -E "\|${_csj_url}" ${joblist} | sort | head -n ${_csj_del_count} | cut -d'|' -f1` } 200> ${joblist_lock} for JOB in ${_csj_del_jobs} do remove_at_job_from_list "${JOB}" done fi fi if [ -n "${_csj_file}" -a -s "${crldir}/${_csj_file}" ] then if [ "${_csj_type}" = 'ocsp' ] then ### OCSP ### # OCSP response file exists # This Update: Dec 15 21:00:00 2017 GMT _csj_tmpdate=`${openssl_bin} ocsp -respin ${crldir}/${_csj_file} -noverify -resp_text | grep -i " This Update:" | sed 's/^.*This Update: *//i'` _csj_lastdate=`date -d "${_csj_tmpdate}" +"%Y-%m-%d %H:%M"` # Next Update: Dec 22 21:00:00 2017 GMT _csj_tmpdate=`${openssl_bin} ocsp -respin ${crldir}/${_csj_file} -noverify -resp_text | grep -i " Next Update:" | sed 's/^.*Next Update: *//i'` _csj_nextdate=`date -d "${_csj_tmpdate}" +"%Y-%m-%d %H:%M" 2>/dev/null` else ### CRL ### # CRL file exists, go on... # lastUpdate=Nov 9 13:50:35 2013 GMT _csj_tmpdate=`${openssl_bin} crl -in ${crldir}/${_csj_file} -lastupdate -noout | sed 's/^lastUpdate=//g'` _csj_lastdate=`date -d "${_csj_tmpdate}" +"%Y-%m-%d %H:%M"` # nextUpdate=Nov 19 13:50:35 2013 GMT _csj_tmpdate=`${openssl_bin} crl -in ${crldir}/${_csj_file} -nextupdate -noout | sed 's/^nextUpdate=//g'` _csj_nextdate=`date -d "${_csj_tmpdate}" +"%Y-%m-%d %H:%M" 2>/dev/null` fi if [ $? -ne 0 ] then # invalid date returned, e.g. because of 'nextUpdate=NONE' _csj_invalid_nextdate=1 fi if [ -f ${joblist} ] then { flock -s -w ${max_lock_wait} 200 LINE=`grep -E "\|${_csj_url}" ${joblist}` _csj_ret=$? } 200> ${joblist_lock} fi if [ ${_csj_ret} -eq 0 ] then # an at-job already exists - compare time stamps _csj_atjob_nbr=`echo "${LINE}" | cut -d'|' -f1` _csj_atjob_tmpdate=`echo "${LINE}" | cut -d'|' -f2` _csj_atjob_date=`date -d "${_csj_atjob_tmpdate}" +"%s"` if [ ${_csj_invalid_nextdate} -eq 0 ] then _csj_new_date=`date -d "${_csj_nextdate}" +"%s"` if [ ${_csj_new_date} -gt ${_csj_atjob_date} ] then # excution date not up-to-date - remove old entry _csj_mflag=1 _csj_ret=1 _csj_execdate_org=`date -d @${_csj_atjob_date} +"%Y-%m-%d %H:%M"` atrm ${_csj_atjob_nbr} >/dev/null 2>/dev/null remove_at_job_from_list "${_csj_atjob_nbr}" elif [ `expr ${_csj_new_date} + ${crl_out_of_date}` -lt ${_csj_atjob_date} ] then # CRL hasn't been update for more than CRL out-of-date period, e.g. 3 month myecho --warn "- CRL file '${_csj_file}' hasn't been updated for a long time!" 
remove_at_job_from_list "${_csj_atjob_nbr}" fi else # invalid nextUpdate date in CRL file! myecho --warn "- CRL file '${_csj_file}' contains an invalid nextUpdate entry (${_csj_nextdate})!" remove_at_job_from_list "${_csj_atjob_nbr}" fi fi else # CRL file doesn't exist, make sure that a download is # forced instantly by setting the update date to the past _csj_nextdate=`date -d "1 days ago" +"%Y-%m-%d %H:%M"` if [ -n "${_csj_file}" ] then myecho --warn "- CRL file '${_csj_file}' doesn't exist, force download!" else myecho --warn "- CRL file doesn't exist, force download!" fi fi if [ ${_csj_ret} -ne 0 -o ! -f ${joblist} ] then # no job exists for URL - add a new one _csj_currdate=`date +"%s"` # add at-job delay (atjob_delay) to execution time _csj_tmpdate=`date -d "${_csj_nextdate}" +"%s"` _csj_tmpdate=`expr ${_csj_tmpdate} + ${atjob_delay}` if [ "${_csj_tmpdate}" -le ${_csj_currdate} ] then # job execution date is in the past - run it instantly _csj_execdate_org=`date -d @${_csj_tmpdate} +"%Y-%m-%d %H:%M"` _csj_tmpdate=`expr ${_csj_currdate} + ${atjob_delay}` _csj_mflag=2 fi # at-job command format: at 10:04 2013-12-25 _csj_tmpdate=`uniq_job_exec_time "${_csj_tmpdate}"` _csj_execdate=`date -d @${_csj_tmpdate} +"%H:%M %Y-%m-%d"` # job execution date is in the future # 1. cd to /tmp to guaranty that the execution directory is # accessible during job execution # 2. add at-job to queue and grep at-job number _csj_atjob_tmp_file=`mktemp -p ${tmpdir}` cd ${tmpdir} echo "${atjob_run_cmd} '${_csj_url}'" | at ${_csj_execdate} >> ${_csj_atjob_tmp_file} 2>> ${_csj_atjob_tmp_file} _csj_ret=$? echo "at-job exec code:${_csj_ret}" >> ${_csj_atjob_tmp_file} if [ ${_csj_ret} -eq 0 ] then # at-job successfully created _csj_atjob_nbr=`sed -e '/^job .* at/!d' -e 's/^job //' -e 's/ at.*$//' ${_csj_atjob_tmp_file}` case ${_csj_mflag} in 1) myecho --info "- job '${_csj_atjob_nbr}' (${_csj_execdate_org}->`date -d "${_csj_execdate}" +"%Y-%m-%d %H:%M"`) updated." ;; 2) myecho --info "- job '${_csj_atjob_nbr}' (${_csj_execdate_org}->`date -d "${_csj_execdate}" +"%Y-%m-%d %H:%M"`) created." ;; *) myecho "- job '${_csj_atjob_nbr}' (`date -d "${_csj_execdate}" +"%Y-%m-%d %H:%M"`) created." ;; esac myecho " url: ${_csj_url}" # store at-job information in list of jobs { flock -e -w ${max_lock_wait} 200 # check if at-job already exists in job list _csj_matches=`grep -E "\|${_csj_url}" ${joblist} | wc -l` if [ ${_csj_matches} -gt 0 ] then # at-job(s) with this url already exist(s), delete it ... _csj_tmpfile=`mktemp -p ${tmpdir}` cp ${joblist} ${_csj_tmpfile} grep -E -v "\|${_csj_url}" ${_csj_tmpfile} > ${joblist} rm -f ${_csj_tmpfile} fi echo "${_csj_atjob_nbr}|`date -d "${_csj_execdate}" +"%Y-%m-%d %H:%M"`|${_csj_lastdate}|${_csj_url}" >> ${joblist} } 200> ${joblist_lock} else myecho --error "- job creation for '${_csj_url}' failed!" fi rm -f ${_csj_atjob_tmp_file} else # job already exists if [ -n "${_csj_atjob_nbr}" -a -n "${_csj_atjob_date}" ] then _csj_execdate_org=`date -d @${_csj_atjob_date} +"%Y-%m-%d %H:%M"` myecho --info "- job '${_csj_atjob_nbr}' (${_csj_execdate_org}) already exists." 
fi fi fi EXEC_FUNCTION='===end:create_single_at_job===' } #---------------------------------------------------------------------------------- # create at-job to download/update CRL #---------------------------------------------------------------------------------- create_at_jobs () { EXEC_FUNCTION='===begin:create_at_jobs===' local _caj_lines local _caj_type local _caj_url local _caj_tmpfile local _caj_file # create/update list of jobs if [ -f ${urllist} ] then myecho "creating at-jobs ..." # create cleaned url list { flock -s -w ${max_lock_wait} 200 _caj_lines=`grep -E -v "^#|^ *$" ${urllist}` } 200> ${urllist_lock} for LINE in ${_caj_lines} do _caj_type=`echo "${LINE}" | cut -d'|' -f1 | tr '[:upper:]' '[:lower:]'` _caj_url=`echo "${LINE}" | cut -d'|' -f2` _caj_tmpfile=`echo "${LINE}" | cut -d'|' -f3` _caj_file="`basename "${_caj_tmpfile}"`" create_single_at_job "${_caj_type}" "${_caj_url}" "${_caj_file}" done fi EXEC_FUNCTION='===end:create_at_jobs===' } #---------------------------------------------------------------------------------- # remove at-job from at-job list # # input : $1 - at-job number #---------------------------------------------------------------------------------- remove_at_job_from_list () { EXEC_FUNCTION='===begin:remove_at_job_from_list===' local _rjfl_jobnbr="$1" local _rjfl_tmpfile if [ -f ${joblist} ] then # remove job from list _rjfl_tmpfile=`mktemp -p ${tmpdir}` { flock -e -w ${max_lock_wait} 200 cp ${joblist} ${_rjfl_tmpfile} grep -E -v "^${_rjfl_jobnbr}\|" ${_rjfl_tmpfile} > ${joblist} } 200> ${joblist_lock} rm -f ${_rjfl_tmpfile} fi # delete joblist if file size is zero if [ ! -s ${joblist} ] then rm -f ${joblist} fi EXEC_FUNCTION='===end:remove_at_job_from_list===' } #---------------------------------------------------------------------------------- # delete at jobs #---------------------------------------------------------------------------------- delete_at_jobs () { EXEC_FUNCTION='===begin:delete_at_jobs===' myecho "deleting at-jobs ..." # create cleaned url list _daj_lines='' if [ -f ${joblist} ] then { flock -s -w ${max_lock_wait} 200 _daj_lines=`grep -E -v "^#|^ *$" ${joblist}` } 200> ${joblist_lock} fi # separator set to newline (\n) to handle file names which contain spaces correctly if [ -n "${_daj_lines}" ] then _daj_oldifs="$IFS" IFS=$'\n' for LINE in ${_daj_lines} do _daj_atjob_nbr=`echo "${LINE}" | cut -d'|' -f1` # 2013-12-25 10:00:22+01:00 _daj_tmpdate=`echo "${LINE}" | cut -d'|' -f2` _daj_execdate=`date -d "${_daj_tmpdate}" +"%Y-%m-%d %H:%M"` # attention: grep followed by a TAB! -> use Perl-style grep _daj_atq_line=`atq | grep -P "^${_daj_atjob_nbr}\t"` if [ $? -eq 0 ] then # atq output: 968 Wed Dec 25 10:04:00 2013 C root _daj_atq_tmpdate=`echo "${_daj_atq_line}" | tr -s '\t ' ' ' | cut -d' ' -f2-6` _daj_atq_date=`date -d "${_daj_atq_tmpdate}" +"%Y-%m-%d %H:%M"` { flock -s -w ${max_lock_wait} 200 _daj_atjob_url=`grep -E "^${_daj_atjob_nbr}\|" ${joblist} | cut -d'|' -f4` } 200> ${joblist_lock} if [ "${_daj_execdate}" = "${_daj_atq_date}" ] then # removing at-job myecho "- job '${_daj_atjob_nbr}' (${_daj_execdate}) deleted." myecho " url: ${_daj_atjob_url}" atrm ${_daj_atjob_nbr} >/dev/null 2>/dev/null remove_at_job_from_list "${_daj_atjob_nbr}" fi else # removing job from list myecho --warn "- job '${_daj_atjob_nbr}' (${_daj_execdate}) doesn't exist!" remove_at_job_from_list "${_daj_atjob_nbr}" fi done IFS="${_daj_oldifs}" else myecho "no running jobs were found!" 
fi EXEC_FUNCTION='===end:delete_at_jobs===' } #---------------------------------------------------------------------------------- # list active at jobs # # input : $1 - optional: 'nextjob' - show next job only #---------------------------------------------------------------------------------- list_at_jobs () { EXEC_FUNCTION='===begin:list_at_jobs===' _laj_cmd="$1" _laj_tmpfile=`mktemp -p ${tmpdir}` if [ "${_laj_cmd}" = "nextjob" ] then myecho "listing next at-job ..." else myecho "listing at-jobs ..." fi _laj_atq_line=`atq` # separator set to newline (\n) to handle file names which contain spaces correctly oldifs="$IFS" IFS=$'\n' # write output to temp file and sort it based on the date/time stamp { for LINE in ${_laj_atq_line} do # attention: grep followed by a TAB! # atq output: 968 Wed Dec 25 10:04:00 2013 C root _laj_atq_job_nbr=`echo "${LINE}" | tr -s '\t ' ' ' | cut -d' ' -f1` _laj_atq_tmpdate=`echo "${LINE}" | tr -s '\t ' ' ' | cut -d' ' -f2-6` _laj_atq_date=`date -d "${_laj_atq_tmpdate}" +"%Y-%m-%d %H:%M"` # check job url from job list _laj_atjob_url='' if [ -f ${joblist} ] then { flock -s -w ${max_lock_wait} 200 _laj_atjob_url=`grep -E "^${_laj_atq_job_nbr}\|" ${joblist} | cut -d'|' -f4` } 200> ${joblist_lock} fi if [ -n "${_laj_atjob_url}" ] then # job found in job list echo -n "--std|- (${_laj_atq_date}) job '${_laj_atq_job_nbr}' found.|" echo "--std| url: ${_laj_atjob_url}" else echo "--warn|- job '${_laj_atq_job_nbr}' (${_laj_atq_date}) no job details found!|" fi done } | sort -k 2 > ${_laj_tmpfile} IFS="${oldifs}" # output while read LINE do _laj_cmd1=` echo "${LINE}" | cut -d'|' -f1` _laj_line1=`echo "${LINE}" | cut -d'|' -f2` myecho $_laj_cmd1 "$_laj_line1" _laj_cmd2=` echo "${LINE}" | cut -d'|' -f3` if [ -n "${_laj_cmd2}" ] then _laj_line2=`echo "${LINE}" | cut -d'|' -f4` myecho $_laj_cmd2 "$_laj_line2" fi if [ "$1" = "nextjob" ] then break fi done < ${_laj_tmpfile} rm -f ${_laj_tmpfile} EXEC_FUNCTION='===end:list_at_jobs===' } #---------------------------------------------------------------------------------- # check if all at-jobs listed in the job list are running # # return: 0 - all at-jobs exist # 1 - one or more at-jobs don't exist #---------------------------------------------------------------------------------- check_if_jobs_are_running () { EXEC_FUNCTION='===begin:check_if_jobs_are_running===' myecho "checking if a task for each at-job has been created ..." _cijr_flag=0 # grep number of running tasks (only for information purposes) _cijr_atq_count=`atq | wc -l` # grep list of running tasks _cijr_atq_line=" `atq | cut -f1 | sort | tr '\n' ' '` " _cijr_transfile=`mktemp -p ${tmpdir}` if [ -f ${joblist} ] then # job list exists, grep number of jobs (only for information purposes) { flock -s -w ${max_lock_wait} 200 _cijr_job_count=`wc -l ${joblist} | cut -d' ' -f1` _cijr_jobs=`cut -d'|' -f1 ${joblist} | tr '\n' ' '` # write output to transfer file { echo "_cijr_job_count=${_cijr_job_count}" echo "_cijr_jobs=\"${_cijr_jobs}\"" } > ${_cijr_transfile} } 200> ${joblist_lock} # read data from transfer file . ${_cijr_transfile} rm -f ${_cijr_transfile} # check at-jobs in job list and verify if running tasks exist for JOB in ${_cijr_jobs} do echo "${_cijr_atq_line}" | grep -q " ${JOB} " if [ $? 
               -ne 0 ]
            then
                # job doesn't exist
                _cijr_flag=1

                # show crl details
                {
                    flock -s -w ${max_lock_wait} 200
                    _cijr_atjob_tmpdate=`grep -E "^${JOB}\|" ${joblist} | cut -d'|' -f2`
                    _cijr_atjob_url=`grep -E "^${JOB}\|" ${joblist} | cut -d'|' -f4`

                    # write output to transfer file
                    {
                        echo "_cijr_atjob_tmpdate=\"${_cijr_atjob_tmpdate}\""
                        echo "_cijr_atjob_url=\"${_cijr_atjob_url}\""
                    } > ${_cijr_transfile}
                } 200> ${joblist_lock}

                # read data from transfer file
                . ${_cijr_transfile}
                rm -f ${_cijr_transfile}

                _cijr_atjob_date=`date -d "${_cijr_atjob_tmpdate}" +"%s"`
                _cijr_atjob_execdate=`date -d "${_cijr_atjob_tmpdate}" +"%Y-%m-%d %H:%M"`

                myecho --warn "- job '${JOB}' (${_cijr_atjob_execdate}) doesn't exist!"
                myecho " url: ${_cijr_atjob_url}"

                # check if next update is long in the past
                _cijr_tmpfile=''
                if [ -f ${urllist} ]
                then
                    {
                        flock -s -w ${max_lock_wait} 200
                        _cijr_tmpfile=`grep "^${_cijr_atjob_url}" ${urllist} | cut -d'|' -f2`

                        # write output to transfer file
                        echo "_cijr_tmpfile=${_cijr_tmpfile}" > ${_cijr_transfile}
                    } 200> ${urllist_lock}

                    # read data from transfer file
                    . ${_cijr_transfile}
                    rm -f ${_cijr_transfile}
                fi

                if [ -n "${_cijr_tmpfile}" ]
                then
                    _cijr_file="`basename "${_cijr_tmpfile}"`"

                    if [ -f ${crldir}/${_cijr_file} ]
                    then
                        # nextUpdate=Nov 19 13:50:35 2013 GMT
                        _cijr_tmpdate=`${openssl_bin} crl -in ${crldir}/${_cijr_file} -nextupdate -noout | sed 's/^nextUpdate=//g'`
                        _cijr_nextdate=`date -d "${_cijr_tmpdate}" +"%Y-%m-%d %H:%M"`
                        _cijr_new_date=`date -d "${_cijr_nextdate}" +"%s"`

                        if [ `expr ${_cijr_new_date} + ${crl_out_of_date}` -lt ${_cijr_atjob_date} ]
                        then
                            # CRL hasn't been updated for more than the CRL out-of-date period, e.g. 3 months
                            myecho --warn " file: '${_cijr_file}' - hasn't been updated for a long time!"
                        else
                            myecho " file: '${_cijr_file}'"
                        fi
                    else
                        myecho --error " file: '${_cijr_file}' doesn't exist!"
                    fi
                fi
            else
                # job exists, remove it from list
                _cijr_atq_line="`echo "${_cijr_atq_line}" | sed "s/ ${JOB} / /"`"
            fi
        done
    fi

    myecho "checking if remaining running tasks are CRL update tasks ..."
    if [ -n "`echo ${_cijr_atq_line}`" ]
    then
        # remaining running tasks exist - check if they're CRL update tasks
        for JOB in ${_cijr_atq_line}
        do
            _cijr_tmpfile=''

            # read at-job command and compare it with CRL update command
            _cijr_atjob_cmd=`at -c ${JOB} | grep "${atjob_run_cmd}"`
            if [ $? -eq 0 ]
            then
                # CRL update command found in job details - grep execution date/time
                # attention: grep followed by a TAB! -> use Perl-style grep
                _cijr_atq_line=`atq | grep -P "^${JOB}\t"`
                if [ $? -eq 0 ]
                then
                    # atq output: 968 Wed Dec 25 10:04:00 2013 C root
                    _cijr_atq_tmpdate=`echo "${_cijr_atq_line}" | tr -s '\t ' ' ' | cut -d' ' -f2-6`
                    _cijr_atq_date=`date -d "${_cijr_atq_tmpdate}" +"%Y-%m-%d %H:%M"`
                fi

                # extract CRL URL from update command string
                _cijr_atjob_url="`echo "${_cijr_atjob_cmd}" | sed -e "s#.*${atjob_run_cmd} *##" -e 's/ *\"//' -e 's/\" *$//'`"
                if [ -n "${_cijr_atjob_url}" ]
                then
                    # check if a newer job exists for that URL
                    _cijr_atjob_nbr=''
                    if [ -f ${joblist} ]
                    then
                        {
                            flock -s -w ${max_lock_wait} 200
                            _cijr_atjob_nbr=`grep -E "\|${_cijr_atjob_url}" ${joblist} | cut -d'|' -f1`

                            # write output to transfer file
                            echo "_cijr_atjob_nbr=${_cijr_atjob_nbr}" > ${_cijr_transfile}
                        } 200> ${joblist_lock}

                        # read data from transfer file
                        .
${_cijr_transfile} rm -f ${_cijr_transfile} fi if [ -n "${_cijr_atjob_nbr}" ] then # a newer job exists - remove outdated job { flock -s -w ${max_lock_wait} 200 _cijr_atjob_execdate=`grep "^${_cijr_atjob_url}" ${joblist} | cut -d'|' -f2` # write output to transfer file echo "_cijr_atjob_execdate=${_cijr_atjob_execdate}" > ${_cijr_transfile} } 200> ${joblist_lock} # read data from transfer file . ${_cijr_transfile} rm -f ${_cijr_transfile} myecho --info "- job '${JOB}->${_cijr_atjob_nbr}' (${_cijr_atjob_execdate}) a newer entry exists in job list!" myecho " url: ${_cijr_atjob_url}" # delete outdated job atrm ${JOB} else # a newer job doesn't exist myecho --warn "- job '${JOB}' (${_cijr_atq_date}) entry doesn't exist in job list!" myecho " url: ${_cijr_atjob_url}" fi # grep CRL file name from list file _cijr_tmpfile='' if [ -f ${urllist} ] then { flock -s -w ${max_lock_wait} 200 _cijr_tmpfile=`grep "^${_cijr_atjob_url}" ${urllist} | cut -d'|' -f2` # write output to transfer file echo "_cijr_tmpfile=${_cijr_tmpfile}" > ${_cijr_transfile} } 200> ${urllist_lock} # read data from transfer file . ${_cijr_transfile} rm -f ${_cijr_transfile} fi if [ -n "${_cijr_tmpfile}" ] then _cijr_file="`basename "${_cijr_tmpfile}"`" myecho " file: '${_cijr_file}'" fi # else # # no CRL URL found fi # else # not a CRL update at-job fi done fi rm -f ${_cijr_transfile} EXEC_FUNCTION='===end:check_if_jobs_are_running===' return ${_cijr_flag} } #---------------------------------------------------------------------------------- # show at-job details # # input : $1 - at-job number #---------------------------------------------------------------------------------- show_job_details () { EXEC_FUNCTION='===begin:show_job_details===' _sjd_jobnbr="$1" if [ -n "${_sjd_jobnbr}" ] then # attention: grep followed by a TAB! -> use Perl-style grep _sjd_atq_line=`atq | grep -P "^${_sjd_jobnbr}\t"` if [ $? -eq 0 ] then _sjd_tmpdate=`echo "${_sjd_atq_line}" | tr -s '\t ' ' ' | cut -d' ' -f2-6` _sjd_execdate=`date -d "${_sjd_tmpdate}" +"%Y-%m-%d %H:%M"` myecho "job details ..." myecho "- job number : ${_sjd_jobnbr}" myecho "- execution time: ${_sjd_execdate}" myecho "- command script:" myecho if [ ${force_quiet_run} -eq 0 ] then # be verbose at -c ${_sjd_jobnbr} | sed 's#^# #g' else # output to log file loglabel="`date +"%b %e %T"` `hostname` `basename $0`[${EXEC_INSTANCE}]:" at -c ${_sjd_jobnbr} | sed "s#^#${loglabel} #g" >> ${crllog} fi else myecho --warn "job '${_sjd_jobnbr}' doesn't exist!" fi else myecho --warn "job number missing!" 
    fi
    EXEC_FUNCTION='===end:show_job_details==='
}

#----------------------------------------------------------------------------------
# show help
#----------------------------------------------------------------------------------
show_help ()
{
    EXEC_FUNCTION='===begin:show_help==='
    echo "Usage:"
    echo " certs-update-crl --help                         - show this help"
    echo
    echo " certs-update-crl [--quiet] --checkall           - check if all CRL files exist"
    echo
    echo " certs-update-crl [--quiet] --all                - force update of all CRL files"
    echo " certs-update-crl [--quiet] --single             - force update of a single CRL file"
    echo
    echo " certs-update-crl [--quiet] --grepsingleuri      - grep CRL URL from single"
    echo "                                                   certificate and continue"
    echo "                                                   with check of at-jobs"
    echo " certs-update-crl [--quiet] --grepuri            - grep CRL URLs from certificates"
    echo "                                                   and continue with check of at-jobs"
    echo
    echo " certs-update-crl [--quiet] --grepsingleurionly  - grep CRL URI from certificate"
    echo "                                                   only"
    echo " certs-update-crl [--quiet] --grepurionly        - grep CRL URLs from certificates"
    echo "                                                   only"
    echo
    echo " certs-update-crl [--quiet] --showsingleuri      - show URI of a single certificate"
    echo
    echo " certs-update-crl [--quiet] --createjobs         - create initial at-jobs based on"
    echo "                                                   information fetched from CRLs"
    echo " certs-update-crl [--quiet] --deletejobs         - delete at-jobs based on job list"
    echo "                                                   file"
    echo " certs-update-crl [--quiet] --checkjobs          - check if all at-jobs exist"
    echo " certs-update-crl [--quiet] --jobdetails         - show details of a job"
    echo " certs-update-crl [--quiet] --listjobs           - list all active at-jobs"
    echo " certs-update-crl [--quiet] --listnextjob        - list next scheduled at-job"
    echo
    echo " certs-update-crl [--quiet] --searchsingleuri    - search certificates which"
    echo "                                                   are referring to given CRL URL"
    echo " certs-update-crl [--quiet] --searchalluris      - search certificates which are"
    echo "                                                   referring to CRL on the active"
    echo "                                                   CRL list"
    echo
    EXEC_FUNCTION='===end:show_help==='
    exit 0
}

#==================================================================================
# main
#==================================================================================
EXEC_FUNCTION='===begin:main==='

cmd='help'                      # default action
force_single_uri_show=0         # force show of single URL
force_single_url_grep=0         # force grep of single URL
force_url_grep=0                # force grep of URLs
force_crl_rehash=0              # force rehash
force_job_create=0              # force creation of at-job list
force_logging=0                 # force logging to log file
force_quiet_run=0               # force quiet run
header_line='Certificate revocation list (CRL) handling'
certfile=''
jobnbr=''
crl_type=''
crl_url=''
openssl_bin=/usr/bin/openssl

# command line parameter
EXEC_FUNCTION='===begin:get command line parameter==='
if [ $# -gt 0 ]
then
    # read parameter(s)
    while [ $# -gt 0 ]
    do
        case $1 in
            *-all )
                # process all CRL URLs
                cmd='all'
                force_url_grep=1
                force_logging=1
                shift
                ;;
            *-checkall )
                cmd='checkall'
                force_logging=1
                shift
                ;;
            *-checkjobs )
                cmd='checkjobs'
                shift
                ;;
            *-showsingleuri )
                cmd='none'
                certfile="$2"
                force_single_uri_show=1
                shift; shift
                ;;
            *-createjobs )
                # create at-jobs only
                cmd='none'
                force_job_create=1
                force_logging=1
                shift
                ;;
            *-deletejobs )
                cmd='deletejobs'
                force_logging=1
                shift
                ;;
            *-listjobs )
                cmd='listjobs'
                shift
                ;;
            *-listnextjob )
                cmd='listnextjob'
                shift
                ;;
            *-grepsingleuri )
                # grep URL from a single certificate and continue with check of at-jobs
                cmd='checkall'
                certfile="$2"
                force_single_url_grep=1
                shift; shift
                ;;
            *-grepuri )
                # grep URLs from certificates and continue with check of at-jobs
                cmd='checkall'
                force_url_grep=1
shift ;; *-grepsingleurionly ) # grep URL from a single certificate but don't create at-jobs cmd='none' certfile="$2" force_single_url_grep=1 shift; shift ;; *-grepurionly ) # grep URLs from certificates only but don't create at-jobs cmd='none' force_url_grep=1 shift ;; *-help|*-?|/? ) # show command line parameters echo show_help exit 1 ;; *-jobdetails ) # show at-job details cmd='jobdetails' jobnbr="$2" shift; shift ;; *-searchsingleuri ) # search CRL URL in referring certificates cmd='searchsingleuri' echo "$2" | grep -q -E "^http[s]?://" if [ $? -eq 0 ] then # URI # check if OCSP URI with '!' as a separator # or a normal CRL URI has been provided echo "$2" | grep -q '!' if [ $? -eq 0 ] then crl_type='ocsp' else crl_type='crl' fi crl_url="$2" else # crl file if [ -f ${urllist} ] then crlfile="`basename "${2}" | sed -e 's/\.pem$//' -e 's/\.ocsp$//'`" for CNAME in pem ocsp do if [ -f ${crldir}/${crlfile}.${CNAME} ] then crlfile="${crlfile}.${CNAME}" break fi done grep -E -q "\|${crlfile}" ${urllist} if [ $? -eq 0 ] then # fetch URI from crl list crl_type=`grep -E "\|${crlfile}" ${urllist} | cut -d'|' -f1` crl_url=`grep -E "\|${crlfile}" ${urllist} | cut -d'|' -f2` fi else myecho --error "- file '${urllist}' doesn't exist!" exit 1 fi fi shift; shift ;; *-showalluris|*-searchalluris ) # show all certificates which are on the active CRL list cmd='searchalluris' url='' shift ;; *-single ) # process a single CRL URL cmd='single' force_logging=1 echo "$2" | grep -q '!' if [ $? -eq 0 ] then crl_type='ocsp' else crl_type='crl' fi crl_url="$2" shift; shift ;; *-quiet ) # suppress all screen outputs force_quiet_run=1 shift ;; * ) # skip parameter shift ;; esac done else # show command line parameters echo show_help exit 1 fi EXEC_FUNCTION='===end:get command line parameter===' # print screen header if [ ${force_quiet_run} -eq 0 ] then clrhome mecho --info "${header_line}" mecho else loglabel="`date +"%b %e %T"` `hostname` `basename $0`[${EXEC_INSTANCE}]:" echo "${loglabel} ${EXEC_CMD_LINE}" >> ${crllog} fi if [ ! -f ${joblist} -a "${cmd}" != 'deletejobs' -a "${cmd}" != 'none' ] then # a CRL update job list doesn't exist, force the following activities: # 1. grep URLs from certificates # 2. 
check if all CRL files exist force_url_grep=1 cmd='checkall' fi if [ ${force_single_uri_show} -eq 1 ] then # show URL from certificate show_single_uri "${certfile}" elif [ ${force_single_url_grep} -eq 1 ] then # read URL from certificate grep_crl_uri_from_single_cert "${certfile}" elif [ ${force_url_grep} -eq 1 ] then # read URLs from certificates grep_crl_uri_from_certs fi case ${cmd} in all ) # force update of all CRL files process_all_crls ;; checkall ) # check if all CRL files exist check_if_crl_files_exist check_if_jobs_are_running ;; checkjobs ) check_if_jobs_are_running ;; deletejobs ) delete_at_jobs ;; jobdetails ) show_job_details "${jobnbr}" ;; listjobs ) list_at_jobs ;; listnextjob ) list_at_jobs 'nextjob' ;; searchsingleuri ) search_crl_uri "${crl_url}" ;; searchalluris ) noheader='' grep -E -v "^#|^ *$|\|ldap[s]?:" ${urllist} | cut -d'|' -f2 | sed 's#|http[s]*:#http[s]?:#g' | sort | \ while read crl_url do search_crl_uri "${crl_url}" "${noheader}" noheader='noheader' echo done ;; showcrluri ) extract_crl_uri ;; single ) # process single CRL file delay_script_exec "${pgmname}.*-single" ${max_lock_wait} if process_single_crl "${crl_type}" "${crl_url}" then force_crl_rehash=1 force_job_create=0 fi ;; none|* ) # do nothing ;; esac if [ ${force_job_create} -eq 1 ] then create_at_jobs fi if [ ${force_crl_rehash} -eq 1 ] then myecho "updating hashes ..." /var/install/bin/certs-update-hashes --quiet --crldir fi if [ ${force_url_grep} -eq 1 ] then myecho --log "finished." else myecho "finished." fi if [ ${force_quiet_run} -eq 0 ] then anykey fi EXEC_FUNCTION='===end:main===' #================================================================================== # end #================================================================================== exit 0