#!/bin/sh
# NAME:		phoenix
# PURPOSE:	workflow tool for auto-processing SCIENCE data with dfos tools
# AUTHOR:	Reinhard Hanuschik
# DATE:		2013-04, developed from autoDaily and reproQC
# VERSION:	1.0 -- 
#		1.0.1- qc1Ingest added for IDP statistics
#		1.0.2- further outdated content added (2013-10-23)
#		1.0.3- hacks to cope with SLICER situation, to be rolled back later (2013-10-28)
#		1.1 -- suppression of 60.A files implemented (2013-11-04)
#		1.2 -- checks for proper DFO_FILE_NAME (2014-02-05)
#		1.3 -- offers handling of 60. files for testing; CHECK_LOCAL for mcalibs; ANC/merge2Fits obsolete; sorting by RAW_TYPEs for INT (2014-08-20)
#		1.4 -- new: QC job concentration, CHECK_LOCAL and CAL_DIR_LOCAL for muse-like installations (2015-01-12)
#		2.0 -- new: MCALIB mode, extended workflow and configuration (2015-03-05)
#		2.1 -- DATA_PGI, JOB_PGI; modified logic for filterRaw; bug fixes for MCALDOWN files, QCBQS_TYPE, writeStats (2015-10-01)
#		2.1.1- RAWDOWN_SLEEP added (2015-10-15)
#		2.2 -- new flag -n to skip CLEANUP in step -M; optional SCIENCE certification step CERTIF_REQUIRED (2016-01-29)
#		2.2.1- TOT_TIME1 for QC execution catched for case SER (MUSE); monthly mode disabled for MUSE (2016-02-02)
#		2.3 -- supports MASTER/SLAVE operational model (2016-04-01)
#		2.3.1- added check for DATE being a real date (e.g. 2016-04-31), lines :170 and :1290 (2016-05-30)
#		3.0 -- supports DEEP; supports target AB selection for DEEP; CHECK_LOCAL and CAL_DIR_LOCAL disabled (2016-12-05)
#		3.0.1- error condition added for non-existing run_IDs in -M mode (2017-03-14)
#		3.1 -- CALIB as DP_CATG accepted; optional non-std resource file in config file accepted (2017-04-03)
#		3.1.1- minor improvements for the sequence numbering for QCJOB_CONCENTRATION; procedure for pseudo-dates, to prepare for P100 (2017-05-04)
#		3.1.2- added CERTIF_PROCATG for the CERTIF_REQUIRED option; cleaned section :2191 from uves-related content; section for ORIG_AB_DIR improved; bug fix for TOT_TIME2 and DEEP_MODE in line :652 (2017-07-03)
#		3.1.3- new optional monitor PGI MON_PGI (called after phoenixMonitor) (2017-07-12)
#		3.1.4- small bug fixes for QCJOB_CONCENTRATION=YES around lines :1343 and :1483 (2017-10-02)
#		3.2 -- improved handling of TARGET_SELECT=YES in DEEP mode (2017-12-27)
#		3.2.1- enabled for P100+ runIDs (sect. 0.3.1 and 4.3); updates call of phoenixMonitor in JOBS_INGEST (2018-04-23)
#		3.2.2- some modifications done for TOT_TIME1 stats for step2/stacking; new key AB_FILTER_COMMENT (optional, used for getStatusAB) (2018-06-01)
# 		3.2.3- minor bug fixes for MCAL_MODE; added config key ACCEPT_NOSCORE for MCAL_MODE (2019-01-17)
#		3.3 -- new option -x, for exceptionally enforcing keeping the AB_DIR/HISTO info for the MCAL_MODE (2019-02-25)
#		4.0 -- support COMPLETE mode; cascadeMonitor removed; adapted to new runID scheme (2020-02-06)
#		4.0.1- some minor stabilization for COMPLETE_MODE (2020-03-27)
#		4.1 -- support SCIENCE AB creation instead of download (2020-10-22)
#		4.1.1- DP_CATG=ANY: bug fix for CREATE_SCIAB=YES (2020-11-23)
#		4.1.2- bug fix for NUM_AB_REJ in :2491 (2021-01-18)
#		4.1.3- different call of MON_PGI dependent on DEEP_MODE (BWo, 2022-07-13)
#		4.2 -- support both DFOS-v1 and qcFlow naming schemes for MCALs (2023-06-15)
#
# CONFIG:	config.phoenix (and second config.phoenix file for MCALIB mode); 
#		reads also config.createAB for DRS_TYPE and N_VCAL_LIST (mode -X),
#		config.processQC for QCBQS_TYPE
# USED TOOLS:	ngasClient, createJob plus other dfos tools
# COMMENTS:	This tool uses the dfos tool suite.
#		It requires its own workspace plus ins-specific modifications.
#		Tool is *not* enabled for parallel execution.
#		60.A files are suppressed unless ACCEPT_060=YES (or MCALIB production).
#		Support for DEEP mode (DEEP_MODE=YES), organized by run_ID,
#		separate from normal PHOENIX by -r and different config.phoenix; but better don't call on the same account in mixed mode
#		Target selection: by specifying -a <dpc ab>. Helps in cases of very large runs. If you call -P -a <...> then 
#		finish with -M -a <...> before calling the next AB.
# NOTES: 	around :2495: call calselector without certif_flag; needed?
#		createPseudoDate: prepared for new runID scheme P105+; to be verified!
# =========================================================================
TOOL_NAME=phoenix
TOOL_VERSION=4.2

# =========================================================================
# 0.1 get options
# =========================================================================

# Workflow defaults; each of these is switched by a command-line option below.
CREATE_ONLY=NO
PROCESS_ONLY=NO
MOVEP_ONLY=NO
PROCESS_PART=NO
SUPPRESS_YN=NO
MCAL_MODE=NO
CLEANUP=YES
OVERRIDE_HISTO=YES
export TARGET_SELECT=NO
export DEEP_MODE=NO

MONTHLY_MODE=NO	#-R: hidden technical param. for the tool to know whether in monthly mode or not

# Guard: refuse to run on a non-phoenix account ($THIS_IS_PHOENIX is expected
# from the account's resource file). The leading "Q" protects the test against
# an unset/empty variable under plain sh.
# NOTE(review): this error path exits with status 0 — intentional? TODO confirm
if [ Q$THIS_IS_PHOENIX != QYES ]
then
	echo "***ERROR: This is not a phoenix installation. Exit."
	exit
fi

SCP_TIMEOUT="ConnectTimeOut=1"         # for scp from qcweb

# Option parsing; user documentation lives in $DFO_DOC_DIR/phoenix.h.
# -r implies DEEP mode; -a implies target selection (single dpc AB).
while getopts a:d:m:r:Y:CMPRXnpqsxhv OPTION
do
	case "$OPTION" in
	d ) DATE=$OPTARG ;;
	m ) MONTH=$OPTARG ;;
	r ) RUN_ID=$OPTARG ; export DEEP_MODE=YES;;
	a ) export DPC_AB=$OPTARG ; export TARGET_SELECT=YES ;;
# workflow options:
	C ) CREATE_ONLY=YES ;;
	M ) MOVEP_ONLY=YES ;;
	P ) PROCESS_ONLY=YES ;;
# internal use:
	n ) CLEANUP=NO ;;
	p ) PROCESS_PART=YES ;;
	q ) QCJOB_CONCENTRATION=YES ;;
	s ) SUPPRESS_YN=YES ;;
	x ) OVERRIDE_HISTO=NO ;;
	R ) MONTHLY_MODE=YES ;;
	X ) MCAL_MODE=YES ;;
	Y ) NEW_SIZE=$OPTARG ;;
# help and version:
	h ) cat $DFO_DOC_DIR/phoenix.h | more ; exit ;;
	v ) echo "$TOOL_VERSION"; exit ;;
	? ) cat $DFO_DOC_DIR/phoenix.h ; exit ;;
	esac
done

# be tolerant for RUN_ID with or without ()
if [ Q$RUN_ID != Q ]
then
	export RUN_ID=`echo $RUN_ID | sed "s/(//" | sed "s/)//"`
fi

# workspace definition, different from $RELEASE
# config.phoenix must exist and be non-empty (-s)
if [ ! -s $DFO_CONFIG_DIR/config.phoenix ]
then
	echo "***ERROR: no \$DFO_CONFIG_DIR/config.phoenix found, can't do. Exit."
	exit -1
fi
CONFIG=$DFO_CONFIG_DIR/config.phoenix 

if [ ! -s $DFO_CONFIG_DIR/OCA/config.createAB ]
then
	echo "***ERROR: no \$DFO_CONFIG_DIR/OCA/config.createAB found, can't do. Exit."
	exit -1
fi

# FILTER_RAW is read from the createAB config; anything but YES collapses to NO
FILTER_RAW=`grep "^FILTER_RAW" $DFO_CONFIG_DIR/OCA/config.createAB | awk '{print $2}'`
if [ Q$FILTER_RAW != QYES ]
then
	FILTER_RAW=NO
fi

# Check for non-standard resource file
# (optional; if configured it is sourced into the current shell)
OTHER_RESOURCE=`grep "^OTHER_RESOURCE"	$CONFIG | awk '{print $2}'`
if [ Q$OTHER_RESOURCE != Q ] 
then
	if [ ! -s $HOME/$OTHER_RESOURCE ]
	then
		echo "***ERROR: OTHER_RESOURCE $OTHER_RESOURCE is defined but could not be found. Exit."
		exit -1
	fi
	source $HOME/$OTHER_RESOURCE
fi

# for calling moveProducts in PHOENIX environment
export CALLED_BY_PHOENIX=YES

# consistency check: if MCAL_CONFIG set, but tool called without -X: refuse
# if MCAL_CONFIG not set but tool called with -X: refuse

# check if MCAL_MODE is set; if so, switch to other config file
MCAL_CONFIG=`grep "^MCAL_CONFIG"	$CONFIG | awk '{print $2}'`
if [ $MCAL_MODE = NO ] && [ Q$MCAL_CONFIG != Q ]
then
	echo "***ERROR: You cannot call the tool without option -X, $CONFIG is configured for \$MCAL_MODE = YES. Exit."
	exit -1
fi

if [ $MCAL_MODE = YES ]
then
# in -X mode the MCAL config file replaces $CONFIG for everything that follows
	CONFIG_FILE=`grep "^MCAL_CONFIG" 	$CONFIG | awk '{print $2}'`
	if [ Q$CONFIG_FILE = Q ]
	then
		echo "***ERROR: You cannot call the tool in option -X. Exit."
		exit -1
	fi

	if [ ! -s $DFO_CONFIG_DIR/$CONFIG_FILE ]
	then
		echo "***ERROR: no $CONFIG_FILE found, can't do. Exit."
		exit -1
	else
		CONFIG=$DFO_CONFIG_DIR/$CONFIG_FILE
# MCAL_RESOURCE is mandatory in -X mode and is sourced into this shell
		MCAL_RESOURCE=`grep "^MCAL_RESOURCE"	$CONFIG | awk '{print $2}'`
		if [ Q$MCAL_RESOURCE = Q ]
		then
			echo "***ERROR: no \$MCAL_RESOURCE defined, can't do. Exit."
			exit -1
		elif [ ! -s $HOME/$MCAL_RESOURCE ]
		then
			echo "***ERROR: $MCAL_RESOURCE defined but not existing. Exit."
			exit -1
		else
			source $HOME/$MCAL_RESOURCE
			if [ Q$MONTH != Q ]
			then
				echo "*** INFO: Tool running in mcalib mode, using $CONFIG_FILE and $MCAL_RESOURCE."
			fi
		fi
	fi
elif [ $OVERRIDE_HISTO = NO ]
then
# -x only makes sense together with -X
	echo "*** ERROR: you can call the option -x only in mcalib mode (together with option -X). Exit."
	exit -1
fi

# check for DEEP_CONFIG
# DEEP mode (-r <run_ID>) uses its own config file and, optionally, its own
# resource file; both are looked up in the currently selected $CONFIG.
if [ $DEEP_MODE = YES ]
then
	DEEP_CONFIG=`grep "^DEEP_CONFIG"	$CONFIG | awk '{print $2}'`
	if [ Q$DEEP_CONFIG = Q ]
	then
		echo "***ERROR: you cannot call the tool with option -r (DEEP mode), no key DEEP_CONFIG configured. Exit."
		exit -1
	fi

	if [ ! -s $DFO_CONFIG_DIR/$DEEP_CONFIG ]
	then
		echo "***ERROR: no $DEEP_CONFIG found, can't do. Exit."
		exit -1
	else
		CONFIG=$DFO_CONFIG_DIR/$DEEP_CONFIG
	fi 

# DEEP_RESOURCE is optional; without it the normal .dfosrc applies
	DEEP_RESOURCE=`grep "^DEEP_RESOURCE" $CONFIG | awk '{print $2}'`
	if [ Q$DEEP_RESOURCE = Q ]
	then
		echo "***INFO: no \$DEEP_RESOURCE defined, we use the normal .dfosrc."
	elif [ ! -s $HOME/$DEEP_RESOURCE ]
	then
		echo "***ERROR: $DEEP_RESOURCE defined but not existing. Exit."
		exit -1
	else
		source $HOME/$DEEP_RESOURCE
		echo "*** INFO:    Tool running in DEEP mode, using $CONFIG and $DEEP_RESOURCE."
		if [ $TARGET_SELECT = YES ]
		then
			if [ $MOVEP_ONLY = YES ]
			then
				echo "*** WARNING: Target selection mode for option -M is not strictly supported, see dialogue further down. Hit return:"
				read input
			else
				echo "*** INFO:    Tool running in target selection mode, selected is $DPC_AB."
			fi

# target selection only works on 'deep combined' (dpc) ABs
			CHECK_DPC=`echo $DPC_AB | grep dpc`
			if [ Q$CHECK_DPC = Q ]
			then
				echo "*** ERROR:   Target selection mode can only work on 'deep combined' (dpc) ABs. Exit."
				exit -1
			fi
		fi
	fi
fi

# find PROC_INSTRUMENT
# assumption: user has logged in with proper .dfosrc already; then $CONFIG is found,
# and $PROC_INSTRUMENT is used to check whether this is the proper instrument.
PROC_INSTRUMENT=`grep "^PROC_INSTRUMENT" $CONFIG | awk '{print $2}'`

export PATH=$PATH:$DFO_BIN_DIR

if [ ! -d $TMP_DIR ]
then
	mkdir $TMP_DIR
fi

if [ Q$DATE != Q ]
then
	echo "======================================================================================================"
	echo "			phoenix for $DATE started ..."
fi

ORIG_LST_DIR="public_html/${PROC_INSTRUMENT}/REPORT"
# check for COMPLETE_MODE: this mode intended to be driven by SCIENCE, but reprocessing all required mcalibs as listed in ABs --> no certif needed, not even QC reports
COMPLETE_MODE=`grep "^COMPLETE_MODE"	$CONFIG | grep "$PROC_INSTRUMENT" | awk '{print $3}'`
if [ Q$COMPLETE_MODE != QYES ]
then
        COMPLETE_MODE=NO
fi

# COMPLETE_MODE only applies to dates up to the configured COMPLETE_DATE;
# later dates silently fall back to normal mode.
if [ $COMPLETE_MODE = YES ] && [ Q$DATE != Q ]
then
# check if applicable
	COMPLETE_DATE=`grep "^COMPLETE_DATE" 		$CONFIG | grep "$PROC_INSTRUMENT" | awk '{print $3}'`   # check if DATE is earlier than COMPLETE_DATE, otherwise turn off
	if [ Q$COMPLETE_DATE = Q ]
	then
		echo "***ERROR: no COMPLETE_DATE configured, can't do. Exit."
		exit -1
	else
# dates compared numerically after stripping the dashes (YYYYMMDD)
		DATE1=`echo $DATE | sed "s/-//g"`
		COMPLETE_DATE1=`echo $COMPLETE_DATE | sed "s/-//g"`
		if [ $DATE1 -gt $COMPLETE_DATE1 ]
		then
# NOTE(review): $PLOG is only defined further down (sect. 0.2); at this point
# 'tee -a' has no file argument and just passes stdin through — TODO confirm
			echo "			$DATE is later than configured COMPLETE_DATE=$COMPLETE_DATE, COMPLETE_MODE set to NO." | tee -a $PLOG
			COMPLETE_MODE=NO
		else
			echo "			$DATE is earlier than configured COMPLETE_DATE=$COMPLETE_DATE, COMPLETE_MODE=YES." | tee -a $PLOG
		fi
	fi
fi

if [ $COMPLETE_MODE = YES ]
then
	if [ $CREATE_ONLY = NO ] && [ $PROCESS_ONLY = NO ] && [ $MOVEP_ONLY = NO ]
	then
		echo "***ERROR: in COMPLETE_MODE, you can call the tool only with options -C, -P, or -M."
		exit
	fi
fi

# export to all pgi's
export COMPLETE_MODE=$COMPLETE_MODE

if [ $COMPLETE_MODE = YES ]
then
# get other params for COMPLETE_MODE=YES
	COMPLETE_CALIBQC=`grep "^COMPLETE_CALIBQC"	$CONFIG | grep "$PROC_INSTRUMENT" | awk '{print $3}'`	# if YES, we create new QC plots and ingest into QC1 table
	CAL_ORIG_AB_DIR=`grep "^CAL_ORIG_AB_DIR"	$CONFIG | grep "$PROC_INSTRUMENT" | awk '{print $3}'`	# original AB_DIR for historical CALIB QC info 
	CAL_ORIG_PLT_DIR=`grep "^CAL_ORIG_PLT_DIR"	$CONFIG | grep "$PROC_INSTRUMENT" | awk '{print $3}'`	# original PLT_DIR for historical CALIB QC info 
	CREATE_TXT_FILE=`grep "^CREATE_TXT_FILE"	$CONFIG | grep "$PROC_INSTRUMENT" | awk '{print $3}'`	# YES: txt files can be useful but take long

	if [ Q$COMPLETE_CALIBQC != QYES ]
	then
		COMPLETE_CALIBQC=NO
	fi
	export COMPLETE_CALIBQC=$COMPLETE_CALIBQC	# for JOB_PGI
fi

if [ $COMPLETE_MODE = YES ] && [ Q$DATE != Q ]
then
# decide if this date qualifies for HISTO_AB (the default) or for RAW_HDR (special, source for SCIENCE files)
# RAWFILE_SOURCE is matched with decreasing specificity: exact DATE, then
# month, then year; if none of them says RAW_HDR the default HISTO_AB applies.
# either specific for this DATE:
	RAWFILE_SOURCE=`grep "^RAWFILE_SOURCE"		$CONFIG | grep "$DATE" | awk '{print $3}'`
	if [ Q$RAWFILE_SOURCE != QRAW_HDR ]
	then
# ... or for the entire MONTH
		CHECK_MONTH=`echo $DATE | cut -c1-7`
		RAWFILE_SOURCE=`grep "^RAWFILE_SOURCE"	$CONFIG | grep "[[:space:]]${CHECK_MONTH}[[:space:]]" | awk '{print $3}'`
		if [ Q$RAWFILE_SOURCE != QRAW_HDR ]
		then
# ... or for the entire YEAR
			CHECK_YEAR=`echo $DATE | cut -c1-4`
			RAWFILE_SOURCE=`grep "^RAWFILE_SOURCE"	$CONFIG | grep "[[:space:]]${CHECK_YEAR}[[:space:]]" | awk '{print $3}'`
# still nothing found? Then it's the default, HISTO_AB
			if [ Q$RAWFILE_SOURCE != QRAW_HDR ]
			then
				RAWFILE_SOURCE=HISTO_AB
			fi
		fi
	fi
fi

# checks for DATE format
# (skipped in DEEP mode, where DATE is a pseudo-date, see createPseudoDate)
if [ Q$DATE != Q ] && [ $DEEP_MODE = NO ]
then
# wc -c counts the trailing newline, hence 11 for a 10-char date
	CHECK=`echo $DATE | wc -c`
	if [ $CHECK != 11 ]
	then
       		echo "***ERROR: wrong date format: $DATE; should be: 2004-04-04; start again."
        	exit -1
	fi

# check if DATE is a real civil_date (ex for DEEP_MODE where we know it's a dummy)
# round-trip through qcdate: a fake date (e.g. 2016-04-31) does not survive it
	CHECK_REAL=`qcdate $DATE`
	CHECK_REAL1=`qcdate $CHECK_REAL`
	if [ $DATE != $CHECK_REAL1 ]
	then
		echo "***ERROR: $DATE does not exist; start again."
		exit -1
	fi
fi

if [ Q$MONTH != Q ]
then
# for certain instruments processing in MONTH mode is too big; add them here as appropriate
	if [ $PROC_INSTRUMENT = MUSE ]
	then
		echo "*** WARNING: monthly mode is disabled for $PROC_INSTRUMENT."
		exit
	fi

	CHECK=`echo $MONTH | wc -c`
	if [ $CHECK != 8 ]
	then
       		echo "***ERROR: wrong month format: $MONTH; should be: 2004-04; start again."
        	exit -1
	fi
fi

# -C, -P, -M are mutually exclusive
if ([ $CREATE_ONLY = YES ] && ([ $MOVEP_ONLY = YES ] || [ $PROCESS_ONLY = YES ])) || ([ $MOVEP_ONLY = YES ] && [ $PROCESS_ONLY = YES ] )
then
	echo "***ERROR: you cannot combine any of -C|-P|-M. Exit."
	exit
fi

# FULL_MODE = none of -C/-P/-M given: run the complete workflow in one go
if [ $CREATE_ONLY = YES ] || [ $MOVEP_ONLY = YES ] || [ $PROCESS_ONLY = YES ]
then
	FULL_MODE=NO
else
	FULL_MODE=YES
fi

# ACCEPT_060
# MCAL_MODE=YES: always
# MCAL_MODE = NO: configuration
if [ $MCAL_MODE = YES ]
then
	ACCEPT_060=YES  #always accepted in MCAL_MODE
else
	ACCEPT_060=`grep "^ACCEPT_060" $CONFIG | awk '{print $2}'`
	if [ Q$ACCEPT_060 != QYES ]
	then
		ACCEPT_060=NO
	fi
fi

# for $MCAL_MODE = YES: daily mode only after confirmation (otherwise might create confusion)
if [ $MCAL_MODE = YES ] && [ Q$MONTH = Q ] && [ $MONTHLY_MODE = NO ] && [ $MOVEP_ONLY = NO ]
then
	echo "You have called the tool in daily mode for option -X. The usual way is to call in monthly mode, 
to have the association pool filled properly. The daily mode could make sense for debugging or in special situations.
Confirm that you want to continue (Y/N) (N):"
	read CONFIRM
	if [ Q$CONFIRM != QY ]
	then
		echo "Think about it."
		exit
	fi
fi

# QCJOB_CONCENTRATION: only checked for $MONTH
# (in daily mode the -q command-line flag may already have set it)
if [ Q$MONTH != Q ]
then
	QCJOB_CONCENTRATION=`grep "^QCJOB_CONCENTRATION"	$CONFIG | awk '{print $2}'`
fi

if [ Q$QCJOB_CONCENTRATION != QYES ]
then
	QCJOB_CONCENTRATION=NO
fi

# CERTIF_REQUIRED: only checked for IDPs
# If YES, a human certification step is inserted, which is incompatible with
# FULL_MODE and with monthly mode (both checked below).
if [ $MCAL_MODE = NO ]
then
	CERTIF_REQUIRED=`grep "^CERTIF_REQUIRED"	 $CONFIG | awk '{print $2}'`
	if [ Q$CERTIF_REQUIRED != QYES ]
	then
		CERTIF_REQUIRED=NO
	fi

	if [ Q$CERTIF_REQUIRED = QYES ] && [ $FULL_MODE = YES ]
	then
		echo "*** ERROR: you cannot call the tool without option -C/-P/-M since you have configured CERTIF_REQUIRED=YES. Check out."
		exit
	fi

	if [ Q$CERTIF_REQUIRED = QYES ] && ([ $MONTHLY_MODE = YES ] || [ Q$MONTH != Q ])
	then
		echo "*** ERROR: you cannot call the tool in monthly mode since you have configured CERTIF_REQUIRED=YES. Check out."
		exit
	fi

	if [ Q$CERTIF_REQUIRED = QYES ]
	then
# CERTIF_PGI: plugin executable that implements the certification dialogue
		CERTIF_PGI=`grep "^CERTIF_PGI" 	$CONFIG | grep $PROC_INSTRUMENT | awk '{print $3}'`
		if [ Q$CERTIF_PGI = Q ]
		then
			echo "*** ERROR: you have to configure \$CERTIF_PGI. Check out."
			exit
		fi

		if [ ! -s $DFO_BIN_DIR/$CERTIF_PGI ]
		then
			echo "*** ERROR: $DFO_BIN_DIR/$CERTIF_PGI not found. Check out."
			exit
		fi

# CERTIF_PROCATG is mandatory only in DEEP mode
		CERTIF_PROCATG=`grep "^CERTIF_PROCATG" $CONFIG | grep "${PROC_INSTRUMENT}" | awk '{print $3}'`
		if [ Q$CERTIF_PROCATG = Q ] && [ $DEEP_MODE = YES ]
		then
			echo "*** ERROR: you have to specify \$CERTIF_PROCATG since CERTIF_REQUIRED=YES and DEEP_MODE=YES. Check out."
			exit
		fi
	fi

# CERTIF_REQUIRED=NO --> the prod_comm files are automatically filled, e.g. by pgi_phoenix_..._postQC, so we should start filling them from scratch
fi

# MCAL-only tuning knobs, each with a safe default
if [ $MCAL_MODE = YES ]
then
	REPLACE_LOGIC=`grep "^REPLACE_LOGIC" 	$CONFIG | awk '{print $2}'`
	if [ Q$REPLACE_LOGIC != QSELECTIVE ]
	then
		REPLACE_LOGIC=COMPLETE
	fi

	OCA_SCHEME=`grep "^OCA_SCHEME"		$CONFIG | awk '{print $2}'`
	if [ Q$OCA_SCHEME != QVERSIONED ]
	then
		OCA_SCHEME=STANDARD
	fi

# sleep (seconds) between raw download batches; default 60
	RAWDOWN_SLEEP=`grep "^RAWDOWN_SLEEP"    $CONFIG | awk '{print $2}'`
        if [ Q$RAWDOWN_SLEEP = Q ]
        then
                RAWDOWN_SLEEP=60
        fi
fi

HDR_DOWN=`grep "^HDR_DOWN"	$CONFIG | awk '{print $2}'`
if [ Q$HDR_DOWN != QYES ]
then
	HDR_DOWN=NO
fi

# data category to process; defaults to SCIENCE (CALIB and ANY also accepted)
DP_CATG=`grep "^DP_CATG"	$CONFIG | grep $PROC_INSTRUMENT | awk '{print $3}'`
if [ Q$DP_CATG = Q ]
then
	DP_CATG=SCIENCE
fi

CREATE_SCIAB=NO	# create SCIENCE ABs instead of downloading them

# ORIG_AB_DIR:  mandatory if MCAL_MODE=NO
# 		optional for MCAL_MODE=YES, to catch certification information
# note: we assume here that changes in ORIG_AB_DIR happen only at the end/begin of months. If this is not true, the ORIG_AB_DIR must be adapted.
if [ $MCAL_MODE = NO ] && [ $DEEP_MODE = NO ]
then
# the config may carry several (dir, valid-until-date) pairs; pick the first
# entry whose date is not earlier than the processing date
	grep "^ORIG_AB_DIR"	$CONFIG | grep $PROC_INSTRUMENT | awk '{print $3,$4}' | sort -u -k2 > $TMP_DIR/ph_ORIG_AB_DIR
	if [ -s $TMP_DIR/ph_ORIG_AB_DIR ]
	then
		for REF in `cat $TMP_DIR/ph_ORIG_AB_DIR | awk '{print $2}'`
		do
			REF1=`echo $REF | sed "s/-//g"`	
			if [ Q$DATE != Q ]
			then
				DATE1=`echo $DATE | sed "s/-//g"`
			else
# monthly mode: compare with the first of the month
				MONTH1=`echo $MONTH | sed "s/-//g"`
				DATE1=`echo ${MONTH1}01`
			fi
	
			if [ $DATE1 -le $REF1 ]
			then
				ORIG_AB_DIR=`grep $REF $TMP_DIR/ph_ORIG_AB_DIR | awk '{print $1}'`
				break
			fi
		done
	else
		echo "\$ORIG_AB_DIR doesn't seem to be configured properly in $CONFIG. Exit."
		exit -1
	fi

# DFOS_NOAB_DATE: from that date on no SCIENCE ABs exist in DFOS directory on qcweb
	DFOS_NOAB_DATE=`grep "^DFOS_NOAB_DATE"	$CONFIG | awk '{print $3}'`
	if [ "Q$DFOS_NOAB_DATE" = Q ]
	then
		DFOS_NOAB_DATE=2020-12-01	
# if DATE is after that date, SCIENCE ABs don't exist on qcweb any longer,
				# unless they have been created in an earlier processing event
				# this is the default DATE, can be overwritten by a configured value
				# applicable only if ORIG_AB_DIR=public_html/${DFO_INSTRUMENT}/logs, see below
	fi

	if [ $COMPLETE_MODE = NO ] && [ Q$DATE != Q ]
	then
		DATE1=`echo $DATE | sed "s/-//g"`
		DFOS_NOAB_DATE1=`echo $DFOS_NOAB_DATE | sed "s/-//g"`

# if ORIG_AB_DIR != public_html/${DFO_INSTRUMENT}/logs: AB source is from a previous release and not from DFOS_OPS
		if [ $DATE1 -ge $DFOS_NOAB_DATE1 ] && [ Q$ORIG_AB_DIR = "Qpublic_html/${DFO_INSTRUMENT}/logs" ]
		then
			CREATE_SCIAB=YES
			CREATE_TXT_FILE=`grep "^CREATE_TXT_FILE"	$CONFIG | grep "$PROC_INSTRUMENT" | awk '{print $3}'`	# YES: txt files can be useful but take long
		fi
	fi

# DEEP_MODE; no support for versioning
elif [ $DEEP_MODE = YES ]
then
	ORIG_AB_DIR=`grep "^ORIG_AB_DIR"	$CONFIG | grep $PROC_INSTRUMENT | awk '{print $3}'`
fi

if [ Q$ORIG_AB_DIR = Q ] && [ $MCAL_MODE = NO ]
then
	echo "Selected DATE $DATE doesn't seem to be included in configured time ranges for \$ORIG_AB_DIR. Check and try again. Exit."
	exit
fi
export ORIG_AB_DIR=$ORIG_AB_DIR

# COMPLETE_MODE=YES: versioned OCA rules for CALSELECTOR and DFOS
# with v4.1, also needed for normal IDPs if CREATE_SCIAB=YES
if [ $COMPLETE_MODE = YES ] || [ $CREATE_SCIAB = YES ]
then
# for CALSELECTOR 
# same (value, valid-until-date) selection pattern as for ORIG_AB_DIR above
	grep "^CALSEL_OCA_RULE"	$CONFIG | grep $PROC_INSTRUMENT | awk '{print $3,$4}' | sort -u -k2 > $TMP_DIR/ph_CALSEL
	if [ -s $TMP_DIR/ph_CALSEL ]
	then
		for REF in `cat $TMP_DIR/ph_CALSEL | awk '{print $2}'`
		do
			REF1=`echo $REF | sed "s/-//g"`	
			if [ Q$DATE != Q ]
			then
				DATE1=`echo $DATE | sed "s/-//g"`
			else
				MONTH1=`echo $MONTH | sed "s/-//g"`
				DATE1=`echo ${MONTH1}01`
			fi
	
			if [ $DATE1 -le $REF1 ]
			then
				CALSEL_OCA_RULE=`grep $REF $TMP_DIR/ph_CALSEL | awk '{print $1}'`
				CALSEL_OCA_RULE="$DFO_CONFIG_DIR/CALSELECTOR/$CALSEL_OCA_RULE"
				break
			fi
		done
	else
		echo "\$CALSEL_OCA_RULE doesn't seem to be configured properly in $CONFIG. Please provide at least one entry."	
		exit -1
	fi

	if [ ! -s $CALSEL_OCA_RULE ]
	then
		echo "CALSEL_OCA_RULE $CALSEL_OCA_RULE doesn't seem to exist. Exit."
		exit -1
	fi

# for DFOS OCA rules
	grep "^DFOS_OCA_RULE"	$CONFIG | grep $PROC_INSTRUMENT | awk '{print $3,$4}' | sort -u -k2 > $TMP_DIR/ph_DFOS
	if [ -s $TMP_DIR/ph_DFOS ]
	then
		for REF in `cat $TMP_DIR/ph_DFOS | awk '{print $2}'`
		do
			REF1=`echo $REF | sed "s/-//g"`	
			if [ Q$DATE != Q ]
			then
				DATE1=`echo $DATE | sed "s/-//g"`
			else
				MONTH1=`echo $MONTH | sed "s/-//g"`
				DATE1=`echo ${MONTH1}01`
			fi
	
			if [ $DATE1 -le $REF1 ]
			then
				DFOS_OCA_RULE=`grep $REF $TMP_DIR/ph_DFOS | awk '{print $1}'`
				DFOS_OCA_RULE="$DFO_CONFIG_DIR/OCA/$DFOS_OCA_RULE"
				break
			fi
		done
	else
		echo "\$DFOS_OCA_RULE doesn't seem to be configured properly in $CONFIG. Can't do. Please provide at least one entry."	
		exit -1
	fi

	if [ ! -s $DFOS_OCA_RULE ]
	then
		echo "DFOS_OCA_RULE $DFOS_OCA_RULE doesn't seem to exist. Exit."
		exit -1
	fi

# install the selected versioned rule as the active association file
	cp $DFOS_OCA_RULE $DFO_CONFIG_DIR/OCA/${DFO_INSTRUMENT}_association.h	
fi

# by definition, for CREATE_SCIAB=YES the RAWFILE_SOURCE is RAW_HDR
if [ $CREATE_SCIAB = YES ]
then
	RAWFILE_SOURCE=RAW_HDR
fi

# =========================================================================
# 0.2 Initialize
# =========================================================================

# no arguments at all: print the help and leave
if [ $# = 0 ]
then
	cat $DFO_DOC_DIR/phoenix.h
	exit
fi

case $MCAL_MODE in
	"NO"  ) MODE=SCIENCE ;;
	"YES" ) MODE=CALIB ;;
esac

# DEEP mode logs into its own file, so normal and DEEP runs don't interleave
case $DEEP_MODE in
	"NO"  ) PLOG="$TMP_DIR/phoenix.log" ;;
	"YES" ) PLOG="$TMP_DIR/phoenix_deep.log" ;;
esac

if [ $DEEP_MODE = NO ] 
then
	AB_LIST=AB_list_${MODE}_${DATE}
fi

# MCAL_MODE=YES: the monthly AB_list is the reference for the monthly processing and daily moveP
if [ $MCAL_MODE = YES ] && [ Q$MONTH = Q ] && [ $MONTHLY_MODE = YES ]
then
	RMONTH=`echo $DATE | cut -c1-7`			# reference
	AB_LIST1=AB_list_${MODE}_${RMONTH}_BATCH	# for later usage in MOVEP
	cp $DFO_MON_DIR/$AB_LIST $DFO_MON_DIR/$AB_LIST1 
fi

# check for RELEASE being defined
# RELEASE must differ from DFO_INSTRUMENT so that release logs on qcweb do not
# collide with the operational instrument logs
RELEASE=`grep "^RELEASE" $CONFIG | grep "[[:space:]]${PROC_INSTRUMENT}[[:space:]]" | awk '{print $3}'`
if [ Q$RELEASE = Q ]
then
	echo "***ERROR: you must configure a RELEASE in order to separate logs on qcweb from the instrumental logs. Exit."
	exit
elif [ $RELEASE = $DFO_INSTRUMENT ]
then
	echo "***ERROR: RELEASE must differ from $DFO_INSTRUMENT in order to separate logs on qcweb from the instrumental logs. Exit."
	exit
fi

INSTR_DIR="/home/qc/public_html/${RELEASE}" 
STAT_FILE=$DFO_MON_DIR/PHOENIX_DAILY_$RELEASE

# statistics file is created on first use (content filled by writeStats)
if [ ! -s $STAT_FILE ]
then
	echo "***INFO: no $STAT_FILE found. We create a new one, check its content."
	cat > $STAT_FILE <<EOT
# This is the statistics file for the $RELEASE IDP project.
# It is filled from `whoami`.
EOT
fi

MAIL_YN=`grep "^MAIL_YN"	$CONFIG | awk '{print $2}'`
if [ Q$MAIL_YN != QNO ]
then
	MAIL_YN=YES
fi

# for moveP handling of pre-existing product directory
UPDATE_YN=`grep "^UPDATE_YN"	$CONFIG | awk '{print $2}'`
if [ Q$UPDATE_YN != QNO ]
then
	UPDATE_YN=YES
fi

# optional plugin hooks (each is an executable under $DFO_BIN_DIR)
DATA_PGI=`grep 	"^DATA_PGI"	$CONFIG | grep $PROC_INSTRUMENT | awk '{print $3}'`
AB_PGI=`grep 	"^AB_PGI" 	$CONFIG | grep $PROC_INSTRUMENT | awk '{print $3}'`
HDR_PGI=`grep 	"^HDR_PGI" 	$CONFIG | grep $PROC_INSTRUMENT | awk '{print $3}'`
JOB_PGI=`grep 	"^JOB_PGI" 	$CONFIG | grep $PROC_INSTRUMENT | awk '{print $3}'`
MON_PGI=`grep 	"^MON_PGI" 	$CONFIG | grep $PROC_INSTRUMENT | awk '{print $3}'`

CHECK_VCAL=`grep "^CHECK_VCAL"	 $CONFIG | awk '{print $2}'`

# NOTE(review): if JOB_PGI is unconfigured (empty), this tests the directory
# itself, which passes -s; presumably JOB_PGI is optional — TODO confirm
if [ ! -s $DFO_BIN_DIR/$JOB_PGI ]
then
	echo "ERROR: \$JOB_PGI $JOB_PGI defined in config file but not found under $DFO_BIN_DIR. Check and start again."
	exit
fi

# MASTER/SLAVE operational model (v2.3): a SLAVE account mirrors a MASTER
if [ $MCAL_MODE = NO ]
then
	IS_SLAVE=`grep "^IS_SLAVE"	$CONFIG |  awk '{print $2}'`
	if [ Q$IS_SLAVE = QYES ]
	then
		MASTER_ACCOUNT=`grep "^MASTER_ACCOUNT"	$CONFIG |  awk '{print $2}'`
		if [ Q$MASTER_ACCOUNT = Q ]
		then
			echo "***ERROR: if IS_SLAVE=YES, you must define the \$MASTER_ACCOUNT. Exit."
			exit
		fi

		SLV_ROOT=`grep "^SLV_ROOT"		$CONFIG |  awk '{print $2}'`
		MST_ROOT=`grep "^MST_ROOT"		$CONFIG |  awk '{print $2}'`
		if [ Q$SLV_ROOT = Q ] || [ Q$MST_ROOT = Q ]
		then
			echo "***WARNING: you should configure \$SLV_ROOT and \$MST_ROOT since otherwise editing of html files goes wrong."
		fi
		SLAVE_ACCOUNT=`whoami`@`hostname`
	else
		IS_SLAVE=NO
	fi
else
	#BWo:
	IS_SLAVE=NO
fi

# =========================================================================
# 0.3 procedures
# 0.3.1 createPseudoDate: runID --> pseudo DATE, for DEEP mode
# =========================================================================
# For DEEP_MODE, we need a dummy DATE which is used as PSEUDO-DATE for all tools that expect the DATE format 2010-01-31. 
# Although this is called DATE, its only expected properties are a) 10 chars long, b) nnnn-mm-pp. The tool qcdate is never 
# exposed to it (it would fail). We bring all different runID formats into the pseudo-date format:
# SV = 60., old scheme P100-, old scheme P100+, new scheme P105+. All but the new schemes have numbers only. The new scheme has
# mixed chars and numbers.

# Old formats:
# The significant part of RUN_ID (e.g. ppp...nnnnm) is ppp, nnnn and m which makes nicely up for a dummy date: mppp-nn-nn.
# Only if the RUN tag is beyond J we have a problem. Then we might want to restart with A, since all of this is only for avoiding 
# overlaps between two/a few scheduled deep runs, so this is hopefully "good enough".
# For P100+ runs, e.g. 0100.B-0688B, we drop the second p in pppp (it is always 1) and then apply the same rule as before.
# 0100.B-0607B becomes 2000-06-07.
# New format:
# For P105+ runs, e.g. 105.20B3.002, we transform the run 002 into 2 (dropping the 00), add it as first digit, and derive 2105-20-B3 as pseudo-date.
# This works fine up to run 009. Run 10 and beyond will lose the 1 and start with 0. Unclear what to do then.

# In order to not confuse this pseudo-date with a non-deep (real) date 2000-06-07, we 
# a) work in a DEEP workspace that is different from a non-DEEP (conventional) workspace;
# b) have in the AB CIVIL_DATE (the classical DATE) and DATE (the pseudo-date translated from the runID). Also the OBS_PROG_ID is kept in the AB.

createPseudoDate() {
RUN_ID1=$1
LENGTH=`echo $RUN_ID1 | wc -c`
LENGTH1=`echo $RUN_ID1 | sed "s/\.//g" | wc -c`	# distinguish old (one '.') and new scheme (two '.')
NEW=NO

case $LENGTH in
 "11" ) RUN_ID1=0$1 ;;  			# SV: 60.
 "12" ) : ;;  					# old scheme P100-
 "13" ) case $LENGTH1 in
	"12" )
		FIRST1=`echo $RUN_ID1 | cut -c1,1`	# old scheme P100+
		REST=`echo $RUN_ID1 | cut -c3-13`
		RUN_ID1=`echo ${FIRST1}${REST}` ;;
	"11" )
		NEW=YES					# new scheme P105+
		FIRST1=`echo $RUN_ID1 | cut -c1-3`
		REST=`echo $RUN_ID1 | cut -c4-13`
		RUN_ID1=`echo ${FIRST1}${REST}` ;;
	esac
esac

# old scheme
if [ $NEW = NO ]
then
	FIRST=`echo $RUN_ID1 | cut -c11,11`
	case $FIRST in
   A ) FIRST=1;;
   B ) FIRST=2;;
   C ) FIRST=3;;
   D ) FIRST=4;;
   E ) FIRST=5;;
   F ) FIRST=6;;
   G ) FIRST=7;;
   H ) FIRST=8;;
   I ) FIRST=9;;
   J ) FIRST=0;;
   * ) FIRST=X;;
	esac

	SCND=`echo $RUN_ID1 | cut -c1-3`
	THRD=`echo $RUN_ID1 | cut -c7,8`
	FRTH=`echo $RUN_ID1 | cut -c9,10`

# NEW scheme
else
# for all practical purpose, we assume there are no more than 10 DEEP runs per programme (in theory there could be 999)
        FIRST=`echo $RUN_ID1 | cut -c12,12`
        SCND=`echo $RUN_ID1 | cut -c1-3`
        THRD=`echo $RUN_ID1 | cut -c5,6`
        FRTH=`echo $RUN_ID1 | cut -c7,8`
fi

PSEUDO_DATE="${FIRST}${SCND}-${THRD}-$FRTH"

if [ $FIRST = X ]
then
	echo X
else
	echo $PSEUDO_DATE
fi
}

# =========================================================================
# 0.3.2 writeLog
# =========================================================================
writeLog(){
# Prefix $1 with a timestamp, echo it, and append it to the phoenix log.
# NOTE: TIMESTAMP is deliberately a global — writeStats() reads it later.
TIMESTAMP=$(date +'%Y-%m-%d %H:%M:%S')
WL_LINE="[$TIMESTAMP]	$1"
echo "$WL_LINE" | tee -a $PLOG
}

# =========================================================================
# 0.3.3
# writeStats: write some baseline statistics
# $1: PROCESS_ONLY or FINISHED --> flag to know where to find the ABs
# =========================================================================
writeStats(){
# Write baseline per-date statistics (AB counts, job/QC execution times,
# raw-data volume) into $STAT_FILE, and log a summary via writeLog.
# $1: PROCESS_ONLY or FINISHED --> flag to know where to find the ABs
# Reads globals: N_SCI_TOT, NUM_AB_REJ, NUM_AB, TIMESTAMP1/2,
# TIMESTAMP_START/END, DRS_TYPE, QCBQS_TYPE, QCJOB_CONCENTRATION, MODE, DATE,
# STAT_FILE, TIMESTAMP (set by the last writeLog call).
writeLog "$N_SCI_TOT ABs found; $NUM_AB_REJ rejected; $NUM_AB processed"
writeLog "JOBS begin: $TIMESTAMP1 ($TIMESTAMP_START)"
writeLog "JOBS end  : $TIMESTAMP2 ($TIMESTAMP_END)"
DIFF_TIME=`echo $TIMESTAMP_END $TIMESTAMP_START | awk '{print $1-$2}'`
writeLog "JOBS total time: $DIFF_TIME sec"

# AB processing
# with CONDOR?
if [ $DRS_TYPE = CON ]
then
# we sum up all AB executions
# (the condor DAG summary lines in $PLOG carry a "Total time:" field)
	#TOT_TIME1=`grep "DAG with ID: "   $PLOG | grep -v QC | sed "s/^.*Total time://" | sed "s/s//" | awk '{printf "%5.2f\n",$1}'`
	TOT_TIME1=`grep "DAG with ID: "   $PLOG | grep -v QC | sed "s/^.*Total time://" | sed "s/s//" | awk '{sum +=$1} END {print sum}'`
else
	TOT_TIME1=$DIFF_TIME
fi

# QC processing (IMPLICIT and SER: exectime included in TOT_TIME1)
case $QCJOB_CONCENTRATION in
 NO  ) 	case $QCBQS_TYPE in
	 "PAR" )      TOT_TIME2=`grep "DAG with ID: QC" $PLOG | sed "s/^.*Total time://" | sed "s/s//" | awk '{printf "%5.2f\n",$1}'` ;;
	 "SER" )      if [ $1 = PROCESS_ONLY ] && [ $DEEP_MODE = NO ]
		      then
# ABs still in $DFO_AB_DIR; a night spans T2* of DATE and T[0-1]* of DATE+1
			TOT_TIME2a=`grep "TQCEXEC" $DFO_AB_DIR/*${DATE}T2*.ab 2>/dev/null | awk '{sum +=$2} END {print sum}'`
			DATE1=`qcdate $DATE +1`
			TOT_TIME2b=`grep "TQCEXEC" $DFO_AB_DIR/*${DATE1}T[0-1]*.ab 2>/dev/null | awk '{sum +=$2} END {print sum}'`
			TOT_TIME2=`echo $TOT_TIME2a $TOT_TIME2b | awk '{print $1+$2}'`
		      elif [ $DEEP_MODE = NO ]
		      then
# ABs already moved to the per-date log directory
			TOT_TIME2=`grep "TQCEXEC" $DFO_LOG_DIR/$DATE/*.ab | awk '{sum +=$2} END {print sum}'`
# $DEEP_MODE = YES
		      else
# DEEP: collect the ABs matching this (pseudo-)DATE first
			rm -f $TMP_DIR/ph_all_abs
			grep "^DATE" $DFO_AB_DIR/*.ab | grep $DATE | sed "s/:DATE.*//" > $TMP_DIR/ph_all_abs
			TOT_TIME2=0
			if [ -s $TMP_DIR/ph_all_abs ]
			then
				grep TQCEXEC `cat $TMP_DIR/ph_all_abs` | awk '{sum +=$2} END {print sum}' > $TMP_DIR/TOT_TIME2
				if [ -s $TMP_DIR/TOT_TIME2 ]
				then
					TOT_TIME2=`cat $TMP_DIR/TOT_TIME2`
					if [ Q$TOT_TIME2 = Q ]
					then
						TOT_TIME2=0
					fi
				else
					TOT_TIME2=0
				fi
			fi
		      fi 
		      ;;
	 "IMPLICIT" ) TOT_TIME2=0 ;;
	esac ;;
# placeholder token; presumably substituted later by the QC job
# concentration workflow — TODO confirm
 YES ) TOT_TIME2="TOT_TIMEQC_$DATE" ;;
esac

writeLog "condor time for AB processing: $TOT_TIME1 sec" 
if [ $QCJOB_CONCENTRATION = NO ]
then
	writeLog "condor time for QC processing: $TOT_TIME2 sec"
fi

# number and size of processed raw files
# CALIB: whole month of raw dirs; SCIENCE: single date dir; sizes in MB
case $MODE in
  "CALIB"   ) YM=`echo $DATE | cut -c1-7`; NUM_RAW=`ls $DFO_RAW_DIR/${YM}*/*fits | wc -l` 
	      SIZE_RAW=`du -ks $DFO_RAW_DIR/${YM}*/ | awk '{sum +=$1/1024.} END {print sum}'` ;;
  "SCIENCE" ) NUM_RAW=`ls $DFO_RAW_DIR/$DATE/*fits | wc -l` 
	      SIZE_RAW=`du -ks $DFO_RAW_DIR/$DATE/ | awk '{print $1/1024.}'` ;;
esac

# TIMESTAMP was set by the last writeLog call above; keep the date part only
TIMESTAMP_SH=`echo $TIMESTAMP | awk '{print $1}'`

# add new entry
# (any previous entry for this DATE is removed first, then re-appended)
sed -i -e "/${DATE}/d" $STAT_FILE
echo "$DATE	$N_SCI_TOT	$NUM_AB_REJ	$NUM_AB	$TOT_TIME1	$TOT_TIME2	$NUM_RAW	$SIZE_RAW	N_prod	size_prod	$TIMESTAMP_SH" >> $STAT_FILE

# sort by date/run_ID
cat $STAT_FILE | grep "^#" > $TMP_DIR/phoenix_daily
cat $STAT_FILE | grep -v "^#" | sort -k1,1 >> $TMP_DIR/phoenix_daily
mv $TMP_DIR/phoenix_daily $STAT_FILE
}

# =========================================================================
# 0.3.4 checkOCAConfig: find the proper OCA file for createAB
# ========================================================================
checkOCAConfig(){
# Find the OCA rule files (organisation + association) valid for $DATE,
# from versioned "OCA <release> <organ> <assoc> <valid-until>" entries in
# $CONFIG. Without such entries, the standard association file must exist.
# On a match, the selected pair is copied over the standard OCA file names.
rm -f $TMP_DIR/ph_list_ocaconfig

grep "^OCA" $CONFIG | grep "[[:space:]]${RELEASE}[[:space:]]" | awk '{print $3,$4,$5}' | sort -u -k3  > $TMP_DIR/ph_list_ocaconfig

if [ ! -s $TMP_DIR/ph_list_ocaconfig ]
then
# no versioned entries --> just the standard file, which then must exist
	if [ ! -s $DFO_CONFIG_DIR/OCA/${DFO_INSTRUMENT}_association.h ]
	then
		echo "***ERROR: no file $DFO_CONFIG_DIR/OCA/${DFO_INSTRUMENT}_association.h found, can't create ABs. Exit."
		exit -1
	fi
	return
fi

# entries come sorted by validity date: first one at/after DATE wins
for VALID_UNTIL in $(awk '{print $3}' $TMP_DIR/ph_list_ocaconfig)
do
	CUTOFF=$(echo $VALID_UNTIL | tr -d "-")
	DATE1=$(echo $DATE | tr -d "-")
# MONTHLY mode has no DATE --> compare with first day of MONTH
	if [ Q$DATE1 = Q ]
	then
		DATE1=$(echo ${MONTH}-01 | tr -d "-")
	fi

	if [ $DATE1 -le $CUTOFF ]
	then
		ORGAN_CONFIG=$(grep $VALID_UNTIL $TMP_DIR/ph_list_ocaconfig | awk '{print $1}')
		ASSOC_CONFIG=$(grep $VALID_UNTIL $TMP_DIR/ph_list_ocaconfig | awk '{print $2}')
		if [ ! -s $DFO_CONFIG_DIR/OCA/$ORGAN_CONFIG ] || [ ! -s $DFO_CONFIG_DIR/OCA/$ASSOC_CONFIG ]
		then
			echo "***ERROR: no file $ORGAN_CONFIG or $ASSOC_CONFIG found, can't create ABs. Exit."
			exit
		fi

		cp $DFO_CONFIG_DIR/OCA/$ORGAN_CONFIG $DFO_CONFIG_DIR/OCA/${DFO_INSTRUMENT}_organisation.h
		cp $DFO_CONFIG_DIR/OCA/$ASSOC_CONFIG $DFO_CONFIG_DIR/OCA/${DFO_INSTRUMENT}_association.h
		writeLog "  versioned OCA rules used: ${ORGAN_CONFIG}, ${ASSOC_CONFIG}"
		break
	fi
done
}

# =========================================================================
# 0.3.5 qualityChecker: compile historical and current quality information
#       This goes into a job file, to be called by JOBS_PHOENIX after 
#	AB/QC processing
# ========================================================================
qualityChecker(){
# Write the job file $DFO_JOB_DIR/qualityChecker_$DATE, to be called by
# JOBS_PHOENIX after AB/QC processing. The generated script, per AB:
#   - drops ABs from the AB list that no longer exist (COMPLETE_MODE leftovers)
#   - reads PROCESS_STATUS and the 'scored' AB_STATUS from the AB,
#     TOTAL_SCORE/TOTAL_NUMBER from the tlog, and the historical 3-digit
#     certification codes (e.g. 222X) from the tab file
#   - pre-colors the CERTIF column of the tab file (GREY/GREEN/LIGHTRED)
#     and the CERTIF_* flag, to prepare the decisions for autoCertifier
#   - finally refreshes the AB monitor via getStatusAB.
# NB: the here-doc below is UNQUOTED: expansions written as \$ or \` are
# deferred to job runtime; all plain $... are resolved now, at creation time.
cat > $DFO_JOB_DIR/qualityChecker_$DATE <<EOT
#!/bin/sh
# qualityChecker: compiles historical and current quality information
# auto-created by $TOOL_NAME
if [ $COMPLETE_MODE = YES ]
then
	AB_LIST1=`echo $AB_LIST | sed "s/ALL/CALIB/"`
else
	AB_LIST1=$AB_LIST
fi

# for COMPLETE_MODE, it could be that an AB has been already been handled for a previous DATE
for AB in \`cat $DFO_MON_DIR/\$AB_LIST1 | awk '{print \$1}'\`
do
	if [ ! -s $DFO_AB_DIR/\$AB ]
	then
		sed -i -e "/\$AB/d" $DFO_MON_DIR/\$AB_LIST1
	fi
done
	
for AB in \`cat $DFO_MON_DIR/\$AB_LIST1 | awk '{print \$1}'\`
do
	ATAB=\`echo \$AB | sed "s/.ab/.tab/"\`
	TLOG=\`echo \$AB | sed "s/.ab/.tlog/"\`
# bug in scoreQC -->remove
	sed -i -e "s/HC_MARK/HC_NONE/" $DFO_AB_DIR/\$ATAB	
# normal result -->no need
	#still useful if no scoring done! sed -i -e "s/222X/X/" $DFO_AB_DIR/\$ATAB

	PROCESS_STATUS=\`grep "^PROCESS_STATUS" $DFO_AB_DIR/\$AB | awk '{print \$2}'\`
	SCORE_STATUS=\`grep "^AB_STATUS"	$DFO_AB_DIR/\$AB | grep scored | awk '{print \$3}' | tail -1\` 
	CHECK_REJ=\`grep " CERTIF_REJ " 	$DFO_AB_DIR/\$AB\`

	SCORE_RESULT=""
        TOTAL_NUMBER=0
        if [ Q\$SCORE_STATUS = Qscored ] && [ -s $DFO_AB_DIR/\$TLOG ]
        then
                SCORE_RESULT=\`grep "^TOTAL_SCORE" $DFO_AB_DIR/\$TLOG | awk '{print \$2}'\`
                TOTAL_NUMBER=\`grep "^TOTAL_NUMBER" $DFO_AB_DIR/\$TLOG | awk '{print \$2}'\`
        fi

# HISTO content
# (check for codes in phoenix section CERTIF_CASES)
# 222 means 'processed, scored grn, certified'
# 220 means 'processed, scored grn, no certif information': handled same as 222 since likely a tool bug
# 022 handled like 222 since likely a tool bug (QC processed and certified also means processed)
	CHECK_222=\`egrep "222X|220X|022X" $DFO_AB_DIR/\$ATAB\`
# 000 or 200 means 'no histo information'
	CHECK_000=\`egrep "000X|200X" $DFO_AB_DIR/\$ATAB | awk '{print \$1}'\`

# now we mark the CERTIF column, to prepare for autoCertifier
	if [ Q\$PROCESS_STATUS = QFAILED ]
	then
		sed -i -e "s/CERTIFGREY/CERTIFLIGHTRED/" $DFO_AB_DIR/\$ATAB
               	sed -i -e "s/CERTIF_NONE/CERTIF_FAILED/" $DFO_AB_DIR/\$ATAB
# if previously failed, but now succeeded: update
	else
               	sed -i -e "s/CERTIF_FAILED/CERTIF_NONE/" $DFO_AB_DIR/\$ATAB
	fi
		
# score result now grn, plus histo result 222 (or 000): --> auto-certify
	if [ "Q\$SCORE_RESULT" = Q0 ] && ( [ "Q\$CHECK_222" != Q ] || [ "Q\$CHECK_000" != Q ] ) && [ "Q\$CHECK_REJ" = Q ]
	then
               	sed -i -e "s/CERTIFGREY/CERTIFGREEN/"  $DFO_AB_DIR/\$ATAB
# could be remnant from previous scoring
                sed -i -e "s/CERTIFLIGHTRED/CERTIFGREEN/" $DFO_AB_DIR/\$ATAB
# could be enabled but better to leave it for autoCertifier
               	#sed -i -e "s/CERTIF_NONE/CERTIF_AUTO/" $DFO_AB_DIR/\$ATAB
	fi

# rejected? Force to LIGHTRED
	if [ "Q\$CHECK_REJ" != Q ]
	then
		sed -i -e "s/CERTIFGREEN/CERTIFLIGHTRED/"  $DFO_AB_DIR/\$ATAB
               	sed -i -e "s/CERTIF_AUTO/CERTIF_REJ/" $DFO_AB_DIR/\$ATAB
	fi

# histo information showed issue, current not --> mark for review
	CHECK_fail=\`egrep "1[0-2][0-2]X" $DFO_AB_DIR/\$ATAB\`
	CHECK_red=\`egrep "[0-2]1[0-2]X" $DFO_AB_DIR/\$ATAB\`
	CHECK_rej=\`egrep "[0-2][0-2]1X" $DFO_AB_DIR/\$ATAB\`

	if [ "Q\$SCORE_RESULT" = Q0 ] && ( [ "Q\$CHECK_fail" != Q ] || [ "Q\$CHECK_red" != Q ] || [ "Q\$CHECK_rej" != Q ])
	then
		sed -i -e "s/CERTIFGREY/CERTIFLIGHTRED/"  $DFO_AB_DIR/\$ATAB
	fi

# no scoring done? We then mark the historical result in color
	if [ "Q\$SCORE_RESULT" = Q ]
	then
		if [ "Q\$CHECK_222" != Q ]
		then
			sed -i -e "s|<fontXYZsize=1XYZcolor=#666>222XYZ|<fontXYZsize=1XYZcolor=#093><b>222</b>XYZ|" $DFO_AB_DIR/\$ATAB
			sed -i -e "s|<fontXYZsize=1XYZcolor=#666>220XYZ|<fontXYZsize=1XYZcolor=#093><b>220</b>XYZ|" $DFO_AB_DIR/\$ATAB
			sed -i -e "s|<fontXYZsize=1XYZcolor=#666>022XYZ|<fontXYZsize=1XYZcolor=#093><b>022</b>XYZ|" $DFO_AB_DIR/\$ATAB
		fi	

# current info shows score issue --> mark for review
        elif [ "Q\$SCORE_RESULT" != Q0 ] || [ Q\$TOTAL_NUMBER = Q0 ]
        then
                sed -i -e "s/CERTIFGREY/CERTIFLIGHTRED/"  $DFO_AB_DIR/\$ATAB
# if config.scoreQC has been modified, the GREEN flag could be wrong now, hence also check for this case
                sed -i -e "s/CERTIFGREEN/CERTIFLIGHTRED/" $DFO_AB_DIR/\$ATAB
        fi

# add more cases if required
done
getStatusAB -d $DATE -f
EOT
# make the generated job executable for the owner
chmod u+x $DFO_JOB_DIR/qualityChecker_$DATE
}

# =========================================================================
# 0.3.6 autoCertifier: scan the results for ABs to be reviewed
#       partly taken from certifyProducts
#	optional argument $1=$AB (to jump to selected AB)
# =========================================================================
autoCertifier(){
# Scan the (pre-colored, see qualityChecker) tab files for ABs needing review:
# auto-certify the green ones, walk the operator through the interactive
# rejection/certification workflow for the others.
# Optional $1 = AB name --> SINGLE_MODE: review just that AB, forced.
# config switch: accept products without current scores if HISTO_SCORE is 222
ACCEPT_NOSCORE=`grep "^ACCEPT_NOSCORE" $CONFIG | awk '{print $2}'`
if [ Q$ACCEPT_NOSCORE != QYES ]
then
	ACCEPT_NOSCORE=NO
fi

rm -f $TMP_DIR/AB_LIST
if [ Q$1 != Q ]
then
	echo $1 > $TMP_DIR/AB_LIST
	SINGLE_MODE=YES
else
	cp $DFO_MON_DIR/$AB_LIST $TMP_DIR/AB_LIST
	SINGLE_MODE=NO
fi

for AB in `cat $TMP_DIR/AB_LIST | awk '{print $1}'`
do
	ATAB=`echo $AB | sed "s/.ab/.tab/"`
	TLOG=`echo $AB | sed "s/.ab/.tlog/"`
	HTML=`echo $AB | sed "s/.ab/.html/"`

	if [ $COMPLETE_MODE = YES ]
	then
		DATE1=`grep "^CIVIL_DATE" $DFO_AB_DIR/$AB | awk '{print $2}' | head -1`
	else
		DATE1=`grep "^DATE" $DFO_AB_DIR/$AB | awk '{print $2}'`
	fi
	PROD_PATH=`grep "^PROD_PATH" $DFO_AB_DIR/$AB | awk '{print $2}'`
	PROD_PATH=`eval "echo $PROD_PATH"`
	PROD_ROOT_NAME=`grep "^PROD_ROOT_NAME" $DFO_AB_DIR/$AB | awk '{print $2}'`

	if [ ! -s $DFO_AB_DIR/$ATAB ]
	then
		echo "***ERROR: no $ATAB file found, can't certify."
		echo "Hit return:"
		read input
	else
# collect the decision inputs: certification color, product existence, score color
		CHECK_GREEN=`grep " CERTIFGREEN " $DFO_AB_DIR/$ATAB | awk '{print $1}'`
		CHECK_PROD=`ls $PROD_PATH | grep ${PROD_ROOT_NAME} | grep fits | head -1`
		CHECK_SCORE=`grep " SCOLGREEN " $DFO_AB_DIR/$ATAB`
# BWo: assuming tool is called with -d option if this branch is reached
		MONTH_SCP=`echo $DATE | cut -c1-7`
		if [ -s $TMP_DIR/ph_old_info_$MONTH_SCP ]
		then
			HISTO_SCORE=`grep $AB $TMP_DIR/ph_old_info_$MONTH_SCP | awk '{print $2}'`
		else
			HISTO_SCORE=`grep $AB $TMP_DIR/ph_old_info_$DATE | awk '{print $2}'`
		fi
# force review
# (bug fix: was '[ SINGLE_MODE = YES ]', a literal string comparison that
#  was always false, so single-AB calls never forced a review)
		if [ $SINGLE_MODE = YES ]
		then
			CHECK_GREEN=""
		fi

# auto-certified
		if [ "Q$CHECK_GREEN" != Q ] && [ $SINGLE_MODE = NO ]
		then
			sed -i -e "s/CERTIF_NONE/CERTIF_AUTO/" $DFO_AB_DIR/$ATAB
			sed -i -e "/processed by /a AB_STATUS	- auto-certified by 'autoCertifier@phoenix'	on `date` by `whoami` on `hostname` " $DFO_AB_DIR/$AB

# FAILURE
		elif [ Q$CHECK_PROD = Q ]
		then
			echo "    $AB failed."
			echo "      Please enter a significant comment about the failure:"
			RETURN=-1
			while [ $RETURN -lt 0 ]
			do
				enterProdComments $DATE1
			done	
# HISTO_SCORE=222 and ACCEPT_NOSCORE=YES
		elif [ $ACCEPT_NOSCORE = YES ] && [ Q$HISTO_SCORE = Q222 ]
		then
			sed -i -e "s/CERTIF_NONE/CERTIF_AUTO/" $DFO_AB_DIR/$ATAB
			sed -i -e "/processed by /a AB_STATUS	- auto-certified by 'autoCertifier@phoenix'	on `date` by `whoami` on `hostname` " $DFO_AB_DIR/$AB
			echo "	$AB auto-certified ($HISTO_SCORE)"

# Score > 0 or historical issue or SINGLE_MODE=YES
		else
			if [ $SINGLE_MODE = YES ]
			then
				echo "    $AB selected for review "
			elif [ "Q$CHECK_SCORE" != Q ]
			then
				echo "    Issue with $AB (historical failure/red score/rejection) "
			else

			#elif [ $ACCEPT_NOSCORE = NO ]
			#then
				echo "    Issue with $AB (score>0 or no score) "
			fi

			if [ $ACCEPT_NOSCORE = NO ]
			then
				echo "      Start reviewing (check scores, QC reports) and then decide ..."
			fi

# open the QC report in the browser, if one exists
			if [ ! -s $DFO_AB_DIR/$HTML ] 
			then
				case $ACCEPT_NOSCORE in
					"NO"   ) echo "	 No scores found for $AB, something went wrong!" 
						 echo "	 If there is a pattern, consider setting ACCEPT_NOSCORE to YES." ;;
					"YES"  ) echo "	$AB: No scores found, HISTO_CERTIF=$HISTO_SCORE." ;;
				esac
			else
				$DFO_GUI_DIR/refresh_browser $DFO_AB_DIR/$HTML
			fi

# insist on an explicit y/n decision
			RETURN=-1
			while [ $RETURN -lt 0 ]
			do
				echo "	 Certified (y | n ) ? (no default)"
				read QCPASSED_YN
				if [ Q$QCPASSED_YN != Qy ] && [ Q$QCPASSED_YN != Qn ]
				then
					RETURN=-1
				else
					RETURN=0
				fi
			done

			if [ "Q$QCPASSED_YN" = "Qn" ]
			then
				MAXSTEP=4
				echo "      Product(s) rejected. Entering rejection workflow ..."
				echo ""
				echo "      Step 1 of $MAXSTEP (COMMENTS): Please enter a significant comment about the rejected product file(s):"
				read COMM_YN
				RETURN=-1
				while [ $RETURN -lt 0 ]
				do
					enterProdComments $DATE1
				done	

				sed -i -e "/processed by /a AB_STATUS	- rejected by 'autoCertifier@phoenix'	on `date` by `whoami` on `hostname` " $DFO_AB_DIR/$AB

				sed -i -e "s/CERTIFGREY/CERTIFLIGHTRED/" $DFO_AB_DIR/$ATAB
                		sed -i -e "s/CERTIF_NONE/CERTIF_REJ/" $DFO_AB_DIR/$ATAB
# if certified before ...
				sed -i -e "s/CERTIFGREEN/CERTIFLIGHTRED/" $DFO_AB_DIR/$ATAB
                		sed -i -e "s/CERTIF_AUTO/CERTIF_REJ/" $DFO_AB_DIR/$ATAB
                		sed -i -e "s/CERTIF_OK/CERTIF_REJ/" $DFO_AB_DIR/$ATAB

# enter list_REJECTED
				echo "     Step 2 of $MAXSTEP (list_REJECTED): should file_IDs be listed in list_REJECTED (hence not be used the next time for AB creation)?"
				echo "       Pro: they won't ask for decision again (makes sense if the raw files have a quality issue, but then you may want to simply hide them)"
				echo "       Con: they won't benefit from pipeline improvements (if their rejection is due to pipeline issues, not to quality issues)"
				echo "       Enter Y/N (N):"
				read ENTER_YN
				if [ Q$ENTER_YN = QY ]
				then
					for RAWFILE in `grep "^RAWFILE" $DFO_AB_DIR/$AB | awk '{print $2}'`
					do
						RF1=`filename $RAWFILE | sed "s/.fits//"`
						if [ ! -s $DFO_MON_DIR/list_REJECTED ]
						then
							echo "$RF1      <comment>" >> $DFO_MON_DIR/list_REJECTED
						else
							CHECK_EXIST=`grep $RF1 $DFO_MON_DIR/list_REJECTED 2>/dev/null | head -1`
							if [ "Q$CHECK_EXIST" = Q ]
							then
								echo "$RF1	<comment>" >> $DFO_MON_DIR/list_REJECTED
							fi
						fi
					done	
					echo "       File_IDs have been entered in $DFO_MON_DIR/list_REJECTED. "
					echo "       They won't be offered for AB creation the next time."
					echo "       Please enter also a significant comment in $DFO_MON_DIR/list_REJECTED, for your own memory."
					echo "       Hit return when you are done:"
				else
					echo "       ... not entered (they would benefit from pipeline improvements). Hit return:"
				fi
				read input

# analyze dependencies
	                        echo "     Step 3 of $MAXSTEP (DEPENDENCIES): Other ABs using products from this AB:"
				rm -f $TMP_DIR/list_others
				grep "^MCALIB" $DFO_AB_DIR/*ab | grep $AB | grep -v "^$AB" | sed "s/:MCALIB.*//" | grep -v $AB | sort -u > $TMP_DIR/list_others
				if [ -s $TMP_DIR/list_others ]
				then
					echo "AB_NAME                          RAW_TYPE | SETUP"
        				for OTH in `cat $TMP_DIR/list_others`
        				do
                				FILENAME=`basename $OTH`
                				grep $FILENAME $DFO_MON_DIR/$AB_LIST 
        				done
        				echo "       You either need to reject them as well, or"
        				echo "       re-create and re-execute them, after you have removed the offending products and entered the "
					echo "       file IDs of the current AB in the list_REJECTED." 
				else
        				echo "       None found."
				fi
	
                        	echo "      Hit return:"
                        	read input

# product deletion
				echo "     Step 4 of $MAXSTEP (DELETE FITS PRODUCTs) (mandatory). Hit return to proceed ..."
				read input
				rm -f `ls ${PROD_PATH}/${PROD_ROOT_NAME}*fits 2>/dev/null`
				
				echo "    ... $AB rejected, all products deleted."
				echo ""
# let pass w/o comments
			elif [ $ACCEPT_NOSCORE = YES ]
			then
				sed -i -e "s/CERTIF_NONE/CERTIF_OK/" $DFO_AB_DIR/$ATAB
				sed -i -e "/processed by /a AB_STATUS	- certified by 'autoCertifier@phoenix'	on `date` by `whoami` on `hostname` " $DFO_AB_DIR/$AB
# offer comments
			elif [ $ACCEPT_NOSCORE = NO ]
			then
# let it optional
				#echo "    Please enter a significant comment about product files:"
				echo "    Want to enter a comment about product files? (Y/N) (N)"
				read ENTER_YN
				if [ Q$ENTER_YN = QY ]
				then

					RETURN=-1
					while [ $RETURN -lt 0 ]
					do
						enterProdComments $DATE1
					done	
				fi
	
# insert the AB_STATUS line just before the SOF section of the AB
				LINE_SOFSECT=`grep -n "SOF section starts" $DFO_AB_DIR/$AB | sed "s/:# =====.*//" | awk '{print $1-1}'`
				cat $DFO_AB_DIR/$AB | awk '{if (NR < n ) {print $0}}' n=$LINE_SOFSECT > $TMP_DIR/new_AB
				echo "AB_STATUS	- certified by 'autoCertifier@phoenix'	on `date` by `whoami` on `hostname`" >> $TMP_DIR/new_AB
				cat $DFO_AB_DIR/$AB | awk '{if (NR >= n ) {print $0}}' n=$LINE_SOFSECT >> $TMP_DIR/new_AB
				mv $TMP_DIR/new_AB $DFO_AB_DIR/$AB

# could be flagged red by qualityChecker but now is certified --> green
				sed -i -e "s/CERTIFLIGHTRED/CERTIFGREEN/" $DFO_AB_DIR/$ATAB
				sed -i -e "s/CERTIFGREY/CERTIFGREEN/" $DFO_AB_DIR/$ATAB

                		sed -i -e "s/CERTIF_NONE/CERTIF_OK/"  $DFO_AB_DIR/$ATAB

                        	echo "   $AB: certified. Continue ..."
				echo ""
			fi

		fi
	fi
done

# Offer to jump to any other AB
echo "			... done with autoCertifier."
echo "			Do you want to jump to some other AB (j), refresh the qualityChecker (q) and be asked again, or exit (e)? (e)"
read JUMP_YN
if [ "Q$JUMP_YN" = Qq ]
then
	$DFO_JOB_DIR/qualityChecker_$DATE
	echo "			Do you want to jump to some other AB (j) or exit (e)? (e)"
	read JUMP_YN
fi
	
if [ "Q$JUMP_YN" = Qj ]
then
	RETURN=-1
	while [ $RETURN -lt 0 ]
	do
		echo "	Enter AB which you want to jump to (check AB monitor or $DFO_MON_DIR/$AB_LIST):"
		read AB
		CHECK_EXIST=`grep $AB $DFO_MON_DIR/$AB_LIST | awk '{print $1}'`
		if [ Q$CHECK_EXIST = Q ]
		then
			RETURN=-1
			echo "***ERROR: $AB is not listed in $DFO_MON_DIR/$AB_LIST. Try again (hit return) or exit (e):"
			read TRY_AGAIN
			if [ Q$TRY_AGAIN = Qe ]
			then
				exit
			fi
		else
			RETURN=0
		fi
	done
# recurse into the selected AB (SINGLE_MODE)
	autoCertifier $AB
fi
}

# =========================================================================
# 0.3.7 checkRNConfig: find the proper config file for renameProducts
# =========================================================================
checkRNConfig(){
# Resolve (and export as RNCONFIG) the renameProducts config file valid for
# $DATE, from versioned "RENAME <release> <config> <valid-until>" entries in
# $CONFIG. Without such entries, the standard config file name is used.
rm -f $TMP_DIR/ph_list_rnconfig

grep "^RENAME" $CONFIG | grep "[[:space:]]${RELEASE}[[:space:]]" | awk '{print $3,$4}' | sort -u -k2  > $TMP_DIR/ph_list_rnconfig

if [ ! -s $TMP_DIR/ph_list_rnconfig ]
then
# no versioned entries --> standard file, no issue
	export RNCONFIG=config.renameProducts
	return
fi

# entries come sorted by validity date: first one at/after DATE wins
for VALID_UNTIL in $(awk '{print $2}' $TMP_DIR/ph_list_rnconfig)
do
	CUTOFF=$(echo $VALID_UNTIL | tr -d "-")
	DATE1=$(echo $DATE | tr -d "-")
# MONTHLY mode has no DATE --> compare with first day of MONTH
	if [ Q$DATE1 = Q ]
	then
		DATE1=$(echo ${MONTH}-01 | tr -d "-")
	fi

	if [ $DATE1 -le $CUTOFF ]
	then
		export RNCONFIG=$(grep $VALID_UNTIL $TMP_DIR/ph_list_rnconfig | awk '{print $1}')
		break
	fi
done
}

# =========================================================================
# 0.3.8
# checkSelect: check for issues between predicted names (renameP)
# and existing names (for the same PIPEFILE)
# This procedure implements REPLACE_LOGIC = SELECTIVE
# ========================================================================
checkSelect(){
# Guard against renaming conflicts for REPLACE_LOGIC=SELECTIVE:
# compare the product names predicted by renameProducts ($DFO_CAL_DIR/$DATE/
# rn_files) against the names already archived for the same PIPEFILE, write a
# NEW/SAME/DIFF report, and comment out ("#DIFF:") the conflicting rn_files
# lines for manual resolution (operator is notified by mail).

# find existing mcalibs for that date
if [ ! -s $DFO_CAL_DIR/$DATE/rn_files ]
then
	echo "***ERROR: no rn_files found, can't do."
	exit
else
# a "#DIFF:" marker means this tool already edited rn_files once
	CHECK_DIFF=`grep "^#DIFF:" $DFO_CAL_DIR/$DATE/rn_files | head -1`
	if [ "Q$CHECK_DIFF" != Q ]
	then
		echo "***ERROR: rn_files has already been edited, needs to be created again."
		exit
	fi
fi

rm -f $DFO_CAL_DIR/$DATE/RENAMED_ISSUE
# 24h MJD window for DATE, shifted by DFO_OFFSET hours
# (presumably the site's night definition -- confirm against qcdate usage)
MJD1=`qcdate $DATE  | awk '{printf"%10.5f\n",$1+off/24+0.50}' off=$DFO_OFFSET `
MJD2=`echo $MJD1 | awk '{print $1+1}'`

# query for all non-virtual QC products of the instrument in that window
cat > $TMP_DIR/sl_main_query <<EOT
SELECT
	origfile , 
	mjd_obs ,
	pro_catg ,
	ins_mode ,
	pipefile ,
	dp_id 
FROM
	qc_metadata..qc_products
WHERE
	mjd_obs between $MJD1 and $MJD2
AND
	instrume = "$ORIG_DFO_INSTRUMENT"
AND
	virtual != 1
GO
EOT

# find the archived mcalibs
# (strip the isql header/footer lines, keep only the result rows)
rm -f $TMP_DIR/sl_main_results
isql -Uqc -S$ARCH_SERVER -w999 -P`cat $ARCH_PWD` -i $TMP_DIR/sl_main_query | sed "1,1 d" | grep -v "affected" | grep -v "\-\-\-" | sed "/^$/d" > $TMP_DIR/sl_main_results

# now we check if conflicts exist
rm -f $TMP_DIR/ph_check_newnames
echo "#FLAG	PIPEFILE					ARCHIVE_NAME	NEW_NAME" > $TMP_DIR/ph_check_newnames

# any renamed products at all? ("r." is presumably the prefix of renamed
# pipeline products; NB the dot matches any character)
rm -f $TMP_DIR/sl_candidates
CHECK_EXIST=`ls $DFO_CAL_DIR/$DATE | grep r. | grep fits | head -1`
if [ "Q$CHECK_EXIST" = Q ]
then
	writeLog "     ... no files found to check. Ok."
	exit
fi

# otherwise check
rm -f $TMP_DIR/ph_diffs1
for PF in `ls $DFO_CAL_DIR/$DATE/r.*fits`
do
	PF1=`basename $PF`
# check for mcalibs that have entered more than once with different pro.catg and 
# arcnames
	CHECK_ARC=`grep $PF1 $TMP_DIR/sl_main_results | awk '{print $1}' | wc -l`
	if [ Q$CHECK_ARC != Q1 ] && [ Q$CHECK_ARC != Q0 ]
	then
		echo "$PF1 has been used for more than one mcalib:" >> $TMP_DIR/ph_diffs1
		grep $PF1 $TMP_DIR/sl_main_results | awk '{print $1,$3,$5,$6}' >> $TMP_DIR/ph_diffs1
	fi

# compare archived name (from DB) with the newly predicted name (rn_files)
	ARCNAME=`grep $PF1 $TMP_DIR/sl_main_results | awk '{print $1}' | head -1`
	RN_NAME=`grep $PF1 $DFO_CAL_DIR/$DATE/rn_files | awk '{print $3}' | head -1`

	if [ Q$ARCNAME = Q ]
	then
		echo "NEW***  $PF1     <none>            $RN_NAME" >> $TMP_DIR/ph_check_newnames
	elif [ $ARCNAME = $RN_NAME ]
	then
		echo "SAME    $PF1 $ARCNAME" >> $TMP_DIR/ph_check_newnames
	else
		echo "DIFF*** $PF1 $ARCNAME $RN_NAME" >> $TMP_DIR/ph_check_newnames
	fi
done

# now we edit the rn_files file
CHECK_DIFF=`grep "DIFF" $TMP_DIR/ph_check_newnames | head -1`
if [ "Q$CHECK_DIFF" = Q ] && [ ! -s $TMP_DIR/ph_diffs1 ]
then
	echo "... no differences found between new file names and existing ones."
	exit
else
# build the report, mail it and keep a copy in RENAMED_ISSUE
	rm -f $TMP_DIR/ph_diffs
	echo "Differences found between new file names and existing ones. 
Go to \$DFO_CAL_DIR/$DATE, check 'rn_files' and 'RENAMED_ISSUE', 
decide about the proper names, edit 'rn_files' (make the lines active) and 
execute it.
Then, call 'phoenix -d $DATE -X -M' again.
How to find the proper names:
a) check the current archive name for a file marked in rn_files;
b) give it that 'original' name; 
c) now check for other instance with the new name, rename it to some other version;
d) remove the DIFF tag
e) repeat for all other conflicts
" > $TMP_DIR/ph_diffs

	cat $TMP_DIR/ph_diffs1 >> $TMP_DIR/ph_diffs 2>/dev/null

	egrep "NEW|DIFF" $TMP_DIR/ph_check_newnames >> $TMP_DIR/ph_diffs
	mail -s "PHOENIX: Issue with calib names for $DATE" $OP_ADDRESS <$TMP_DIR/ph_diffs
	cat $TMP_DIR/ph_diffs | tee -a $DFO_CAL_DIR/$DATE/RENAMED_ISSUE

# ignore <none> entries: files never ingested before, not an issue
	for PF in `egrep "^NEW|^DIFF" $TMP_DIR/ph_diffs | grep -v "<none>" | awk '{print $2}'`
	do
		sed -i -e "/$PF/s/^.*/#DIFF: &/" $DFO_CAL_DIR/$DATE/rn_files
	done
fi
}

# =======================================================================
# 0.3.9 procedure enterProdComments: enter comments about product file(s)
#       [taken from certifyProducts and simplified]
#       comments are inserted into $COMMENT_FILE [not into DFO database!]
#       $1=real DATE
# =======================================================================
enterProdComments(){
# Enter a comment about the product file(s) of the current $AB into
# $COMMENT_FILE (one entry line per product; the comment is appended in-line
# by the operator in $DFO_EDITOR). Comments go into the flat file only,
# not into the DFO database.
# $1 = real DATE (used for the comment file name and as part of the entry key)
# Exports RETURN=0 if a comment was entered, RETURN=-1 otherwise (callers loop
# on RETURN until a comment exists).
COMMENT_FILE="$DFO_LST_DIR/PRODCOM/prod_comment_$1.txt"
COMMENT_EDITOR=`eval "echo $DFO_EDITOR $COMMENT_FILE"`
if [ ! -s $COMMENT_FILE ]
then
	cat > $COMMENT_FILE <<EOT
#These comments are collected by the phoenix autoCertifier. 
#Entries starting with '#', and empty comments will be skipped.
#MODE   RUN_ID          OB_ID   DATE       PRODUCT                                      Comment
#==============================================================================================
EOT
fi

# key items identifying the entry, taken from the AB
RUN_ID=`grep "^OBS_PROG_ID" $DFO_AB_DIR/$AB | awk '{print $2}' | sed "s/(//" | sed "s/)//"`
OBS_ID=`grep "^OBS_ID"      $DFO_AB_DIR/$AB | awk '{print $2}'`
MJD_OBS=`grep "^MJD-OBS[[:space:]]" $DFO_AB_DIR/$AB | awk '{print $2}'`	#NOTE(review): not used below; kept since shell variables are global
MODE1=`grep "^DPR_CATG"	    $DFO_AB_DIR/$AB | awk '{print $2}'`

# add a template line unless an entry for this product already exists
# (matching is done on the whitespace-squeezed concatenation of the key items)
ENTRY_CHECK=`grep -v "^#" $COMMENT_FILE | tr "\011" " " | sed "s/ *//g" | grep "${MODE1}${RUN_ID}${OBS_ID}${1}${PROD_ROOT_NAME}" | head -1` 
if [ "Q$ENTRY_CHECK" = Q ]
then
	echo "$MODE1     $RUN_ID $OBS_ID $1 $PROD_ROOT_NAME" >> $COMMENT_FILE	
fi

echo "      [comment_editor] Now we open the comment file with ${DFO_EDITOR}.
          - Avoid apostrophs '
          - add comments in-line with PRODUCT names, no line breaks
          - don't modify the PRODUCT name
          - the comment file may have other entries (don't delete them). Hit return:"
read input

# call $COMMENT_EDITOR
$COMMENT_EDITOR

# strip the key items from the entry; whatever remains is the operator comment
# (bug fix: the strip pattern used ${MODEi1}, an unset variable, so the MODE
#  value survived the sed and an empty comment was accepted as a real one)
COMM_CHECK=`grep -v "^#" $COMMENT_FILE | tr "\011" " " | sed "s/ *//g" | grep "${MODE1}${RUN_ID}${OBS_ID}${1}${PROD_ROOT_NAME}" | tail -1 | sed "s/${MODE1}${RUN_ID}${OBS_ID}${1}${PROD_ROOT_NAME}//"`

if [ "Q$COMM_CHECK" = Q ]
then
	echo "[comment_editor] It seems you haven't inserted a comment. Please try again:"
	read input
	export RETURN=-1
else
# edit ATAB file: append the comment (spaces encoded as XYZ) for the AB monitor
	ATAB=`echo $AB | sed "s/.ab/.tab/"`
	PROD_COMMENT=`grep -v "^#" $COMMENT_FILE | grep ${PROD_ROOT_NAME}| sed "s/^.*${PROD_ROOT_NAME}//"`
	PROD_COMMENT1=`echo $PROD_COMMENT | sed "s/ /XYZ/g"`
# NOTE(review): this sed appends the comment to every line of the ATAB file -- confirm intended
	sed -i -e "s|^.*|&<fontXYZsize=1>${PROD_COMMENT1}</font>|" $DFO_AB_DIR/$ATAB
	
	echo "[comment_editor] ... done. Hit return:"
	read input
	export RETURN=0
fi
}

# end of procedures

# =======================================================================
# 0.4 Month (just a call for all dates in a given month)
# 0.4.1 propagate all flags
# =======================================================================

# MONTHLY mode: phoenix calls itself once per date of $MONTH, propagating
# the command-line flags, then (optionally) runs the concentrated QC job.
# This branch always terminates with 'exit'.
if [ Q$MONTH != Q ]
then
	PARAM2=""
	PARAM4=""
	if [ $CREATE_ONLY = YES ]
	then
		PARAM="-C"
	elif [ $MOVEP_ONLY = YES ]
	then
		PARAM="-M"
	elif [ $PROCESS_ONLY = YES ]
	then
		PARAM="-P"
	elif [ $PROCESS_PART = YES ]
	then
		PARAM2="-p"
# NOTE(review): this branch sets only PARAM2; PARAM keeps any inherited
# environment value instead of being cleared -- confirm intended
	else
		PARAM=""
	fi

	if [ $MCAL_MODE = YES ]
	then
		if [ $OVERRIDE_HISTO = YES ]
		then
			PARAM4="-X"
		else
			PARAM4="-X -x"
		fi
	fi

# =======================================================================
# 0.4.2 QCJOB_CONCENTRATION (for SCIENCE only)
# =======================================================================

# collect per-date QC jobs into one monthly job (-q flag); with the plain
# call (no -C/-M/-P) the tool first runs -P for all dates and remembers
# (SECOND_CALL) to run -M per date afterwards
	if [ $QCJOB_CONCENTRATION = YES ] 
	then
		if [ Q$PARAM = "Q-C" ] || [ Q$PARAM = "Q-P" ]
		then
			PARAM3="-q"
			rm -f $DFO_JOB_DIR/conc_JOBS_PHOENIX_${MONTH} $DFO_JOB_DIR/execQC_SCIENCE_${MONTH}
		elif [ Q$PARAM = "Q-M" ]
		then
			rm -f $DFO_JOB_DIR/conc_JOBS_PHOENIX_${MONTH} $DFO_JOB_DIR/execQC_SCIENCE_${MONTH}
		elif [ Q$PARAM = "Q" ]
		then
			PARAM3="-q"
			rm -f $DFO_JOB_DIR/conc_JOBS_PHOENIX_${MONTH} $DFO_JOB_DIR/execQC_SCIENCE_${MONTH}
			SECOND_CALL=YES	#to remember later that we need to run -M
			PARAM="-P"
		fi	
	fi	

# =======================================================================
# 0.4.3 monthly pattern module (CALIB)
# =======================================================================

	if [ $MCAL_MODE = YES ]
	then
# check config.moveProducts: disable renameProducts for CALIBs, must be done separately (for consistency checks and versioning)
		if [ -s $DFO_CONFIG_DIR/config.moveProducts ]
		then
			RENAME_NEW=`grep "^RENAME" $DFO_CONFIG_DIR/config.moveProducts | grep CALIB | awk '{print $3}'`
			if [ Q$RENAME_NEW = QYES ]
			then
				echo "***WARNING: for MCAL_MODE=YES, we call renameProducts with a module to check for proper new names. 
Config key RENAME has been disabled in \$DFO_CONFIG_DIR/config.moveProducts."
				sed -i -e "s/^RENAME[[:space:]]*CALIB[[:space:]]*YES/RENAME	CALIB	NO	#disabled by phoenix/" $DFO_CONFIG_DIR/config.moveProducts
			fi
		else
			echo "***ERROR: no \$DFO_CONFIG_DIR/config.moveProducts found, can't do."
			exit -1
		fi

# find the proper config file for renameProducts
		checkRNConfig

# sanity check: the rename config must not contain CALIB PRO_CATGs
# configured as NONO (would clash with the renaming step)
		if [ -s $DFO_CONFIG_DIR/$RNCONFIG ]
		then
			CHECK_NO=`grep "^CODE" $DFO_CONFIG_DIR/$RNCONFIG | grep -v SCIENCE | grep "[[:space:]]*NONO" | head -1`
			if [ "Q$CHECK_NO" != Q ]
			then
				echo "*** ERROR: $RNCONFIG apparently has PRO_CATG values for CALIB that are configured as NONO.
This might lead to inconsistencies upon renaming. Please check and fix."
				exit -1
			fi
		else
			echo "*** ERROR: no $DFO_CONFIG_DIR/$RNCONFIG found, can't do."
			exit -1
		fi

# check POOL_SIZE
		POOL_SIZE=`grep "^POOL_SIZE" $CONFIG | awk '{print $2}'`
		if [ Q$POOL_SIZE = Q ]
		then
			echo "***WARNING: POOL_SIZE is required for mcalib processing; set to 10 here."
			POOL_SIZE=10
		elif ([ $POOL_SIZE != 10 ] && [ $POOL_SIZE != 15 ] && [ $POOL_SIZE != 30 ])
		then
			echo "***WARNING: POOL_SIZE should be any of 10|15|30 days; set to 10 here."
			POOL_SIZE=10
		fi

# find last date (a bit complex but required to match months properly)
# (probe the 31st/30th/29th: qcdate round-trips only valid dates into the
# same month; NEW_SIZE is the size of the final, padded pool)
		CHECK_MJD=`qcdate ${MONTH}-31`
		NEW_LAST=`qcdate $CHECK_MJD | cut -c1-7`
		if [ "$NEW_LAST" != "${MONTH}" ]
		then
			CHECK_MJD=`qcdate ${MONTH}-30`
			NEW_LAST=`qcdate $CHECK_MJD | cut -c1-7`
			if [ $NEW_LAST != ${MONTH} ]
			then
				CHECK_MJD=`qcdate ${MONTH}-29`
				NEW_LAST=`qcdate $CHECK_MJD | cut -c1-7`
				if [ $NEW_LAST != ${MONTH} ]
				then
					NEW_LAST=${MONTH}-28
					case $POOL_SIZE in
					 "10" ) NEW_SIZE=8 ;;
					 "15" ) NEW_SIZE=13 ;;
					 "30" ) NEW_SIZE=28 ;;
					esac
				else
					NEW_LAST=${MONTH}-29
					case $POOL_SIZE in
					 "10" ) NEW_SIZE=9 ;;
					 "15" ) NEW_SIZE=14 ;;
					 "30" ) NEW_SIZE=29 ;;
					esac
				fi
			else
				NEW_LAST=${MONTH}-30
				case $POOL_SIZE in
				 "10" ) NEW_SIZE=10 ;;
				 "15" ) NEW_SIZE=15 ;;
				 "30" ) NEW_SIZE=30 ;;
				esac
			fi
		else
			NEW_LAST=${MONTH}-31
			case $POOL_SIZE in
			 "10" ) NEW_SIZE=11 ;;
			 "15" ) NEW_SIZE=16 ;;
			 "30" ) NEW_SIZE=31 ;;
			esac
		fi

# other POOL_SIZES could be added here
		if [ $POOL_SIZE = 10 ]
		then
			$TOOL_NAME -d ${MONTH}-10 $PARAM $PARAM2 $PARAM3 $PARAM4 -s -R 
			$TOOL_NAME -d ${MONTH}-20 $PARAM $PARAM2 $PARAM3 $PARAM4 -s -R
			$TOOL_NAME -d $NEW_LAST   $PARAM $PARAM2 $PARAM3 $PARAM4 -s -R -Y $NEW_SIZE
		elif [ $POOL_SIZE = 15 ]
		then
			$TOOL_NAME -d ${MONTH}-15 $PARAM $PARAM2 $PARAM3 $PARAM4 -s -R 
			$TOOL_NAME -d $NEW_LAST   $PARAM $PARAM2 $PARAM3 $PARAM4 -s -R -Y $NEW_SIZE
		else
			$TOOL_NAME -d $NEW_LAST   $PARAM $PARAM2 $PARAM3 $PARAM4 -s -R -Y $NEW_SIZE
		fi	
		exit
	fi	

# =======================================================================
# 0.4.4 monthly pattern module (SCIENCE)
# =======================================================================

	DAY=1
	while [ $DAY -lt 32 ]
	do
		if [ $DAY -lt 10 ]
		then
			$TOOL_NAME -d ${MONTH}-0$DAY $PARAM $PARAM2 $PARAM3 $PARAM4 -s -R 
		else
# DAY>28: check if ${MONTH}-$DAY is a valid date, otherwise skip
# if invalid date, this will return either nothing (because of 2>/dev/null) or ERROR 
			CHECK_REAL=`qcdate ${MONTH}-$DAY 2>/dev/null`
			if [ Q$CHECK_REAL != Q ] && [ Q$CHECK_REAL != QERROR ]
			then
				$TOOL_NAME -d ${MONTH}-$DAY  $PARAM $PARAM2 $PARAM3 $PARAM4 -s -R
				LAST_REAL=${MONTH}-$DAY
			fi
		fi
		DAY=`echo $DAY | awk '{print $1+1}'`
	done

# =======================================================================
# 0.4.5 QCJOB_CONCENTRATION: create the big QCJOB file
# =======================================================================

	if [ $QCJOB_CONCENTRATION = YES ] && [ Q$MOVEP_ONLY != QYES ]
	then
		PLOG="$TMP_DIR/phoenix1.log"
		rm -f $PLOG
		writeLog "======================================================================================================"
		writeLog "Execution of concentrated QC job file: appended to this log because it is the last one of the month."
		writeLog "8.2 QCJOB_CONCENTRATION = YES: We create the concentrated QC job file ..."

		if [ ! -s $DFO_JOB_DIR/execQC_SCIENCE_${MONTH} ]
		then
			writeLog "... no jobs found! Exit."
			exit
		fi
			
		chmod u+x $DFO_JOB_DIR/execQC_SCIENCE_${MONTH}
		cat > $DFO_JOB_DIR/conc_JOBS_PHOENIX_${MONTH} <<EOT
vultur_exec_cascade --dagId=QCSCIENCE_${MONTH}_all --jobs=$DFO_JOB_DIR/execQC_SCIENCE_${MONTH} --wait | tee -a $TMP_DIR/phoenix1.log
EOT

# add sci_QC flag for all dates
		grep "sci_QC" $DFO_JOB_DIR/JOBS_PHOENIX_${MONTH}* | sed "s/^.*echo/echo/" | sort -u >> $DFO_JOB_DIR/conc_JOBS_PHOENIX_${MONTH}
# refresh getStatusAB for all dates
		grep "getStatusAB -d " $DFO_JOB_DIR/JOBS_PHOENIX_${MONTH}* | sed "s/^.*getStatusAB/getStatusAB/" | grep -v "\-r" | sort -u | sed "$,$ s/\&//" >> $DFO_JOB_DIR/conc_JOBS_PHOENIX_${MONTH}

		writeLog "... $DFO_JOB_DIR/conc_JOBS_PHOENIX_${MONTH} ready for execution." 

		if [ $CREATE_ONLY = YES ]
		then
			exit
		fi

		writeLog "QCJOB_CONCENTRATION = YES: monthly QC part starts here"
		writeLog "8.3 Call $DFO_JOB_DIR/conc_JOBS_PHOENIX_${MONTH} (concentrated QC part) ..."
# NOTE(review): DATE is normally empty in monthly mode, so this glob
# likely expands to $DFS_PRODUCT/*//*fits -- verify intended
		chmod u+w $DFS_PRODUCT/*/$DATE/*fits 2>/dev/null
		chmod u+x $DFO_JOB_DIR/conc_JOBS_PHOENIX_${MONTH}

# do it
		$DFO_JOB_DIR/conc_JOBS_PHOENIX_${MONTH}

# distribute log file to all logs from that month; update stats
		rm -f $TMP_DIR/ph_conc_dates
		cat $DFO_JOB_DIR/conc_JOBS_PHOENIX_${MONTH} | grep getStatusAB | awk '{print $3}' > $TMP_DIR/ph_conc_dates
		N_DATES=`cat $TMP_DIR/ph_conc_dates | wc -l`
# average the total DAG execution time over the dates of the month,
# replacing the per-date TOT_TIMEQC_<date> placeholders in the stats file
		TOT_TIME3=`grep "DAG with ID: QCSCIENCE" $PLOG | sed "s/^.*Total time://" | sed "s/s//" | awk '{print $1}'`
		TOT_TIME2=`echo $TOT_TIME3 $N_DATES | awk '{printf "%5.2f\n",$1/$2}'`
		
		for D in `cat $TMP_DIR/ph_conc_dates`
		do
			sed -i -e "s|TOT_TIMEQC_$D|$TOT_TIME2|"  $STAT_FILE
			if [ $D = $LAST_REAL ]
			then
				cat $TMP_DIR/phoenix1.log >> $DFO_MON_DIR/phoenix_$D.log
			fi
		done

# now call mode -M which needs to be done day by day
		if [ Q$SECOND_CALL = QYES ]
		then
			for D in `cat $DFO_JOB_DIR/conc_JOBS_PHOENIX_${MONTH} | grep getStatusAB | awk '{print $3}'`
			do
				$TOOL_NAME -d $D -M
			done
# final cleanup of monthly jobs
			rm -f $DFO_JOB_DIR/conc_JOBS_PHOENIX_${MONTH} $DFO_JOB_DIR/execQC_SCIENCE_${MONTH}
		fi
	fi
	exit
fi

# =======================================================================
# 0.4.6 DEEP_MODE: convert RUN_ID to pseudo-date
# =======================================================================
# DEEP mode is keyed by run_ID rather than by date; translate the run_ID
# into a pseudo-date so that all downstream per-date logic can be reused.
if [ $DEEP_MODE = YES ]
then
	DATE=`createPseudoDate $RUN_ID`
# createPseudoDate apparently signals failure by returning 'X' -- TODO confirm
	if [ $DATE = X ]
	then
		echo "***ERROR: can't convert run_ID $RUN_ID into pseudo-DATE. Exit."
# bug fix: 'exit -1' is not a valid exit status in POSIX sh (dash rejects it);
# use the conventional error status 1
		exit 1
	fi

	AB_LIST=AB_list_${MODE}_${DATE}
fi

# =======================================================================
# 1.0 basic operation: get/create ABs for DATE 
# =======================================================================

# MCAL mode: take the pool size from the command line (NEW_SIZE) if given,
# otherwise fall back to the POOL_SIZE entry in the config file.
if [ $MCAL_MODE = YES ]
then
	if [ Q$NEW_SIZE = Q ]
	then
		POOL_SIZE=$(grep "^POOL_SIZE" $CONFIG | awk '{print $2}')
	else
		POOL_SIZE=$NEW_SIZE
	fi
fi

if [ $MOVEP_ONLY = NO ]
then

# check if this date is not suppressed in configuration
	if [ $SUPPRESS_YN = YES ]
	then
		CHECK_SUPPRESS=`grep "^SUPPRESS_DATE" $CONFIG | grep $PROC_INSTRUMENT| grep $DATE`
		if [ "Q$CHECK_SUPPRESS" != Q ]
		then
# notify the operator and stop; suppressed dates can only be run interactively
			cat > $TMP_DIR/ph_mail <<EOT
This date is configured as SUPPRESS_DATE in $CONFIG. To execute, call 'phoenix -d $DATE' on the command line, 
or remove the entry.
EOT
			mail -s "PHOENIX: Date $DATE not executed since configured as SUPPRESS_DATE in $CONFIG" $OP_ADDRESS <$TMP_DIR/ph_mail
			exit
		fi
	fi

# start a fresh tool log for this run
	rm -f $PLOG
	writeLog "$TOOL_NAME v${TOOL_VERSION} started on `whoami`@`hostname` for $RELEASE. All timestamps in UT."
	if [ Q$DATE != Q ]
	then
		writeLog "Call is '$TOOL_NAME -d $DATE'"
	elif [ $DEEP_MODE = YES ]
	then
		 writeLog "Call is '$TOOL_NAME -r $RUN_ID'"
	fi

	if [ Q$OTHER_RESOURCE != Q ]
	then
		writeLog "$TOOL_NAME using resource file $OTHER_RESOURCE"
	fi

# log the effective operational mode (COMPLETE / CREATE_SCIAB / concentration / MCAL)
	if [ $COMPLETE_MODE = YES ] 
	then
		writeLog "COMPLETE_MODE=$COMPLETE_MODE: $CALSEL_OCA_RULE selected."
		writeLog "COMPLETE_MODE=$COMPLETE_MODE: $DFOS_OCA_RULE used to construct ${DFO_FILE_NAME}_association.h."
		writeLog "COMPLETE_MODE=$COMPLETE_MODE: RAWFILE_SOURCE=$RAWFILE_SOURCE for $DATE."
	fi

	if [ $COMPLETE_MODE = NO ] && [ $CREATE_SCIAB = YES ]
	then
		writeLog "We have met the conditions for CREATE_SCIAB=YES, creating SCIENCE ABs instead of downloading them."
		writeLog "CREATE_SCIAB=YES: $CALSEL_OCA_RULE selected."
		writeLog "CREATE_SCIAB=YES: $DFOS_OCA_RULE used to construct ${DFO_FILE_NAME}_association.h."
		writeLog "CREATE_SCIAB=YES: RAWFILE_SOURCE=$RAWFILE_SOURCE for $DATE."
	fi

	if [ $QCJOB_CONCENTRATION = YES ] && [ $MONTHLY_MODE = YES ]
	then
		writeLog "QCJOB_CONCENTRATION=YES: The QC jobs for this date are executed with all others from the same month."
	fi

	if [ $MCAL_MODE = YES ] && [ $MONTHLY_MODE = YES ]
	then
		writeLog "[Up to the processing, the log applies to the whole month worth of data.]"

# pool runs backwards from DATE over POOL_SIZE days; qcdate presumably does the date arithmetic -- external dfos tool
		LAST_DATE1=`qcdate $DATE -$POOL_SIZE`
		LAST_DATE=`qcdate $LAST_DATE1 +1`
		writeLog "1. Prepare $MODE processing ($LAST_DATE...$DATE) ..."
	elif [ $MCAL_MODE = YES ] && [ $MONTHLY_MODE = NO ]
	then
		POOL_SIZE=1
		LAST_DATE=$DATE
		writeLog "1. Prepare $MODE processing ($DATE) ..."
	fi
	
# scratch area for incoming/created ABs
	if [ ! -d $DFO_AB_DIR/TMP ]
	then
		mkdir $DFO_AB_DIR/TMP
	fi

	rm -f $DFO_AB_DIR/TMP/* 2>/dev/null
	rm -f $TMP_DIR/ph_list_ABs* $TMP_DIR/ph_rawtypes
# remove old prod_comments since many are auto-filled by pgi's and it's better to refresh
	rm -f $DFO_LST_DIR/PRODCOM/prod_comment_$DATE.txt	

# =======================================================================
# 1.1 MCAL_MODE = NO (science processing): 
#     download ABs via scp from configured data source,
#     OR (after $DFOS_NOAB_DATE, because of stop of SCIENCE AB production in QC1):
#     create SCIENCE ABs
# =======================================================================

	if [ $COMPLETE_MODE = YES ] || [ $CREATE_SCIAB = YES ]
	then
# checking for existence: not safe enough
		rm -rf $DFO_HDR_DIR/$DATE
		writeLog "1.1 No download of historical ABs, we scan the hdr directory for science data ..."
		writeLog "1.1.1 Call dataLoader -t h -d $DATE ..."
		dataLoader -t h -d $DATE | sed "s/^.*/	&/" | tee -a $PLOG 

	elif [ Q$DATE != Q ] && [ $MCAL_MODE = NO ] && [ $DEEP_MODE = NO ]
	then
		writeLog "1.1 Download $MODE ABs for $DATE ..."
# NOTE(review): this elif is only reached when COMPLETE_MODE is not YES (see the
# if above), so the following check can never fire -- apparently dead code
		if [ $COMPLETE_MODE = YES ]
		then
			writeLog "    This is done for reference, to have the SCIENCE dataset to be exposed to CalSelector later on."
		fi
		writeLog "    ORIG_AB_DIR used: ${DFO_WEB_SERVER}:$ORIG_AB_DIR"
		scp -o $SCP_TIMEOUT -o BatchMode=yes ${DFO_WEB_SERVER}:${ORIG_AB_DIR}/$DATE/*.ab   $DFO_AB_DIR/TMP 1>/dev/null 2>/dev/null
		scp -o $SCP_TIMEOUT -o BatchMode=yes ${DFO_WEB_SERVER}:${ORIG_AB_DIR}/$DATE/*.alog $DFO_AB_DIR/TMP 1>/dev/null 2>/dev/null
		chmod u+w $DFO_AB_DIR/TMP/* 2>/dev/null
	elif [ Q$DATE != Q ] && [ $MCAL_MODE = NO ] && [ $DEEP_MODE = YES ]
	then
# DEEP mode: ABs are organised per run_ID instead of per date
		writeLog "1.1 Download $MODE ABs for $RUN_ID ..."
		writeLog "    ORIG_AB_DIR used: ${DFO_WEB_SERVER}:$ORIG_AB_DIR"
		scp -o $SCP_TIMEOUT -o BatchMode=yes ${DFO_WEB_SERVER}:${ORIG_AB_DIR}/$RUN_ID/*.ab   $DFO_AB_DIR/TMP 1>/dev/null
		scp -o $SCP_TIMEOUT -o BatchMode=yes ${DFO_WEB_SERVER}:${ORIG_AB_DIR}/$RUN_ID/*.alog $DFO_AB_DIR/TMP 1>/dev/null 2>/dev/null

# for the QC procedures; it also goes to logs
		scp -o $SCP_TIMEOUT -o BatchMode=yes ${DFO_WEB_SERVER}:${ORIG_AB_DIR}/$RUN_ID/overview_*.txt $DFO_AB_DIR 1>/dev/null
		if [ ! -s $DFO_AB_DIR/overview_${RUN_ID}.txt ]
		then
			echo ""
			echo "***ERROR: no overview file found for ${RUN_ID}, can't do. Exit."
# bug fix: 'exit -1' is not a valid exit status in POSIX sh; use 1
			exit 1
		fi
		writeLog "    DEEP_MODE=$DEEP_MODE; dummy DATE=$DATE"

# for TARGET_SELECT=YES: now remove all non-participating ABs
		if [ $TARGET_SELECT = YES ]
		then
			writeLog "    TARGET_SELECT=YES: select $DPC_AB and depending ones ..."
			rm -rf $DFO_AB_DIR/TMP3
			mkdir $DFO_AB_DIR/TMP3
			#for DEP in `grep "^MCALIB" $DFO_AB_DIR/TMP/$DPC_AB | awk '{print $5}' | sed "s/_.*ab//"`
# we go 2 levels deep here, may be insufficient in general!
# a) check for depending ABs of DPC
			for DEP in `grep "^MCALIB" $DFO_AB_DIR/TMP/$DPC_AB | grep VIRTUAL | awk '{print $5}' | sort -u`
			do
# b) check for depending ABs of DEP
				for DEP2 in `grep "^MCALIB" $DFO_AB_DIR/TMP/$DEP | grep VIRTUAL | awk '{print $5}' |  sed "s/_.*ab//" | sed "s/.ab//" | sort -u`
				do
					mv $DFO_AB_DIR/TMP/${DEP2}* $DFO_AB_DIR/TMP3/ 2>/dev/null
				done

# NOTE(review): DEP1 is computed but never used in this section -- kept in case
# it is referenced further down the file
				DEP1=`echo $DEP | sed "s/_.*ab//"`
				mv $DFO_AB_DIR/TMP/${DEP}* $DFO_AB_DIR/TMP3/ 2>/dev/null
			done
# keep only the selected ABs in TMP
			rm -f $DFO_AB_DIR/TMP/* 2>/dev/null
			mv $DFO_AB_DIR/TMP3/* $DFO_AB_DIR/TMP/			
		fi
	fi

# =======================================================================
# 1.2 MCAL_MODE = YES (calib processing): create ABs
# 1.2.1 Prepare: Download headers, create report
# =======================================================================

	if [ Q$DATE != Q ] && [ $MCAL_MODE = YES ]
	then
		writeLog "1.1 Create $MODE ABs (MCAL_MODE=YES) ..."

		if [ ! -d $DFO_HDR_DIR/$DATE ]
		then
# Try and download headers, plus call createReport
			writeLog "1.1.1 Call dataLoader -t h -d $DATE ..."
			dataLoader -t h -d $DATE | sed "s/^.*/	&/" >> $PLOG
			writeLog "1.1.2 Call createReport -d $DATE ..."
			createReport -d $DATE | sed "s/^.*/	&/" >> $PLOG
			writeLog "    ... list_${PROC_INSTRUMENT}_${DATE}_data.html and list_${PROC_INSTRUMENT}_${DATE}.txt created."
# call the optional instrument-specific DATA_PGI plugin (must exist under $DFO_BIN_DIR)
			if [ Q$DATA_PGI != Q ] 
			then
				if [ ! -s $DFO_BIN_DIR/$DATA_PGI ]
				then
					writeLog "ERROR: \$DATA_PGI $DATA_PGI defined in config file but not found under $DFO_BIN_DIR. Check and start again."
					exit
				else
					writeLog "	calling $DATA_PGI ..."
					$DFO_BIN_DIR/$DATA_PGI $DATE | sed "s/^.*/		&/" | tee -a $PLOG
				fi
			fi
		else
			writeLog "1.1.1-1.1.2 skipped, data report existing."
		fi

# =======================================================================
# 1.2.2 MCAL_MODE = YES: fill date memory
# =======================================================================
		writeLog "1.1.3 Fill header directories for pool batch ($POOL_SIZE days) ..."

# delete VCALs and MCALs, to get rid of remnants from earlier processing
		rm -f $DFO_CAL_DIR/VCAL/*hdr 2>/dev/null
		rm -f $DFO_CAL_DIR/MCAL/*hdr 2>/dev/null

# symmetrical around $DATE, in pre-defined bins: 10/15/30 days, to be called in monthly mode in the end!
# all pool headers are collected under the artificial date 2099-99-99
		rm -f $TMP_DIR/ph_list_origdates $TMP_DIR/ph_list_dates
		COLLECT_DIR=$DFO_HDR_DIR/2099-99-99
		rm -rf $COLLECT_DIR
		mkdir  $COLLECT_DIR

		if [ "$FILTER_RAW" = NO ]
		then
			writeLog "  no filterRaw applied"
		else
			writeLog "  filterRaw is applied (day by day)"
		fi

# begin of VCAL loop: walk backwards from $DATE over $POOL_SIZE days and
# collect all pool headers into $COLLECT_DIR
		DAY=1
		VC_DATE=$DATE
		while [ $DAY -le $POOL_SIZE ]
		do
			echo $VC_DATE >> $TMP_DIR/ph_list_dates

# check for headers and reports
			if [ -d $DFO_HDR_DIR/$VC_DATE ] 
			then
				CHECK_EXIST=`ls $DFO_HDR_DIR/$VC_DATE/*hdr 2>/dev/null | head -1` 
				if [ Q$CHECK_EXIST = Q ]
				then
					rm -rf $DFO_HDR_DIR/$VC_DATE	#to avoid dataLoader asking for confirmation
				fi
			fi	

			if [ ! -d $DFO_HDR_DIR/$VC_DATE ] || [ Q$CHECK_EXIST = Q ]
			then
				writeLog "    $VC_DATE: no hdrs found, downloading ..."
				dataLoader -t h -d $VC_DATE | sed "s/^.*/	&/" >> $PLOG
				if [ ! -s $DFO_LST_DIR/list_${PROC_INSTRUMENT}_${VC_DATE}.txt ]
				then
					writeLog "      no report found; calling createReport ..."
					createReport -d $VC_DATE | sed "s/^.*/	[createReport] &/" >> $PLOG
				fi

# call DATA_PGI		
				if [ Q$DATA_PGI != Q ] 
				then
					if [ ! -s $DFO_BIN_DIR/$DATA_PGI ]
					then
						writeLog "ERROR: \$DATA_PGI $DATA_PGI defined in config file but not found under $DFO_BIN_DIR. Check and start again."
						exit
					else
						writeLog "	calling \$DATA_PGI $DATA_PGI ..."
						$DFO_BIN_DIR/$DATA_PGI $VC_DATE | sed "s/^.*/		&/" | tee -a $PLOG
					fi
				fi
			elif [ $FILTER_RAW = YES ]
			then
				writeLog "    $VC_DATE ..."
			fi

# call filterRaw 
			if [ $FILTER_RAW = YES ]
			then
				filterRaw -d $VC_DATE -m FILTER -H |  sed "s/^.*/	[filterRaw] &/" >> $PLOG
			fi

# now we can suppress errors since having no headers is now ok
# ph_list_origdates remembers each header's true date (needed later to patch the pool ABs)
			ls $DFO_HDR_DIR/$VC_DATE/*hdr 2>/dev/null | sed "s/^.*/& $VC_DATE/" >> $TMP_DIR/ph_list_origdates #for bookkeeping
			cp $DFO_HDR_DIR/$VC_DATE/*hdr $COLLECT_DIR 2>/dev/null

# step one day back; qcdate presumably returns DATE minus DAY days -- external dfos tool
			VC_DATE=`qcdate $DATE -$DAY`
			DAY=`echo $DAY | awk '{print $1+1}'`
		done

# input data pool is now $COLLECT_DIR
# with this trick we can offer all input data to ABbuilder in one go, thereby constructing all dependencies for the pool in a 
# valid way, while preserving the day-by-day structure needed for processing.

# =======================================================================
# 1.2.3 Check for hidden files
# Since dataLoader downloads all files, we have to de-select hidden files
# now. This is of particular importance for the MCAL mode. The query is 
# taken from createReport (0.8.8).
# [Might be also useful for SCIENCE but currently not implemented there.]
# =======================================================================

		writeLog "1.1.4 Check for files hidden in archive ... "
	
# build the SQL query: select all collected dp_ids which have hide_flag set
		cat > $TMP_DIR/ph_public <<EOT
SELECT
	DP.dp_id
FROM
	bookkeeping..dp_access BK, obs_metadata..data_products DP
WHERE
	BK.file_id = DP.dp_id
AND
	hide_flag is not NULL
AND
	dp_id in (
EOT

# quote each dp_id and append a comma; the last sed strips the trailing comma on the final line to keep the IN () list valid
		ls $COLLECT_DIR | grep hdr | sed "s/^.*/\"&\"/" | sed "s/.hdr//" | sed "s/^.*/&,/" | sed "$,$ s/,//" >> $TMP_DIR/ph_public	

		cat >> $TMP_DIR/ph_public <<EOT
)
GO
EOT
		rm -f $TMP_DIR/ph_public_results
# strip the isql decoration (column header, dashes, row count, empty lines), keep the bare dp_ids
		isql -Uqc -S$ARCH_SERVER -w999 -P`cat $ARCH_PWD` -i $TMP_DIR/ph_public  | grep -v affected | sed "1,2 d" | grep -v "\-\-\-\-" | sed "/^$/d" > $TMP_DIR/ph_public_results

# result file should be empty, otherwise we have found a hidden file
# Note that this list contains all hidden files, many of which probably would not be used to create ABs anyway.
		if [ -s $TMP_DIR/ph_public_results ]
		then
			for H in `cat $TMP_DIR/ph_public_results`
			do
				rm -f $COLLECT_DIR/${H}.hdr
			done

			writeLog "  ... hidden files found and removed."
		else	
			writeLog "  ABs checked for hidden files, none found."
		fi

# =======================================================================
# 1.2.4 MCAL_MODE = YES: call createAB
# =======================================================================

		writeLog "1.1.5 Check for rejected files ..."
		writeLog "  (files not hidden but known to be useless for automatic $TOOL_NAME processing)"

# we check for list_REJECTED; create a commented template on first use
		if [ ! -s $DFO_MON_DIR/list_REJECTED ]
		then
			cat > $DFO_MON_DIR/list_REJECTED <<EOT
# This is the list of rejected file IDs for release ${RELEASE}.
# It is read by $TOOL_NAME and evaluated to suppress ABs and VCALs
# with those file IDs. It is typically filled for files that are
# not hidden, because e.g. they might still have limited value
# (partly saturated fibre flats that are still useful for localization)
# but are better suppressed for automatic processing.
# file_ID			comment
# ======================================================================
EOT
		fi

# drop the headers of all rejected files from the pool
		for REJ in `cat $DFO_MON_DIR/list_REJECTED | grep -v "^#" | awk '{print $1}'`
		do
			rm -f $COLLECT_DIR/${REJ}*
		done
		writeLog "    ... done."
 
		writeLog "1.1.6 Create $MODE ABs for period $LAST_DATE...$DATE ..."
		writeLog "  includes creating VCALs for the pool (takes some time)"

		if [ $OCA_SCHEME = VERSIONED ]
		then
			checkOCAConfig
		fi

# createAB runs on the artificial pool date 2099-99-99 (filled in 1.2.2)
		createAB -d 2099-99-99 -m $MODE -a -D 2099-99-99 | sed "s/^.*/	&/" >> $PLOG

# check for VCALs being created, send a mail if none found (unless configured to be skipped)
# likely to be obsolete since createAB removes them by itself!
		if [ Q$CHECK_VCAL != QNO ]
		then
			CHECK_VCAL=`ls $DFO_CAL_DIR/VCAL/r.*hdr 2>/dev/null | head -1`
			if [ Q$CHECK_VCAL = Q ]
			then
				MONTH=`echo $DATE | cut -c1-7`
				echo "No VCALs found in $DFO_CAL_DIR/VCAL. Check if createAB works correctly!" > $TMP_DIR/ph_vcal
				mail -s "PHOENIX : no VCALs found in $DFO_CAL_DIR/VCAL for $MONTH" $OP_ADDRESS <$TMP_DIR/ph_vcal
			fi
		fi

# get IDs of ABs hidden by filterRaw (just in case we need it!)
		#writeLog "... done"
		rm -f $TMP_DIR/ph_filterRaw
		grep "configured RAW_MINNUM:" $PLOG | awk '{print $2}' | sed "s/ab:/ab/" | sort -u > $TMP_DIR/ph_filterRaw
		if [ -s $TMP_DIR/ph_filterRaw ]
		then
			echo "		- Filtered ABs because of RAW_MINNUM criterion:"
			cat $TMP_DIR/ph_filterRaw 2>/dev/null | sed "s/^.*/	&/" 
		fi

# we still need the ABs in $DFO_AB_DIR/TMP and hence move them back:
# on the way, replace the artificial pool date 2099-99-99 in each AB by the
# file's true date (remembered in ph_list_origdates, see VCAL loop)
		cd $DFO_AB_DIR
		if [ -s $DFO_MON_DIR/AB_list_CALIB_2099-99-99 ]
		then
			cat $DFO_MON_DIR/AB_list_CALIB_2099-99-99 | awk '{print $1}' > $TMP_DIR/ph_list_ABs3 
			for AB in `cat $DFO_MON_DIR/AB_list_CALIB_2099-99-99 | awk '{print $1}'`
			do
# first the BATCH_ID
				sed -i -e "/BATCH_ID/s|CALIB.*|CALIB_${DATE}|" $AB
# then the proper date
				AB1=`echo $AB | sed "s/_.*ab//" | sed "s/.ab//"`
				PROP_DATE=`grep $AB1 $TMP_DIR/ph_list_origdates | awk '{print $2}' | head -1`
# all sections related to properties of this AB
				sed -i -e "/^DATE/s/2099-99-99/${PROP_DATE}/" $AB
				sed -i -e "/^PROD_PATH/s/2099-99-99/${PROP_DATE}/" $AB
				sed -i -e "/DFO_RAW_DIR/s/2099-99-99/${PROP_DATE}/" $AB
# sections related to other files: VIRTUAL products
				rm -f $TMP_DIR/ph_virtualp
				grep "^MCALIB" $AB | grep DFS_PRODUCT | awk '{print $3}' > $TMP_DIR/ph_virtualp
				if [ -s $TMP_DIR/ph_virtualp ]
				then
# each virtual product gets the true date of its own raw file
					for VP in `cat $TMP_DIR/ph_virtualp | sed "s|/| |g" | awk '{print $4}' | sed "s/r.//" | sed "s/_.*//"`
					do
						PROP_DATE=`grep $VP $TMP_DIR/ph_list_origdates | awk '{print $2}' | sort -u`
						sed -i -e "/$VP/s/2099-99-99/${PROP_DATE}/" $AB
					done
				fi

				ALOG=`echo $AB | sed "s/.ab/.alog/"`
				mv $DFO_AB_DIR/$AB $DFO_AB_DIR/TMP/
				mv $DFO_AB_DIR/$ALOG $DFO_AB_DIR/TMP/
			done
			chmod u+w $DFO_AB_DIR/TMP/* 2>/dev/null
		fi

# for later SETUP filtering of ABs
		cd $DFO_AB_DIR/TMP
	fi
	
	writeLog "... done."

# =======================================================================
# 1.3 Data reports (already done for MCAL_MODE=YES but doesn't hurt)
# =======================================================================

	if [ $MCAL_MODE = NO ] && [ $DEEP_MODE = NO ] && [ $HDR_DOWN = YES ]
	then
		if [ ! -d $DFO_HDR_DIR/$DATE ]
		then
			writeLog "1.2.1 Call dataLoader -t h -d $DATE ..."
			dataLoader -t h -d $DATE | sed "s/^.*/	&/" >> $PLOG
		fi
	fi

	if [ -s $DFO_LST_DIR/REPORT/list_${PROC_INSTRUMENT}_${DATE}_data.html ]
	then
		writeLog "1.2 Data report: existing."
# dummy DATE
	elif [ $DEEP_MODE = YES ]
	then
		writeLog "1.2 Data report: skipped for ${DATE}."
	else
# report missing: first try to fetch it from the web server
		writeLog "1.2 Data report: not existing. Check out ${DFO_WEB_SERVER}:${ORIG_LST_DIR} ..."
		if [ ! -d $DFO_LST_DIR/REPORT ]
		then
			mkdir $DFO_LST_DIR/REPORT
		fi
		scp -o $SCP_TIMEOUT -o BatchMode=yes ${DFO_WEB_SERVER}:${ORIG_LST_DIR}/list_${PROC_INSTRUMENT}_${DATE}_data.html $DFO_LST_DIR/REPORT/ 1>/dev/null 2>/dev/null
		scp -o $SCP_TIMEOUT -o BatchMode=yes ${DFO_WEB_SERVER}:${ORIG_LST_DIR}/list_${PROC_INSTRUMENT}_${DATE}.txt $DFO_LST_DIR/ 1>/dev/null 2>/dev/null

		if [ -s $DFO_LST_DIR/REPORT/list_${PROC_INSTRUMENT}_${DATE}_data.html ]
        	then
			writeLog "... list_${PROC_INSTRUMENT}_${DATE}_data.html and list_${PROC_INSTRUMENT}_${DATE}.txt downloaded."
		else

# not existing at source? Try and download headers, plus call createReport 
			if [ $HDR_DOWN = NO ]
			then
				writeLog "... not found. Download headers and create list ..."
				writeLog "1.2.1 Call dataLoader -t h -d $DATE ..."
				dataLoader -t h -d $DATE | sed "s/^.*/	&/" >> $PLOG
			fi
			writeLog "1.2.2 Call createReport -d $DATE ..."
			createReport -d $DATE | sed "s/^.*/	&/" >> $PLOG
			writeLog "... list_${PROC_INSTRUMENT}_${DATE}_data.html and list_${PROC_INSTRUMENT}_${DATE}.txt created."
		fi
	fi

# =======================================================================
# 1.4 SCIENCE: Filter ABs for DPR_CATG=SCIENCE (or ANY if configured)
# =======================================================================

	if [ $MCAL_MODE = NO ] && [ Q$RAWFILE_SOURCE != QRAW_HDR ]
	then
		writeLog "1.3 Filter for SCIENCE ..."
		cd $DFO_AB_DIR/TMP
		CHECK_EXIST=`ls | grep \.ab | tail -1`

		if [ Q$CHECK_EXIST != Q ]
		then
# remove non-science ABs (plus companion files, via the trailing wildcard in the sed)
			grep "^DPR_CATG" *.ab | awk '{print $2}' | sort -u > $TMP_DIR/ph_dpr_catg
			if [ $DP_CATG = SCIENCE ]
			then
				for DPC in `cat $TMP_DIR/ph_dpr_catg | grep -v SCIENCE`
				do
					rm `grep "^DPR_CATG" *.ab | grep $DPC | sed "s/.ab.*/\.*/"`
				done
			else
				writeLog "No filtering for SCIENCE done."
			fi
		fi

# =======================================================================
# 1.5 SCIENCE: Filter out pseudo-science 
# =======================================================================
# remove daytime ABs (file timestamps T11..T21, i.e. taken during the day)
		writeLog "1.3.1 Pseudo-SCIENCE (daytime) ..."
		CHECK_EXIST=`ls | grep \.ab | tail -1`
		if [ Q$CHECK_EXIST != Q ]
		then
			rm -f $TMP_DIR/ph_list_60
			ls ${DFO_FILE_NAME}*T1[1-9]*.ab > $TMP_DIR/ph_list_60 2>/dev/null
			ls ${DFO_FILE_NAME}*T2[0-1]*.ab >> $TMP_DIR/ph_list_60 2>/dev/null
			if [ -s $TMP_DIR/ph_list_60 ]
			then
				writeLog "Removed pseudo-SCIENCE (daytime):"
# NOTE(review): appends to $LOG while most sections use $PLOG -- confirm intended
				grep ".ab" $TMP_DIR/ph_list_60 | sed "s/^.*/   &/" | tee -a $LOG
				rm `cat $TMP_DIR/ph_list_60`
			fi
		fi

# suppress 060 ABs (technical/test programmes) unless configured otherwise
		if [ $ACCEPT_060 = NO ]
		then
			writeLog "1.3.2 Filter for 60.A ..."
			CHECK_EXIST=`ls | grep \.ab | tail -1`
			if [ Q$CHECK_EXIST != Q ]
			then
				rm -f $TMP_DIR/ph_list_60
				grep -l "^OBS_PROG_ID[[:space:]].*60\.A" *.ab > $TMP_DIR/ph_list_60
				grep -l "^OBS_PROG_ID[[:space:]].*Maintenance" *.ab >> $TMP_DIR/ph_list_60
				if [ -s $TMP_DIR/ph_list_60 ]
				then
					writeLog "Removed pseudo-SCIENCE (tests):"
					for AB in `cat $TMP_DIR/ph_list_60`
					do
						OBS=`grep "^OBS_PROG_ID" $AB |  awk '{print $2}'` 
						echo "	$AB	$OBS" | tee -a $LOG
						rm `echo $AB | sed "s/.ab/\.*/"`
					done
				fi
			fi
		else
			writeLog "    Pseudo-SCIENCE with 060 run_IDs accepted (check your config if not OK)."
		fi

	elif ([ $COMPLETE_MODE = YES ] && [ $RAWFILE_SOURCE = RAW_HDR ]) || [ $CREATE_SCIAB = YES ]
	then
# RAW_HDR source: extract science candidates from the night-time headers (T22-23, T0x, T10)
		dfits $DFO_HDR_DIR/$DATE/${DFO_FILE_NAME}*T2[2-3]*.hdr 2>/dev/null | fitsort -d dpr.catg obs.prog.id  > $TMP_DIR/ph_sci_hdr
		dfits $DFO_HDR_DIR/$DATE/${DFO_FILE_NAME}*T0*.hdr      2>/dev/null | fitsort -d dpr.catg obs.prog.id >> $TMP_DIR/ph_sci_hdr
		dfits $DFO_HDR_DIR/$DATE/${DFO_FILE_NAME}*T10*.hdr     2>/dev/null | fitsort -d dpr.catg obs.prog.id >> $TMP_DIR/ph_sci_hdr

# Filter for SCIENCE
		if [ $DP_CATG = "SCIENCE" ] && [ -s $TMP_DIR/ph_sci_hdr ]
		then
			writeLog "1.3 Filter for SCIENCE, 60.A ..."
			cat $TMP_DIR/ph_sci_hdr | grep SCIENCE > $TMP_DIR/ph_sci_hdr1
			if [ -s $TMP_DIR/ph_sci_hdr1 ] 
			then
				mv $TMP_DIR/ph_sci_hdr1 $TMP_DIR/ph_sci_hdr 
			fi

# remove pseudo-science 
			if [ $ACCEPT_060 = NO ]
			then
				writeLog "    ACCEPT_060=$ACCEPT_060: Remove pseudo-SCIENCE (e.g. tests)"
				sed -i -e "/60\.A/d" $TMP_DIR/ph_sci_hdr
				sed -i -e "/Maintenance/d" $TMP_DIR/ph_sci_hdr
			else
				writeLog "    ACCEPT_060=$ACCEPT_060: Pseudo-SCIENCE with 060 run_IDs accepted (check your config if not OK)."
			fi	
		else
			 writeLog "	DP_CATG=$DP_CATG: no filtering for SCIENCE done."
		fi
	fi
	
# =======================================================================
# 1.6 SCIENCE: Filter for configured RAW_TYPEs
# =======================================================================

# NOTE: relies on the current directory being $DFO_AB_DIR/TMP (set above)
	CHECK_EXIST=`ls | grep \.ab | tail -1`
	writeLog "1.4 Filter by RAW_TYPEs ..."

	if [ Q$CHECK_EXIST != Q ]
	then
		if [ $DP_CATG = SCIENCE ]
		then
			grep -l "^DPR_CATG[[:space:]].*${MODE}" *.ab 2>/dev/null > $TMP_DIR/ph_list_ABs
		else
			ls *.ab > $TMP_DIR/ph_list_ABs
		fi
	fi

	NUM_AB_REJ=0
	if [ $MCAL_MODE = NO ] && [ Q$RAWFILE_SOURCE != QRAW_HDR ]
	then
		grep "^RAW_TYPE" $CONFIG | grep $PROC_INSTRUMENT | awk '{print $3}' | sort -u > $TMP_DIR/ph_rawtypes
		if [ ! -s $TMP_DIR/ph_rawtypes ]
		then
			echo "***ERROR: no RAW_TYPEs configured for $PROC_INSTRUMENT. Check $CONFIG. Exit."
			exit
		fi
	
		writeLog "2. Filter by raw_types ..."
# ph_list_ABs1: "<AB> <RAW_TYPE>" pairs; ph_list_ABs2: only the configured types
		if [ -s $TMP_DIR/ph_list_ABs ]
		then
			for AB in `cat $TMP_DIR/ph_list_ABs`
			do
				RT=`grep "^RAW_TYPE" $AB | awk '{print $2}'`
				echo "$AB $RT" >> $TMP_DIR/ph_list_ABs1
			done
		
			for RT in `cat $TMP_DIR/ph_rawtypes`
			do
				grep " ${RT}$" $TMP_DIR/ph_list_ABs1 >> $TMP_DIR/ph_list_ABs2
			done
		fi
		
		if [ ! -s $TMP_DIR/ph_list_ABs2 ]
		then
			writeLog "... none found. Exit."
			exit
		else
			cat $TMP_DIR/ph_list_ABs2 | sort -k1,1 > $TMP_DIR/ph_list_ABs3
		fi
		
# list the ones which have not been selected (ph_list_ABs4), then delete their files
		for AB in `cat $TMP_DIR/ph_list_ABs`
		do
			CHECK_AB=`grep $AB $TMP_DIR/ph_list_ABs3 | awk '{print $1}'`
			if [ "Q$CHECK_AB" = Q ]
			then
				grep $AB $TMP_DIR/ph_list_ABs1 >> $TMP_DIR/ph_list_ABs4
			fi
		done
			
		if [ -s $TMP_DIR/ph_list_ABs4 ]
		then
			NUM_AB_REJ=`cat $TMP_DIR/ph_list_ABs4 | wc -l`
			writeLog "  $NUM_AB_REJ ABs removed (RAW_TYPEs not configured):"
			cat $TMP_DIR/ph_list_ABs4 | sed "s/ /	/"g | sed "s/^.*/	&/" | tee -a $PLOG
			rm `cat $TMP_DIR/ph_list_ABs4 | awk '{print $1}'`
		else
			NUM_AB_REJ=0
		fi
	elif [ $MCAL_MODE = YES ]
	then
		writeLog "2. Filter by raw_types: skipped for MCAL_MODE=YES."
		if [ -s $TMP_DIR/ph_list_ABs ]
		then
			N_SCI_TOT=`cat $TMP_DIR/ph_list_ABs | wc -l`
		else
			N_SCI_TOT=0
		fi
# no check currently, OCA configuration works selectively anyway
		NUM_AB_REJ=0

# for RAW_HDR: we filter later with OCA
	elif [ Q$RAWFILE_SOURCE = QRAW_HDR ] && [ $COMPLETE_MODE = YES ]
	then
		writeLog "2. COMPLETE_MODE=YES and RAWFILE_SOURCE=RAW_HDR: We filter later with the OCA rules."
	elif [ $CREATE_SCIAB = YES ]
	then
		writeLog "2. CREATE_SCIAB=YES: We filter later with the OCA rules."
	fi

# =======================================================================
# 1.7 Filter by SETUPs
# =======================================================================

	if [ Q$RAWFILE_SOURCE != QRAW_HDR ]
	then

# ph_list_ABs5: "<AB> <RAW_TYPE> <SETUP>", where SETUP is the underscore-joined
# list of the AB's RAW_MATCH_KEY values
		for AB in `cat $TMP_DIR/ph_list_ABs3 | awk '{print $1}'`
		do
			SETUP=`grep "^RAW_MATCH_KEY" $AB | grep -v UNDEFINED | grep -v TPL.START | grep -v ARCFILE | awk '{print $2}' | sed "s/^.*=//" | tr "\012" "_" | sed "s/_$//"`
			STRING=`grep $AB $TMP_DIR/ph_list_ABs3`
			echo "$STRING $SETUP" >> $TMP_DIR/ph_list_ABs5
		done
	
		rm -f $TMP_DIR/ph_list_setups
		grep "^SETUP" $CONFIG | grep $PROC_INSTRUMENT | awk '{print $3}' > $TMP_DIR/ph_list_setups
	
		if [ ! -s $TMP_DIR/ph_list_setups ] 
		then
			writeLog "3.1 Filter by setups: none configured, all taken."
			cp $TMP_DIR/ph_list_ABs5 $TMP_DIR/ph_list_ABs6
		else
			writeLog "3.1 Filter by setups..."
# this is a positive list: all configured setups are selected ...
			for SETUP in `cat $TMP_DIR/ph_list_setups`
			do
				grep "$SETUP" $TMP_DIR/ph_list_ABs5 >> $TMP_DIR/ph_list_ABs6
			done
			cat $TMP_DIR/ph_list_ABs6 | sort -u > $TMP_DIR/ph_list_ABs60
			mv $TMP_DIR/ph_list_ABs60 $TMP_DIR/ph_list_ABs6
	
# filter the ones which are not in ph_list_ABs6
			for AB in `cat $TMP_DIR/ph_list_ABs3 | awk '{print $1}'`
			do
				CHECK_AB=`grep $AB $TMP_DIR/ph_list_ABs6 | awk '{print $1}'`
				if [ Q$CHECK_AB = Q ]
				then
					grep $AB $TMP_DIR/ph_list_ABs5 >> $TMP_DIR/ph_list_ABs7
					rm -f $AB 
					rm -f `echo $AB | sed "s/.ab/.tab/"`
				fi
			done
			if [ -s $TMP_DIR/ph_list_ABs7 ]
			then
				NUM_AB_rm=`cat $TMP_DIR/ph_list_ABs7 | wc -l`
				writeLog "  $NUM_AB_rm $MODE ABs removed (SETUPs not configured):"
				cat $TMP_DIR/ph_list_ABs7 | sed "s/ /	/g" | sed "s/^.*/	&/" | tee -a $PLOG
			fi
		fi
	
# =======================================================================
# 1.8 move selected ones to $DFO_AB_DIR
# =======================================================================

		for ABR in `cat $TMP_DIR/ph_list_ABs6 | awk '{print $1}' | sed "s/.ab//"`
		do
			mv ${ABR}.* $DFO_AB_DIR
		done

# delete remaining ones
		rm -f $DFO_AB_DIR/TMP/* 2>/dev/null
		cd $DFO_AB_DIR
	fi

# =======================================================================
# 1.9 If COMPLETE_MODE=YES: create all CALIB and SCIENCE ABs
#	Taken from distillery 0.3.6 calselectABs
# =======================================================================

	writeLog "4. Create/edit ABs ..."
	if [ $COMPLETE_MODE = YES ] || [ $CREATE_SCIAB = YES ]
	then
		if [ $COMPLETE_MODE = YES ]
		then
			writeLog "4.1 COMPLETE_MODE=YES: first extract list of science files for CalSelector call ..."
		else
			writeLog "4.1 CREATE_SCIAB=YES: first extract list of science files for CalSelector call ..."
		fi
			
		rm -f $TMP_DIR/ph_raw_science* 
		if [ $RAWFILE_SOURCE = HISTO_AB ]
		then
# prepare the file list for CalSelector
			writeLog "4.1.1 RAWFILE_SOURCE=HISTO_AB: extract from historical ABs ..."
			for AB in `cat $TMP_DIR/ph_list_ABs6 | awk '{print $1}'`
			do
				grep "^RAWFILE" $DFO_AB_DIR/$AB | awk '{print $2}' >> $TMP_DIR/ph_raw_science1
# delete AB, later replaced by new one
				ALOG=`echo $AB | sed "s/.ab/.alog/"`
				rm $DFO_AB_DIR/$AB $DFO_AB_DIR/$ALOG
			done
# RAW_HDR:
		else
			writeLog "4.1.1 RAWFILE_SOURCE=RAW_HDR: extract from list of science headers ..."
			cat $TMP_DIR/ph_sci_hdr | awk '{print $1}' > $TMP_DIR/ph_raw_science1
		fi

# strip path and extension: ph_raw_science holds plain dp_ids
		for RF in `cat $TMP_DIR/ph_raw_science1`
		do
			RF1=`filename $RF | sed "s/.fits//" | sed "s/.hdr//"`
			echo $RF1 >> $TMP_DIR/ph_raw_science
		done
		
# calling with local OCA rule
		if [ ! -d $DFO_AB_DIR/CALSELECTOR ]
		then
			mkdir $DFO_AB_DIR/CALSELECTOR
		fi
# refuse to run concurrently with another CalSelector instance
		if [ -s $DFO_AB_DIR/CALSELECTOR/.calselector.lock ] || [ -s $DFO_AB_DIR/TMP/CALSELECTOR/.calselector.lock ]
		then
        		echo "$DFO_AB_DIR/CALSELECTOR/.calselector.lock or $DFO_AB_DIR/TMP/CALSELECTOR/.calselector.lock found, can't do. Remove and try again."
        		exit
		fi

		rm -f $DFO_AB_DIR/CALSELECTOR/*
		cd $DFO_AB_DIR/CALSELECTOR

# COMPLETE_MODE=YES: CalSelector mode Raw2Raw (since we create CALIB ABs and then SCIENCE ABs)
# CREATE_SCIAB=YES:  CalSelector mode Raw2Master (we create SCIENCE ABs only)
		if [ $COMPLETE_MODE = YES ]
		then
			CALSELMODE=Raw2Raw
		else
			CALSELMODE=Raw2Master
		fi

# make sure the input list exists even when empty
		if [ ! -e $TMP_DIR/ph_raw_science ]
		then
			touch $TMP_DIR/ph_raw_science
		fi
		writeLog "4.2.1 Calling CalSelector, mode $CALSELMODE, using local and perhaps edited $CALSEL_OCA_RULE  ..."
        	echo "			Call is: CalSelector --conf-file=$DFO_CONFIG_DIR/CALSELECTOR/CSConfiguration.properties --input-files=FILE --input-file=$TMP_DIR/ph_raw_science --mode=$CALSELMODE --oca-rules-file=$CALSEL_OCA_RULE" >> $PLOG
		echo "			CalSelector log: " >> $PLOG
	       	CalSelector --conf-file=$DFO_CONFIG_DIR/CALSELECTOR/CSConfiguration.properties --input-files=FILE --input-file=$TMP_DIR/ph_raw_science --mode=$CALSELMODE --oca-rules-file=$CALSEL_OCA_RULE | sed "s/^.*/	&/" | tee -a $PLOG
# with ignore-certification enabled:
	       	#CalSelector --conf-file=$DFO_CONFIG_DIR/CALSELECTOR/CSConfiguration.properties --input-files=FILE --input-file=$TMP_DIR/ph_raw_science --mode=Raw2Raw --oca-rules-file=$CALSEL_OCA_RULE --ignore-certification=true | sed "s/^.*/	&/" | tee -a $PLOG

# check for certified=false in the CalSelector XML output; the data of one
# night span the evening of $DATE (T2*) and the following morning (T0*/T10*)
		rm -f $TMP_DIR/ph_calsel_false*
		DATE1=`qcdate $DATE +1`
		grep "certified=" ${DFO_FILE_NAME}.${DATE}T2*xml   2>/dev/null | grep -v true | awk '{print $1}' > $TMP_DIR/ph_calsel_false
		grep "certified=" ${DFO_FILE_NAME}.${DATE1}T0*xml  2>/dev/null | grep -v true | awk '{print $1}' >> $TMP_DIR/ph_calsel_false
		grep "certified=" ${DFO_FILE_NAME}.${DATE1}T10*xml 2>/dev/null | grep -v true | awk '{print $1}' >> $TMP_DIR/ph_calsel_false

		if [ -s $TMP_DIR/ph_calsel_false ]
		then
# map the xml file names back to AB names and warn the operator by mail
			cat $TMP_DIR/ph_calsel_false | sed "s/<assoc.*//" | sort -u | sed "s/:$//" | sed "s/xml/ab/" > $TMP_DIR/ph_calsel_false1	
			if [ $RAWFILE_SOURCE = HISTO_AB ]
			then
				echo "Warning: uncertified calib files found, better switch to RAWFILE_SOURCE=RAW_HDR" > $TMP_DIR/ph_mail
			else
				echo "Warning: uncertified calib files found" > $TMP_DIR/ph_mail
			fi
			echo "ABs:" >> $TMP_DIR/ph_mail
			cat $TMP_DIR/ph_calsel_false1 >> $TMP_DIR/ph_mail
			
			cat $TMP_DIR/ph_mail | sed "s/^.*/	&/" | tee -a $PLOG
			mail -s "PHOENIX: uncertified files found for $DATE" $OP_ADDRESS <$TMP_DIR/ph_mail
		fi
		echo "			End of CalSelector log" >> $PLOG 

# no transformation of xml files needed
# create easy-to-parse TXT file from XML file (takes long so we have it configurable)
		if [ Q$CREATE_TXT_FILE = QNO ]
		then
	      		writeLog "4.2.2 Creating TXT files: skipped"
		else
	      		writeLog "4.2.2 Creating TXT files from XML files (takes a while) ..."
			for DP_ID in `cat $TMP_DIR/ph_raw_science`
			do
				XML=${DP_ID}.xml
				TXT=${DP_ID}.txt
				echo "	$TXT ..."

# XML exists only if single or first one in a template
				if [ ! -s $XML ]
				then
					continue
				fi

				cat > $TXT <<EOT
# CalSelector output (specified: Raw2Raw)
# ========================================
# This is the list of all raw files, and static calibration files, directly or indirectly linked to the SCIENCE raw file.
# We list:
# - DprType (CALIB or SCIENCE),
# - dpId (raw file ID or static calibration ID where static calibrations start with '${MCAL_CODE}'),
# - fileCategory=RAW_TYPE,
# - Parent = dpID to which these calibrations belong,
# - Complete (true if all configured file_types found),
# - assocCategory (DO_category),
# - name of the data set,
# - Type (main: required files; auxiliary: optional additional files),
# - Certified (true: certified; false: unknown quality, not necessarily bad!)
# - matchType (calib_plan: within configured validity; extended: outside configured validity but still found)
					
EOT
# flatten the XML; for SCIENCE rows insert the parent raw file ID (first_raw)
				XMLFormatter $XML |\
  awk '{
 if ($1 == "SCIENCE" ) {print $1"       "$2"    "$3"    "first_raw"     "$4"    "$5"    "$6}
 if ($1 != "SCIENCE" ) {print $0}
        }' first_raw=${DP_ID} >> $TXT

# store info about certified for raw calib files
				rm -f $TMP_DIR/ph_raw_calib
				grep "CALIB" $TXT | grep -v MISSING | grep $DFO_FILE_NAME | grep -v M.$DFO_FILE_NAME | sort -u | awk '{print $2,$8}' >> $TMP_DIR/ph_raw_calib
# append any missing calibrations reported in the XML
				grep "Missing" $XML 2>/dev/null |\
 awk '{print "CALIB     MISSING ",$3,$5,$6,$7,$8,$9}' |\
 sed "s|\"/>||" \
>> $TXT
			done
		fi

		if [ $COMPLETE_MODE = YES ]
		then
			writeLog "4.2.3 Extracting the raw calib names from the XML files ..."
			rm -f $TMP_DIR/ph_raw_calib*
			for DP_ID in `cat $TMP_DIR/ph_raw_science`
			do
				XML=${DP_ID}.xml
# keep raw calibrations only: master calibs (M.<instrument>) are filtered out
				grep "file category=" $XML | sed "s/^.*name=//" | grep -v M.${DFO_INSTRUMENT} | sed "s/\"//g" | sed "s/..$//" >> $TMP_DIR/ph_raw_calib1
			done
			cat $TMP_DIR/ph_raw_calib1 | sort -u > $TMP_DIR/ph_raw_calib

# add HC CALIBs for $DATE if configured (recommended)
			CHECK_HC_CALIBS=`grep "^COMPLETE_ADD_HC_CALIBS" $CONFIG | head -1`
			if [ "Q$CHECK_HC_CALIBS" != Q ]
			then
				writeLog "      We add the configured HC calibs for $DATE ..."
# build and source a small 'dfits|fitsort' command from the configured key list
				echo "cd $DFO_HDR_DIR/$DATE" > $TMP_DIR/ph_extract_HC
				grep "^COMPLETE_ADD_HC_CALIBS" $CONFIG | sed "s/\&\&$//" | sed "s/^.*\&\&//" | sed "s/^.*/dfits *hdr | fitsort -d &/" >> $TMP_DIR/ph_extract_HC
				source $TMP_DIR/ph_extract_HC 2>/dev/null | awk '{print $1}' | sed "s/.hdr//" > $TMP_DIR/ph_raw_calib1
				cat $TMP_DIR/ph_raw_calib1 2>/dev/null | sed "s/^.*/  &/"
	
# merge the HC calibs into the raw calib list
				cat $TMP_DIR/ph_raw_calib >> $TMP_DIR/ph_raw_calib1
				cat $TMP_DIR/ph_raw_calib1 | sort -u > $TMP_DIR/ph_raw_calib
			fi
# extract masters
		elif [ $CREATE_SCIAB = YES ]
		then
			writeLog "4.2.3 Extracting the mcalib names from the XML files ..."
			rm -f $TMP_DIR/ph_mcalib*
			for DP_ID in `cat $TMP_DIR/ph_raw_science`
			do
				XML=${DP_ID}.xml
# keep master calibs only (M.<instrument>)
				grep "file category=" $XML | sed "s/^.*name=//" | grep M.${DFO_INSTRUMENT} | sed "s/\"//g" | sed "s/..$//" >> $TMP_DIR/ph_mcalib1
			done
			cat $TMP_DIR/ph_mcalib1 | sort -u > $TMP_DIR/ph_mcalib
		fi

# check the TXT files for validity info: not enabled
	       	#checkTXT $2
		mv *xml *txt $DFO_AB_DIR 2>/dev/null
		cd $DFO_AB_DIR

# COMPLETE_MODE=YES: download raw calib headers and create CALIB ABs;
# CREATE_SCIAB=YES: download master-calib headers instead.
		if [ $COMPLETE_MODE = YES ]
		then
			writeLog "4.2.4 We download the raw calib headers into $DFO_RAW_DIR/$DATE ..."
			ORIG_PWD=`pwd`
			mkdir $DFO_RAW_DIR/$DATE 2>/dev/null
# cleanup headers each time to have consistent start
			rm -f $DFO_RAW_DIR/$DATE/*hdr
			cd $DFO_RAW_DIR/$DATE
	
			for RAW in `cat $TMP_DIR/ph_raw_calib | awk '{print $1}' | sort -u`
			do
# same date: copy hdr from $DFO_HDR_DIR
				if [ ! -s ${RAW}.fits ] && [ ! -s ${RAW}.hdr ]
				then
					cp $DFO_HDR_DIR/$1/${RAW}.hdr ./ 2>/dev/null
				fi
	
# if not found (other DATE): download  
	       	         	if [ ! -s ${RAW}.fits ] && [ ! -s ${RAW}.hdr ]
	       	         	then
	               			ngasClient -H ${RAW}.hdr | grep -v "downloaded and successful" | sed "s/^.*/  &/" | tee -a $PLOG
	                	fi
	        	done

			cd $ORIG_PWD

			writeLog "4.2.5 Create the CALIB ABs ..."
			writeLog "      Calling createAB -d $DATE -m CALIB -D $DATE"

# temporarily point DFO_HDR_DIR at the raw dir so createAB picks up the
# headers downloaded above
			ORIG_HDR_DIR=$DFO_HDR_DIR
       		 	export DFO_HDR_DIR=$DFO_RAW_DIR
# we need to remove all $DFO_CAL_DIR dates because otherwise ABbuilder will prefer MCAL over VCAL; up to 2098 but NOT 2099 because this encodes something else
			rm -rf $DFO_CAL_DIR/1999* $DFO_CAL_DIR/20*[0-8]*
# we also clean up VCAL
			rm -f $DFO_CAL_DIR/VCAL/*

# CALIB ABs could belong to many different SCIENCE DATEs --> add corresponding BATCH_IDs
# createAB writes into a scratch dir TMP1 so we can post-process the ABs
# before moving them into the real AB dir.
			ORIG_AB_DIR=$DFO_AB_DIR
			mkdir $DFO_AB_DIR/TMP1 2>/dev/null
			export DFO_AB_DIR=$DFO_AB_DIR/TMP1
			createAB -d $DATE -m CALIB -D $DATE | sed "s/^.*/	&/" | grep -v "Could not determine MODE" | egrep -v "moveProducts|createJob" | tee -a $PLOG

			cd $DFO_AB_DIR
# for each new AB: copy over any BATCH_IDs of other dates found in a
# pre-existing AB of the same name, inserted after our own BATCH_ID line
			for AB in `ls *.ab`
			do
				CHECK_EXIST=`grep "^BATCH_ID"  $ORIG_AB_DIR/$AB 2>/dev/null | awk '{print $2}' | grep -v $DATE | sort -u | head -1`
				if [ "Q$CHECK_EXIST" != Q ]
				then
					THIS_BATCHID="CALIB_$DATE"
					for BATCHID in `grep "^BATCH_ID"  $ORIG_AB_DIR/$AB 2>/dev/null | awk '{print $2}' | grep -v $DATE | sort -u`
					do
						sed -i -e "/^BATCH_ID.*${THIS_BATCHID}/a BATCH_ID	${BATCHID}" $AB
					done
				fi
			done
			mv * $ORIG_AB_DIR
			export DFO_AB_DIR=$ORIG_AB_DIR
			cd $ORIG_PWD
			rm -f ABbuilder.log

     			#export DFO_HDR_DIR=$ORIG_HDR_DIR
# otherwise all filtering etc. gets lost
        		export DFO_HDR_DIR=$DFO_RAW_DIR

		elif [ $CREATE_SCIAB = YES ]
		then
			writeLog "4.2.4 We download the mcalib headers into $DFO_CAL_DIR/$DATE ..."
			ORIG_PWD=`pwd`
			mkdir $DFO_CAL_DIR/$DATE 2>/dev/null
# cleanup headers each time to have consistent start
			rm -f $DFO_CAL_DIR/$DATE/*hdr
			cd $DFO_CAL_DIR/$DATE

# query	for origfile
# (map the mcalib dp_ids to their original file names via the QC database)
			cat > $TMP_DIR/ph_query_exist <<EOT
select
	dp_id,
	origfile
from
	qc_metadata..qc_products
where
	dp_id in (
EOT
# quote each dp_id and strip the trailing comma on the last line to form a
# valid SQL IN list
			cat $TMP_DIR/ph_mcalib | sed "s/^.*/\"&\" ,/" | sed "$,$ s/,//" >> $TMP_DIR/ph_query_exist
			cat >> $TMP_DIR/ph_query_exist <<EOT
)
go
EOT
			rm -f $TMP_DIR/ph_query_out
			isql -Uqc -P`eval "cat ${ARCH_PWD}"` -S${ARCH_SERVER} -w 999 -i $TMP_DIR/ph_query_exist | grep "M.${DFO_INSTRUMENT}" > $TMP_DIR/ph_query_out

# download the product hdrs
			cd $DFO_CAL_DIR/$DATE
			# download MCAL headers with DFOS-v1 naming scheme
			for MC in `cat $TMP_DIR/ph_query_out | awk '{print $2}' | sort -u | grep ${MCAL_CODE}_`
			do
				ngasClient -P ${MC} | grep -v "downloaded and successful" | sed "s/^.*/ &/" | tee -a $PLOG
			done
			# download MCAL header with qcFlow naming scheme
			for MC in `cat $TMP_DIR/ph_query_out | grep -v ${MCAL_CODE}_ | awk '{print $1}' | sort -u | sed "s/^.*/&.hdr/"`
			do
				ngasClient -P ${MC} | grep -v "downloaded and successful" | sed "s/^.*/ &/" | tee -a $PLOG
			done

			writeLog "	... all mcalib headers downloaded into $DFO_CAL_DIR/$DATE."
			cd $ORIG_PWD

# we clean up VCAL
			rm -f $DFO_CAL_DIR/VCAL/*
		fi

# Create the SCIENCE ABs with createAB using the configured OCA rule set.
		writeLog "4.2.5 Create the SCIENCE ABs (using $DFOS_OCA_RULE) ..."	
		writeLog "      Calling createAB -d $DATE -m SCIENCE -D $DATE"

		if [ $DP_CATG = "ANY" ]
		then
# do not suppress 60.A run ids, operate as in Calchecker CAL4CAL; WHU 2020-11-10
			export CREATEAB_NOTCHECK=TRUE
			writeLog "	DP_CATG = $DP_CATG accept 60.A"
		fi

		createAB -d $DATE -m SCIENCE -D $DATE | sed "s/^.*/	&/" | grep -v "Could not determine MODE" | egrep -v "moveProducts|createJob" | tee -a $PLOG

# if RAWFILE_SOURCE = RAW_HDR: create ph_list_ABs6 for the AB_PGI
		cp $DFO_MON_DIR/AB_list_SCIENCE_$DATE $TMP_DIR/ph_list_ABs6
		writeLog "... done. From here on we work with the new ABs as if they were downloaded ABs from DFOS_OPS."
	fi

# =======================================================================
# 1.10 Modify ABs
# 1.10.1 Check content of historical ABs
#       Outdated content: not an issue for current ABs
# =======================================================================

	if [ $CREATE_SCIAB = NO ]
	then
		writeLog "4.3 Edit ABs ..."
	fi

	if [ $MCAL_MODE = YES ] 
	then
		writeLog "4.3.1 Clean ABs: skipped for MCAL_MODE=YES."
	elif [ $COMPLETE_MODE = YES ]
	then
		writeLog "4.3.1 COMPLETE_MODE=YES: add CIVIL_DATE to CALIB ABs"
		for AB in `cat $DFO_MON_DIR/AB_list_CALIB_$DATE | awk '{print $1}'`
		do
			CIVIL_DATE=`echo $AB | sed "s/${DFO_FILE_NAME}.//" | cut -c1-10`
			HH=`echo $AB | sed "s/${DFO_FILE_NAME}.//" | cut -c12-12`
			if [ $HH = 1 ] || [ $HH = 0 ]
			then
				CIVIL_DATE=`qcdate $CIVIL_DATE -1`
			fi
			sed -i -e "/^DATE/a CIVIL_DATE	$CIVIL_DATE" $DFO_AB_DIR/$AB
		done
	elif [ $CREATE_SCIAB = NO ]
	then
		writeLog "4.3.1 Clean ABs from outdated content (RB_CONTENT etc.) ..." 
	
		for AB in `cat $TMP_DIR/ph_list_ABs6 | awk '{print $1}'`
		do
	
# remove historical obsolete sections: RB_CONTENT and further associated information
			cat $AB |\
	 grep -v "RB section starts here" |\
	 grep -v "^RB_CONTENT" |\
	 grep -v "defined triggers" |\
	 grep -v "^TRIGGER" |\
	 grep -v "^RB_NAME"  |\
	 grep -v "^PRODUCTS" |\
	 grep -v "^QC1_PAR"  |\
	 grep -v "^QC1_DB" |\
	 grep -v "^FURTHER_" |\
	 grep -v "associated QC1 parameters" |\
	 sed "s/^STATUS/AB_STATUS/" |\
	 sed "s/RBS_ROOT_NAME/PROD_ROOT_NAME/" |\
	 sed "s|\$DFO_RAW_DIR|\${DFO_RAW_DIR}|" |\
	 sed "s|\$DFO_CAL_DIR|\${DFO_CAL_DIR}|" \
	> $TMP_DIR/new_ab
			mv $TMP_DIR/new_ab $AB

			CHECK_PROD_PATH=`grep "^PROD_PATH" $AB | awk '{print $1}'`
			if [ Q$CHECK_PROD_PATH = Q ]
			then
				RAW_TYPE=`grep "^RAW_TYPE" $AB | awk '{print $2}'`
				cat $AB | awk '{ if ($1=="LOG_NAME") {print $0; print "PROD_PATH	${DFS_PRODUCT}/"raw_type"/"date} else {print $0}}' raw_type=$RAW_TYPE date=$DATE > $TMP_DIR/new_ab
				mv $TMP_DIR/new_ab $AB
			fi

# check for line with pipeline product path (needed for updateAB)
               		CHECK_PATH=`grep "pipeline product path" $AB`
                	if [ "Q$CHECK_PATH" = Q ]
                	then
                       		cat $AB | awk '{ if ($1=="PROD_PATH") {print ""; print "# pipeline product path"; print $0} else {print $0}}' > $TMP_DIR/new_AB
                        	mv $TMP_DIR/new_AB $AB
                	fi

# check for PROCESS_STATUS
			CHECK_PROCESS=`grep "^PROCESS_STATUS" $AB | awk '{print $1}'`
			if [ Q$CHECK_PROCESS = Q ]
			then
				cat $AB | awk '{ if ($1=="COMPLETENESS") {print $0; print "PROCESS_STATUS	CREATED"} else {print $0}}' > $TMP_DIR/new_ab
				mv $TMP_DIR/new_ab $AB
			fi

# check for TEXEC
			CHECK_TEXEC=`grep "^TEXEC" $AB | awk '{print $1}'`
			if [ Q$CHECK_TEXEC = Q ]
			then
				cat $AB | awk '{ if ($2=="further" && $3=="associated") {print $0; print "TEXEC	0"} else {print $0}}' > $TMP_DIR/new_ab
				mv $TMP_DIR/new_ab $AB
			fi

			CHECK_PARAM_C=`grep "parameters for processing" $AB | awk '{print $1}'`
			if [ Q$CHECK_PARAM_C = Q ] 
			then
				cat $AB | awk '{ if ($2=="further" && $3=="associated") {print "# parameters for processing"; print $0} else {print $0}}' > $TMP_DIR/new_ab
				mv $TMP_DIR/new_ab $AB	
			fi

# check for SOF section (could be missing due to bug in historical ABs): repair
			CHECK_SOF=`grep "^SOF_CONTENT" $AB | head -1 | awk '{print $1}'`
			if [ Q$CHECK_SOF = Q ]
			then
				cat >> $AB <<EOT
# ========== SOF section starts here ==========
EOT
				grep "^RAWFILE" $AB | awk '{print $2,$3}' | sed "s/^.*/SOF_CONTENT	& RAW/"   >> $AB
				grep "^MCALIB"  $AB | awk '{print $3,$4}' | sed "s/^.*/SOF_CONTENT	& CALIB/" >> $AB
				writeLog "    added in $AB: SOF_CONTENT"
			fi
		done
	fi

# =======================================================================
# 1.10.2 Call AB_PGI if configured
# =======================================================================

	cat $TMP_DIR/ph_list_ABs6 | sort -u > $TMP_DIR/ph_list_ABs61
	mv $TMP_DIR/ph_list_ABs61 $TMP_DIR/ph_list_ABs6
	cd $DFO_AB_DIR

	if [ Q$AB_PGI != Q ] 
	then
		if [ ! -s $DFO_BIN_DIR/$AB_PGI ]
		then
			writeLog "ERROR: \$AB_PGI $AB_PGI defined in config file but not found under $DFO_BIN_DIR. Check and start again."
			exit
		fi

		writeLog "4.4 Apply configured AB_PGI: \$DFO_BIN_DIR/$AB_PGI ..."
		case $DEEP_MODE in
		 "NO"  ) $DFO_BIN_DIR/$AB_PGI $DATE | sed "s/^.*/	&/" | tee -a $PLOG ;;
		 "YES" ) $DFO_BIN_DIR/$AB_PGI $RUN_ID $DATE | sed "s/^.*/	&/" | tee -a $PLOG ;;
		esac
		writeLog "... done."
	else
		writeLog "4.4 AB_PGI: not configured."	
	fi

	NUM_AB=`cat $TMP_DIR/ph_list_ABs6 | wc -l`
	if [ $NUM_AB = 0 ] 
	then
		writeLog "... no AB left. Exit."
		exit
	fi

	writeLog "  $NUM_AB $MODE ABs found for processing:"
	cat $TMP_DIR/ph_list_ABs6 | sed "s/ /	/g" | sed "s/^.*/	&/" | tee -a $PLOG
	N_SCI_TOT=$NUM_AB

# =======================================================================
# 1.11 Catch historical AB comments (MCAL_MODE=YES or COMPLETE_MODE=YES)
# =======================================================================

	if [ $MCAL_MODE = YES ] || [ $COMPLETE_MODE = YES ]
	then
		writeLog "4.5 Get historical AB comments ..."

		cat > $TMP_DIR/ph_get_prod_comments <<EOT
SELECT
	ab_name ,
	comment
FROM
	qc1..prod_comments
WHERE
	ab_name in (
EOT
	if [ $MCAL_MODE = YES ]
	then
		cat $TMP_DIR/ph_list_ABs6 | awk '{print $1}' | sed "s/^.*/\"&\",/" | sed "$,$ s/,//" >> $TMP_DIR/ph_get_prod_comments
	else
		cat $DFO_MON_DIR/AB_list_CALIB_$DATE 2>/dev/null | awk '{print $1}' | sed "s/^.*/\"&\",/" | sed "$,$ s/,//" >> $TMP_DIR/ph_get_prod_comments
	fi

	cat >> $TMP_DIR/ph_get_prod_comments <<EOT
)
GO
EOT
		rm -f $TMP_DIR/ph_get_prod_results
		isql -Uqc -S$ARCH_SERVER -w999 -P`cat $ARCH_PWD` -i $TMP_DIR/ph_get_prod_comments  | grep -v affected | sed "1,2 d" | grep -v "\-\-\-\-" | sed "/^$/d" > $TMP_DIR/ph_get_prod_results
		if [ -s $TMP_DIR/ph_get_prod_results ]
		then
			writeLog "    ... found."
		else
			writeLog "    ... none found."
		fi
	fi

# =======================================================================
# 1.12 Catch CERTIF/REJ flags (MCAL_MODE=YES)
# =======================================================================

# Download historical ABs/TLOGs from the web server so that CERTIF/REJ flags
# and scores of a previous processing can be re-used.
	if [ $MCAL_MODE = YES ] || [ $COMPLETE_MODE = YES ]
	then
		writeLog "4.6 Get certified/scored info from historical ABs ..."

# could be versioned; is optional for MCAL_MODE=YES
		if [ $MCAL_MODE = YES ]
		then
# ORIG_AB_DIR config lines may carry a validity date (col 4); pick the first
# entry whose date is >= our DATE (or first of MONTH)
			grep "^ORIG_AB_DIR"	$CONFIG | grep $PROC_INSTRUMENT | awk '{print $3,$4}' | sort -u -k2 > $TMP_DIR/ph_ORIG_AB_DIR
			if [ -s $TMP_DIR/ph_ORIG_AB_DIR ]
			then
				for REF in `cat $TMP_DIR/ph_ORIG_AB_DIR | awk '{print $2}'`
				do
					REF1=`echo $REF | sed "s/-//g"`	
					if [ Q$DATE != Q ]
					then
						DATE1=`echo $DATE | sed "s/-//g"`
					else
						MONTH1=`echo $MONTH | sed "s/-//g"`
						DATE1=`echo ${MONTH1}01`
					fi
	
					if [ $DATE1 -le $REF1 ]
					then
						ORIG_AB_DIR=`grep $REF $TMP_DIR/ph_ORIG_AB_DIR | awk '{print $1}'`
						break
					fi
				done
			fi

# COMPLETE_MODE = YES: 
		else
			ORIG_AB_DIR=$CAL_ORIG_AB_DIR
			ORIG_PLT_DIR=$CAL_ORIG_PLT_DIR
		fi
		
		if [ Q$ORIG_AB_DIR = Q ]
		then
			writeLog "... nothing configured."
		else
			if [ ! -d $DFO_AB_DIR/HISTO ]
			then
				mkdir $DFO_AB_DIR/HISTO
				cat > $DFO_AB_DIR/HISTO/AAREADME <<EOT
This directory is created by $TOOL_NAME in order to store historical ABs downloaded from ${DFO_WEB_SERVER}, to query them for
certified/processed information. The content is of temporary value only, during $TOOL_NAME runtime.
EOT
			fi

			if [ $COMPLETE_MODE = YES ]
			then
				writeLog "    Download historical QC information for CALIBs from ${DFO_WEB_SERVER}:${ORIG_AB_DIR} ..."
				rm -f $TMP_DIR/ph_list_dates*
# CALIB ABs may stem from the night before the encoded date, hence also
# collect DATE-1 for each distinct date
				for D in `cat $DFO_MON_DIR/AB_list_CALIB_$DATE | awk '{print $1}' | sed "s/${DFO_FILE_NAME}.//" |  cut -c1-10 | sort -u`
				do
					qcdate $D -1 >> $TMP_DIR/ph_list_dates1
					echo $D >> $TMP_DIR/ph_list_dates1
				done
				cat $TMP_DIR/ph_list_dates1 | sort -u > $TMP_DIR/ph_list_dates
			else
				writeLog "    Download historical ABs and TLOGs from ${DFO_WEB_SERVER}:${ORIG_AB_DIR} ..."
			fi

			if [ $OVERRIDE_HISTO = YES ]
			then
				rm -f $DFO_AB_DIR/HISTO/*.ab $DFO_AB_DIR/HISTO/*.tlog
			else
				writeLog "    We don't clean up $DFO_AB_DIR/HISTO since you have called the tool with option -x. Don't forget to clean up eventually!"
				echo "We don't clean up $DFO_AB_DIR/HISTO since you have called the tool with option -x. Don't forget to clean up eventually!" > $TMP_DIR/ph_mail
				mail -s "PHOENIX: Reminder $DFO_AB_DIR/HISTO" $OP_ADDRESS <$TMP_DIR/ph_mail
			fi

			if [ $COMPLETE_MODE = YES ]
			then
				rm -f $DFO_AB_DIR/HISTO/*
				for D1 in `cat $TMP_DIR/ph_list_dates`
				do
					writeLog "	$D1 ..."
					#scp -o $SCP_TIMEOUT -o BatchMode=yes ${DFO_WEB_SERVER}:${ORIG_PLT_DIR}/$D1/*${ORIG_PLT_EXT} ${DFO_WEB_SERVER}:${ORIG_AB_DIR}/$D1/*\.ab ${DFO_WEB_SERVER}:${ORIG_AB_DIR}/$D1/*.tlog ${DFO_WEB_SERVER}:${ORIG_AB_DIR}/$D1/*.html ${DFO_WEB_SERVER}:${ORIG_AB_DIR}/$D1/*\.tab $DFO_AB_DIR/HISTO 1>/dev/null 2>/dev/null
# no plots!
					scp -o $SCP_TIMEOUT -o BatchMode=yes ${DFO_WEB_SERVER}:${ORIG_AB_DIR}/$D1/*\.ab ${DFO_WEB_SERVER}:${ORIG_AB_DIR}/$D1/*.tlog ${DFO_WEB_SERVER}:${ORIG_AB_DIR}/$D1/*.html ${DFO_WEB_SERVER}:${ORIG_AB_DIR}/$D1/*\.tab $DFO_AB_DIR/HISTO 1>/dev/null 2>/dev/null
				done
				rm -f $DFO_AB_DIR/HISTO/status*
				MONTH_SCP=$DATE		# just a placeholder
			else
# all info per month with one scp: faster
				MONTH_SCP=`cat $TMP_DIR/ph_list_dates | cut -c1-7 | sort -u`
				scp -o $SCP_TIMEOUT -o BatchMode=yes ${DFO_WEB_SERVER}:${ORIG_AB_DIR}/${MONTH_SCP}*/*\.ab ${DFO_WEB_SERVER}:${ORIG_AB_DIR}/${MONTH_SCP}*/*.tlog $DFO_AB_DIR/HISTO 1>/dev/null 2>/dev/null
			fi	

			chmod u+w $DFO_AB_DIR/HISTO/* 2>/dev/null
		fi

# Check if any found
# For each current AB, derive a 3-digit status code from the matching
# historical AB and TLOG: PROCESS_STATUS SCORE_STATUS CERTIF_STATUS
# (each 0=unknown, 1=failed/red/rejected, 2=ok/green/certified);
# stored in ph_old_info_$MONTH_SCP for later use.
		rm -f $TMP_DIR/ph_old_info_$MONTH_SCP
		CHECK_EXIST=`ls $DFO_AB_DIR/HISTO/ | head -1 | awk '{print $1}'`
		if [ Q$CHECK_EXIST != Q ]
		then
# find status info for the new ABs in historical ABs; multiple entries possible-->last one taken
			if [ $MCAL_MODE = YES ] 
			then
				REF_LIST=$TMP_DIR/ph_list_ABs6
			else
				REF_LIST=$DFO_MON_DIR/AB_list_CALIB_$DATE
			fi

			for AB in `cat $REF_LIST | awk '{print $1}'`
			do
				PROCESS_STATUS=""
				SCORE_STATUS=""
				CERTIF_STATUS=""
				REJ_STATUS=""

				TLOG=`echo $AB | sed "s/.ab/.tlog/"`
				if [ -s $DFO_AB_DIR/HISTO/$AB ]
				then
# either there is this flag ...
					PROCESS_STATUS=`grep "^PROCESS_STATUS" 	$DFO_AB_DIR/HISTO/$AB | awk '{print $2}' | tail -1`
					if [ Q$PROCESS_STATUS = Q ]
					then
# ... or we check for a product listed in PRODUCTS section
						PROCESS_STATUS=`grep "^PRODUCTS" $DFO_AB_DIR/HISTO/$AB | awk '{print $2}' | grep -v NONE | tail -1`
						if [ Q$PROCESS_STATUS != Q ]
						then
							PROCESS_STATUS=SUCCESSFUL
						fi
					fi

					SCORE_STATUS=`grep   "^AB_STATUS" 	$DFO_AB_DIR/HISTO/$AB | grep scored | awk '{print $3}' | tail -1` 

# either there is the flag 'certified' ...
					CERTIF_STATUS=`grep  "^AB_STATUS" 	$DFO_AB_DIR/HISTO/$AB | grep certified | awk '{print $3}' | tail -1`
					if [ Q$CERTIF_STATUS = Q ]
					then
# ... or we check for M. entry in PRODUCTS --> ingested meaning also certified
						CERTIF_STATUS=`grep "^PRODUCTS" $DFO_AB_DIR/HISTO/$AB | awk '{print $3}' | grep -v NONE | tail -1`
						if [ Q$CERTIF_STATUS != Q ]
						then
							CERTIF_STATUS=certified
						fi
					fi
					REJ_STATUS=`grep     "^AB_STATUS" 	$DFO_AB_DIR/HISTO/$AB | grep rejected | awk '{print $3}' | tail -1`
					PROD_ROOT_NAME=`grep "^PROD_ROOT_NAME" 	$DFO_AB_DIR/HISTO/$AB | awk '{print $2}'`
				fi

# numeric score result (TOTAL_SCORE) and number of scores from the TLOG
				SCORE_RESULT=""
				if [ -s $DFO_AB_DIR/HISTO/$TLOG ]
				then
					SCORE_RESULT=`grep "^TOTAL_SCORE"  $DFO_AB_DIR/HISTO/$TLOG | awk '{print $2}'`
                			TOTAL_NUMBER=`grep "^TOTAL_NUMBER" $DFO_AB_DIR/HISTO/$TLOG 2>/dev/null | awk '{print $2}'`
				fi

				if [ Q$PROCESS_STATUS = Q ]
				then
					PROCESS_STATUS=0
				else
					case $PROCESS_STATUS in
					 "CREATED"    ) PROCESS_STATUS=0 ;;
					 "SUCCESSFUL" ) PROCESS_STATUS=2 ;;
					 "FAILED" )     PROCESS_STATUS=1 ;;
					esac
				fi

# TOTAL_NUMBER=0 means effectively no score
				if ( [ Q$SCORE_STATUS = Q ] && [ Q$SCORE_RESULT = Q ] ) || ( [ Q$TOTAL_NUMBER = Q ] || [ Q$TOTAL_NUMBER = Q0 ] )
				then
					SCORE_STATUS=0
				else
					case $SCORE_RESULT in
					 0 ) SCORE_STATUS=2 ;;
					 * ) SCORE_STATUS=1 ;;
					esac
				fi

				if [ Q$REJ_STATUS = Qrejected ]
				then
					CERTIF_STATUS=1
				elif ( [ Q$CERTIF_STATUS = Qcertified ] || [ Q$CERTIF_STATUS = Qauto-certified ] )
				then
					CERTIF_STATUS=2
				else
					CERTIF_STATUS=0
				fi

				if [ Q$PROD_ROOT_NAME != Q ]
				then
# better to produce new QC plots; unclear what to do if not possible
					#PLT_EXIST=`ls $DFO_AB_DIR/HISTO/${PROD_ROOT_NAME}*${ORIG_PLT_EXT} 2>/dev/null | head -1`
					#if [ "Q$PLT_EXIST" = Q ]
					#then
						PLT_EXIST=NONE
					#fi
				else
					 PLT_EXIST=NONE
				fi

# historical score report (html) if downloaded
				SCORE_INFO=`echo $AB | sed "s/.ab/.html/"`
				if [ -s $DFO_AB_DIR/HISTO/$SCORE_INFO ]
				then
					SCORE_EXIST=$SCORE_INFO
				else
					SCORE_EXIST=NONE
				fi
	
# CERTIF_CASES for ${PROCESS_STATUS}${SCORE_STATUS}$CERTIF_STATUS:
# 222 processed - scored 0 - certified
# 100 failed
# .00 created/processed/failed - no further info
# 21. processed - scored ylw/red 
# 20. processed - no score info 
# we store this info in ph_old_info_$MONTH_SCP for later reference below
				echo "$AB ${PROCESS_STATUS}${SCORE_STATUS}$CERTIF_STATUS $PLT_EXIST $SCORE_EXIST" >> $TMP_DIR/ph_old_info_$MONTH_SCP
			done
		fi	
	fi

# check for REJ: if found, send mail to investigate; REJ products should be listed in $DFO_MON_DIR/list_REJECTED
# and filtered out
	writeLog "4.7 Check for historical REJ flag ..."
	rm -f $TMP_DIR/ph_rej_found
# status code ends in 1 --> historically rejected (see CERTIF_CASES above)
	egrep " [0-2][0-2]1" $TMP_DIR/ph_old_info_$MONTH_SCP 2>/dev/null > $TMP_DIR/ph_rej_found
	if [ -s $TMP_DIR/ph_rej_found ]
	then
		echo "AB(s) found with historical REJ flag. Check and list in $DFO_MON_DIR/list_REJECTED to have them ignored" > $TMP_DIR/ph_mail
		cat $TMP_DIR/ph_rej_found >> $TMP_DIR/ph_mail
		mail -s "PHOENIX: REJ AB(s) for $DATE" $OP_ADDRESS <$TMP_DIR/ph_mail
		writeLog "    ... AB(s) found with historical REJ flag." 
	else
		writeLog "    ... none found."
	fi	

# =======================================================================
# 2. Prepare download files and processing jobs
# =======================================================================
# ph_list_ABs6 is cleaned from filtered ABs, so we better take this one
# we add now the RAW_TYPE since this could be useful later during certification (MCAL_MODE=YES)

	if [ $COMPLETE_MODE = NO ]
	then
		rm -f $DFO_MON_DIR/$AB_LIST
		for AB in `cat $TMP_DIR/ph_list_ABs6 | awk '{print $1}'`
		do
			RAW_TYPE=`grep "^RAW_TYPE" $AB | awk '{print $2}'`
			grep $AB $TMP_DIR/ph_list_ABs6 | awk '{print $1,rt,$2}' rt=$RAW_TYPE | awk '{print $1,$2" "}' >> $DFO_MON_DIR/$AB_LIST
		done
	fi

	if [ $COMPLETE_MODE = YES ]
	then
		cat $TMP_DIR/ph_list_ABs6 | sort -u > $DFO_MON_DIR/AB_list_SCIENCE_$DATE
		cat $DFO_MON_DIR/AB_list_CALIB_$DATE $TMP_DIR/ph_list_ABs6 | sort -u > $DFO_MON_DIR/AB_list_ALL_$DATE
		AB_LIST=AB_list_ALL_$DATE
	fi
	rm $TMP_DIR/ph_list_ABs6	

# =======================================================================
# 2.1 create ATAB files and call getStatusAB
# =======================================================================

	if [ $COMPLETE_MODE = YES ]
	then
		cat $DFO_MON_DIR//AB_list_SCIENCE_$DATE | sort -k 1,1 | awk '{if (NR <= 9) {print $1,"NO","SCI00"NR} else if (NR <= 99) {print $1,"NO","SCI0"NR} else {print $1,"NO","SCI"NR}}' > $TMP_DIR/ca_lookup1
		cat $DFO_MON_DIR/AB_list_CALIB_$DATE    | sort -k 1,1 | awk '{if (NR <= 9) {print $1,"NO","CAL00"NR} else if (NR <= 99) {print $1,"NO","CAL0"NR} else {print $1,"NO","CAL"NR}}' >> $TMP_DIR/ca_lookup1
	else
		case $MCAL_MODE in	
	 	 "NO"  ) cat $DFO_MON_DIR/$AB_LIST | awk '{if (NR <= 9) {print $1,"NO","SCI00"NR} else if (NR <= 99) {print $1,"NO","SCI0"NR} else {print $1,"NO","SCI"NR}}' > $TMP_DIR/ca_lookup1;;
	 	 "YES" ) cat $DFO_MON_DIR/$AB_LIST | awk '{if (NR <= 9) {print $1,"NO","CAL00"NR} else if (NR <= 99) {print $1,"NO","CAL0"NR} else {print $1,"NO","CAL"NR}}' > $TMP_DIR/ca_lookup1;;
		esac
	fi

# for SCIENCE or COMPLETE_MODE: create ATAB files here	
# One .tab file per AB, consumed by the AB monitor: processing state, association
# quality flag (AFLAG), recipe/setup info and -- for historical CALIBs -- the
# previous score/certification status recovered above.
	if [ $MCAL_MODE = NO ] || [ $COMPLETE_MODE = YES ]
	then
		for AB in `cat $DFO_MON_DIR/$AB_LIST | awk '{print $1}'`
		do
			ALOG=`echo $AB | sed "s/.ab/.alog/"`
			ATAB=`echo $AB | sed "s/.ab/.tab/"`
		
			COMPLETE=`grep "^COMPLETENESS" $AB | awk '{print $2}'`
			if [ -s $ALOG ]
			then
# worst (largest) age of a problematic association; 0.0 is rendered "incompl."
# NOTE(review): the leading '*' in "*NOK" is not a valid ERE quantifier here;
# GNU egrep treats it literally -- presumably matching alog lines flagged *NOK
				AFLAG=`egrep "WARNING|*NOK|Missing" $ALOG | grep -v MASSOC | awk '{print $6}' | sort -g -r | head -1 | awk '{printf"%10.1f\n",$1}' | sed "s/  0.0$/incompl./" | awk '{print $1}'`
			else
				echo "Not found" > $ALOG
				AFLAG=""
			fi

			if [ "Q$AFLAG" = Q ]
			then
				AFLAG=OK
			else
				AFLAG2=`egrep " VIRT " $ALOG | head -1 | awk '{print $1}'`
				if [ Q$AFLAG2 != Q ]
				then
					AFLAG="VIRT_$AFLAG"
				fi
			fi
		
			RECIPE=`grep 	"^RECIPE"  	$AB | awk '{print $2}'`
			RAW_TYPE=`grep	"^RAW_TYPE" 	$AB | awk '{print $2}'`
			DPR_CATG=`grep	"^DPR_CATG" 	$AB | awk '{print $2}'`
			SETUP=`grep "^RAW_MATCH_KEY"	$AB | grep -v UNDEFINED | grep -v TPL.START | grep -v ARCFILE | awk '{print $2}' | sed "s/^.*=//" | tr "\012" "_" | sed "s/_$//"`
	
			CMODE=ALL
			CTAG=CTAG1
			AB_INDEX=`grep $AB $TMP_DIR/ca_lookup1 | awk '{print $3}'`
			if [ $COMPLETE_MODE = YES ] && [ $DPR_CATG = CALIB ]
			then
# add historical comments and previous scoring if any
				OLD_INFO=`grep $AB $TMP_DIR/ph_old_info_$DATE   2>/dev/null | awk '{print $2}'`
# 3rd digit of the status code = certification state (0/1/2)
				CERTIF_INFO=`echo $OLD_INFO | cut -c3,3`

				OLD_COMM=`grep $AB $TMP_DIR/ph_get_prod_results 2>/dev/null | sed "s/$AB//"`
				COMMENT="$OLD_INFO $OLD_COMM"
				if [ "Q$COMMENT" != Q ]
				then
# spaces encoded as XYZ so the comment survives awk field splitting downstream
					COMMENT=`echo "<font size=1 color=#666>${COMMENT}</font>" | sed "s/ /XYZ/g"`
				else
					COMMENT="COMMENT_NONE"
				fi

				PLT_INFO1=`grep $AB $TMP_DIR/ph_old_info_$DATE 2>/dev/null | awk '{print $3}'`
				if [ Q$PLT_INFO1 != Q ]
				then
					PLT_INFO=`filename $PLT_INFO1`
				fi
				if [ Q$PLT_INFO = QNONE ] || [ Q$PLT_INFO = Q ]
				then
					QC_STATUS=QC_NONE
				else
# this link will be interpreted by ABmonitor as link to QC_PLOT
					QC_STATUS="QCSTATUS_${RAW_TYPE}/${DATE}/${PLT_INFO}QCSTATUS"
					cp $DFO_AB_DIR/HISTO/${PLT_INFO} $DFS_PRODUCT/${RAW_TYPE}/${DATE}
				fi

				SCORE_INFO=`grep $AB $TMP_DIR/ph_old_info_$DATE 2>/dev/null | awk '{print $4}'`
				YELLOW_SCORE=`grep "^YELLOW_SCORE" $DFO_CONFIG_DIR/config.scoreQC 2>/dev/null | awk '{print $2}'`
				if [ Q$YELLOW_SCORE = Q ]
				then
					YELLOW_SCORE=1
				fi

				if [ Q$SCORE_INFO = QNONE ] || [ Q$SCORE_INFO = Q ]
				then
					SCORE_LINK=SCORELINK_NONE
					SCORE_COL=GREY
					SCORE_IMG=X
				else
					TLOG=`echo $SCORE_INFO | sed "s/html/tlog/"`
                			SCORE_RESULT=`grep "^TOTAL_SCORE" $DFO_AB_DIR/HISTO/$TLOG 2>/dev/null | awk '{print $2}'`
                			TOTAL_NUMBER=`grep "^TOTAL_NUMBER" $DFO_AB_DIR/HISTO/$TLOG 2>/dev/null | awk '{print $2}'`
					SCORE_LINK=SCLINK${SCORE_INFO}_SCORE${SCORE_RESULT}/${TOTAL_NUMBER}SCEND
					if [ Q${SCORE_RESULT} = QX ] || [ Q${SCORE_RESULT} = Q ]
					then
						SCORE_COL=GREY
						SCORE_IMG=X
# a score with TOTAL_NUMBER=0 is not a score
					elif ([ ${SCORE_RESULT} = 0 ] && [ $TOTAL_NUMBER = 0 ])
					then
						SCORE_COL=GREY
						SCORE_IMG=X
					elif [ ${SCORE_RESULT} = 0 ]
					then
						SCORE_COL=GREEN
						SCORE_IMG=0
					elif [ ${SCORE_RESULT} -le $YELLOW_SCORE ]
					then
						SCORE_COL=YELLOW
						SCORE_IMG=1
					else
						SCORE_COL=LIGHTRED
						SCORE_IMG=2
					fi

# cp to $DFO_AB_DIR to have the links for the AB monitor
					cp $DFO_AB_DIR/HISTO/$SCORE_INFO $DFO_AB_DIR
					cp $DFO_AB_DIR/HISTO/$TLOG $DFO_AB_DIR
				fi

# CERTIF_STATUS:
				if [ Q$CERTIF_INFO = Q ] || [ $CERTIF_INFO = 0 ]
				then
					CERTIF_INFO=CERTIFGREY
				elif [ $CERTIF_INFO = 2 ]
				then
					CERTIF_INFO=CERTIFGREEN
				elif [ $CERTIF_INFO = 1 ]
				then
					CERTIF_INFO=CERTIFLIGHTRED
				fi
			else
				COMMENT="COMMENT_NONE"
				CERTIF_INFO=CERTIFGREY
			fi

# initial colour for state CREATED, taken from getStatusAB's own lookup tables
			AB_COLOR=`grep "^#STATUS" $DFO_BIN_DIR/getStatusAB | grep " $DPR_CATG " | grep " CREATED " | grep "NO WAITING" | awk '{print $6}'`
			AB_COLOR=`grep "^#COLOUR" $DFO_BIN_DIR/getStatusAB | grep " $AB_COLOR " | awk '{print $3}'`

			if [ $DPR_CATG = SCIENCE ]
			then
				echo "$AB_COLOR BQSNOTSCHEDULED MISSNONE ACTIVENO CONTAINEDNO $AB $AB_INDEX LEFT RIGHT $COMPLETE AFLAG$AFLAG $ALOG SCIENCEXML SCIENCEHTML SCIENCETXT RECIPE$RECIPE $RAW_TYPE $CMODE $CTAG $SETUP ... RBLOG_NONE AB_TEXEC QC_TEXEC QC_NONE COVER_NONE SCOREFLAG_NONE HC_NONE SCORE_NONE SCORELINK_NONE CERTIFGREY CERTIF_NONE $COMMENT" > $ATAB
			else
				echo "$AB_COLOR BQSNOTSCHEDULED MISSNONE ACTIVENO CONTAINEDNO $AB $AB_INDEX LEFT RIGHT $COMPLETE AFLAG$AFLAG $ALOG CALIBXML CALIBHTML CALIBTXT RECIPE$RECIPE $RAW_TYPE $CMODE $CTAG $SETUP ... RBLOG_NONE AB_TEXEC QC_TEXEC $QC_STATUS COVER_NONE SIMG${SCORE_IMG}YLW1 HC_NONE SCOL${SCORE_COL} $SCORE_LINK $CERTIF_INFO CERTIF_NONE $COMMENT" > $ATAB
			fi
		done

# for MCAL_MODE=YES: ATAB files exist, add comments and previous scoring (if any) 
	else
		for AB in `cat $DFO_MON_DIR/$AB_LIST | awk '{print $1}'`
		do
			ATAB=`echo $AB | sed "s/.ab/.tab/"`

			OLD_INFO=""
			if [ -s $TMP_DIR/ph_old_info_$MONTH_SCP ]
			then
				OLD_INFO=`grep $AB $TMP_DIR/ph_old_info_$MONTH_SCP | awk '{print $2}'`
			fi
	
			OLD_COMM=""
			if [ -s $TMP_DIR/ph_get_prod_results ]
			then
				OLD_COMM=`grep $AB $TMP_DIR/ph_get_prod_results | sed "s/$AB//"`
			fi
			COMMENT="$OLD_INFO $OLD_COMM"
			if [ "Q$COMMENT" != Q ]
			then
# spaces encoded as XYZ so the comment survives field splitting in the monitor
				COMMENT=`echo "<font size=1 color=#666>${COMMENT}<\/font>" | sed "s/ /XYZ/g"`
				sed -i -e "s|COMMENT_NONE|$COMMENT|" $ATAB
			fi
		done
	fi

	writeLog "5. Call getStatusAB (background)"
	getStatusAB -d $DATE &

# =======================================================================
# 2.2 Call createJob 
# 2.2.1 COMPLETE_MODE=YES
# =======================================================================

	rm -f $DFO_JOB_DIR/JOBS_NIGHT
	if [ $COMPLETE_MODE = YES ]
	then
		writeLog "6. Create processing jobs ..."
		writeLog "6.1 Call createJob -m CALIB -d $DATE ..."
		createJob -d $DATE -m CALIB | sed "s/^.*/	&/"
		
		writeLog "6.2 Call createJob -m $MODE -d $DATE ..."
	else
		
# =======================================================================
# 2.2.2 normal call for $MODE
# =======================================================================

		# BWo: removing ph_old_info_$MONTH_SCP distroys needed information for MCAL certification
		#rm -f $TMP_DIR/ph_old_info_$MONTH_SCP
		writeLog "6. Call createJob -m $MODE -d $DATE ..."
	fi	

	createJob -d $DATE -m $MODE | sed "s/^.*/	&/"

# sort ABs in execAB by RAW_TYPE: this is important for DRS_TYPE=INT
	DRS_TYPE=`grep "^DRS_TYPE" 	$DFO_CONFIG_DIR/OCA/config.createAB | awk '{print $2}'`
	QCBQS_TYPE=`grep "^QCBQS_TYPE"	$DFO_CONFIG_DIR/config.processQC | awk '{print $2}'`

# supported only for COMPLETE_MODE=NO
	if [ $DRS_TYPE != CON ] && [ $COMPLETE_MODE = NO ]
	then
		rm -f $TMP_DIR/ph_execAB
		grep "^#" $DFO_JOB_DIR/execAB_SCIENCE_$DATE > $TMP_DIR/ph_execAB
		for RT in `grep "^RAW_TYPE" $CONFIG | grep $PROC_INSTRUMENT | awk '{print $3}'`
		do
			grep " ${RT} " $DFO_MON_DIR/$AB_LIST | awk '{print $1}' | sed "s/^.*/processAB -a &/" >> $TMP_DIR/ph_execAB
		done
		mv $TMP_DIR/ph_execAB $DFO_JOB_DIR/execAB_SCIENCE_$DATE	
		chmod u+x $DFO_JOB_DIR/execAB_SCIENCE_$DATE
		writeLog "	DRS_TYPE=INT: execAB_SCIENCE_$DATE sorted according to configured RAW_TYPEs"

# add the logging
		sed -i -e "/execAB/s|^.*|& \| tee -a $TMP_DIR/phoenix1.log|" $DFO_JOB_DIR/JOBS_NIGHT
		sed -i -e "/execQC/s|^.*|& \| tee -a $TMP_DIR/phoenix1.log|" $DFO_JOB_DIR/JOBS_NIGHT
	fi
	JOBS_PHOENIX=$DFO_JOB_DIR/JOBS_PHOENIX_$DATE

# fix bug in createJob: empty line $DFO_JOB_DIR
	JOB_DIR=`eval "echo $DFO_JOB_DIR"`
	grep -v "^${JOB_DIR}$" $DFO_JOB_DIR/JOBS_NIGHT > $JOBS_PHOENIX
	rm $DFO_JOB_DIR/JOBS_NIGHT
	
	writeLog "    ... JOBFILE is $JOBS_PHOENIX."

# for CALIB: rawDown file useless (at least for monthly mode)
# MCAL_MODE + monthly: replace the standard rawDownload file by a scheme that
# downloads all RAWFILEs of the month in 6 parallel streams before processing.
	if [ $MCAL_MODE = YES ]	
	then
# for the monthly mode we replace the rawDownload file by a better scheme
		if [ Q$MONTH != Q ]
		then
# start processing after 1 minute, downloads can run in background
			sed -i -e "/rawDown_CALIB/s/^.*/& \& sleep ${RAWDOWN_SLEEP}/" $JOBS_PHOENIX
# all RAWFILEs for that month to download before we start processing
# (each line becomes: mkdir <dir>; cd <dir>; ngasClient -f <file>)
			grep RAWFILE $DFO_AB_DIR/${DFO_FILE_NAME}.${MONTH}*.ab |\
 awk '{print $2}' |\
 sed "s|/$DFO_FILE_NAME| $DFO_FILE_NAME|" |\
 awk '{print "mkdir "$1" 2>/dev/null; cd "$1"; ngasClient -f "$2}' \
  > $TMP_DIR/rawDown_CALIB_$DATE

# we download in 6 parallel streams ...
			cat > $DFO_JOB_DIR/rawDown_CALIB_$DATE <<EOT
#!/bin/sh
# PHOENIX
# This is the list of RAW download calls to be executed before starting the cascade.
# It comes in 6 streams. It is special for the CALIB/monthly mode of $TOOL_NAME.
# This file is executable.
# =================================================
EOT

# chunk size = ceiling(total/6): the former floor division silently dropped
# up to 5 trailing download lines (6*floor(n/6) < n for n not divisible by 6)
			LENGTH6=`wc -l $TMP_DIR/rawDown_CALIB_$DATE | awk '{printf"%i\n", ($1+5)/6}'`
# guard against LENGTH6=0 (empty list): "sed 1,0 d" would be an invalid address
			if [ $LENGTH6 -lt 1 ]
			then
				LENGTH6=1
			fi
			PART=1
			while [ $PART -le 6 ]
			do
# move the next LENGTH6 lines into stream file $PART and register it in the
# master file (streams 1-5 run in background, stream 6 in foreground)
				echo "#STREAM $PART:" > $DFO_JOB_DIR/rawDown_CALIB_${DATE}_$PART
				cat $TMP_DIR/rawDown_CALIB_$DATE | head -$LENGTH6 >> $DFO_JOB_DIR/rawDown_CALIB_${DATE}_$PART
				if [ $PART -lt 6 ]
				then
					echo "$DFO_JOB_DIR/rawDown_CALIB_${DATE}_$PART &" >> $DFO_JOB_DIR/rawDown_CALIB_${DATE}
				else
					echo "$DFO_JOB_DIR/rawDown_CALIB_${DATE}_$PART " >> $DFO_JOB_DIR/rawDown_CALIB_${DATE}
				fi
				sed -i -e "1,${LENGTH6} d" $TMP_DIR/rawDown_CALIB_$DATE
				PART=`echo $PART | awk '{print $1+1}'`
			done
			chmod u+x $DFO_JOB_DIR/rawDown_CALIB_${DATE}*
		fi
	fi

# =======================================================================
# 2.3 Add the mcalDown file 
# =======================================================================
	writeLog "7.1 Add the mcalDown file ..."

# for SCIENCE: MCALDOWNFILE
# for CALIB:   useless (at least for monthly mode)
	MCALDOWNFILE=mcalDown_${MODE}_$DATE
	rm -f $DFO_JOB_DIR/$MCALDOWNFILE

# queue_mcal_downloads: append the ngasClient calls for all mcalibs listed
# in $TMP_DIR/ph_mcalibs2 to $TMP_DIR/$MCALDOWNFILE.
# Every 10th call and the final one run in the foreground, acting as a
# barrier so that at most ~10 downloads are in flight at any time.
# Bug fix: the old hard-coded case list (10|20|...|190) stopped throttling
# beyond entry 190; the modulo test works for any batch size.
	queue_mcal_downloads() {
		INDEX=0
		LAST=`cat $TMP_DIR/ph_mcalibs2 | wc -l`
		for MC in `cat $TMP_DIR/ph_mcalibs2 | awk '{print $1}'`
		do
			INDEX=`echo $INDEX | awk '{print $1+1}'`
			MC1=`basename $MC`
			if [ $INDEX -eq $LAST ] || [ $((INDEX % 10)) -eq 0 ]
			then
# foreground call: waits implicitly for the preceding background calls
				echo "ngasClient -c $MC1" >> $TMP_DIR/$MCALDOWNFILE
				echo "" >> $TMP_DIR/$MCALDOWNFILE
			else
				echo "ngasClient -c $MC1 &" >> $TMP_DIR/$MCALDOWNFILE
			fi
		done
	}

# MCAL_MODE=NO: normal phoenix mode
	if [ $MCAL_MODE = NO ]	
	then
		rm -f $TMP_DIR/ph_mcalibs* $TMP_DIR/$MCALDOWNFILE
	
# collect all REAL (non-virtual) mcalibs referenced by the ABs
		for AB in `cat $DFO_MON_DIR/$AB_LIST | awk '{print $1}'`
		do
			grep "^MCALIB" $DFO_AB_DIR/$AB | grep REAL | grep -v DFS_PRODUCT | grep -v " GEN " | awk '{print $3,"MCALIB"}' >> $TMP_DIR/ph_mcalibs
			grep "^MCALIB" $DFO_AB_DIR/$AB | grep REAL | grep -v DFS_PRODUCT | grep    " GEN " | awk '{print $3,"GEN"}'    >> $TMP_DIR/ph_mcalibs
		done
	
# max. 10 downloads in foreground (taken from createJob)
		if [ -s $TMP_DIR/ph_mcalibs ]
		then
# unique list of target directories (2nd path component of the mcalib path)
			cat $TMP_DIR/ph_mcalibs | sed "s|/| |g" | awk '{print $2}' | sort -u > $TMP_DIR/ph_mcalibs1
			for D in `cat $TMP_DIR/ph_mcalibs1`
			do
# per-directory list goes to ph_mcalibs2 (the old code re-used ph_mcalibs1
# here, which only worked because the loop list above was already expanded
# by the backticks; the separate file makes this explicit)
				grep $D $TMP_DIR/ph_mcalibs | grep MCALIB | sort -u > $TMP_DIR/ph_mcalibs2
	
				if [ -s $TMP_DIR/ph_mcalibs2 ]
				then
					cat >> $TMP_DIR/$MCALDOWNFILE <<EOT
if [ ! -d $DFO_CAL_DIR/$D ]
then
	mkdir $DFO_CAL_DIR/$D
fi
cd $DFO_CAL_DIR/$D

EOT

# it seems that a simple copy is not any faster if multiple processes execute at the same time,
# hence we do it sequentially (could be made multi-threaded if needed)
# observed performance: about 30sec for 1 GB

# now we download mcalibs from NGAS if not found
					queue_mcal_downloads
				fi
			done
	
			if [ -s $TMP_DIR/$MCALDOWNFILE ]
	       		then
				cat > $DFO_JOB_DIR/$MCALDOWNFILE <<EOT
#!/bin/sh
# DFOS
# This is the list of MCALIB download calls to be executed before starting the cascade.
# This file is executable.
# =================================================
EOT
	
				cat $TMP_DIR/$MCALDOWNFILE >> $DFO_JOB_DIR/$MCALDOWNFILE

# optional header plugin, executed after the downloads
				if [ Q$HDR_PGI != Q ]
				then
					echo "$DFO_BIN_DIR/$HDR_PGI" >> $DFO_JOB_DIR/$MCALDOWNFILE
				fi
				
# safety margin for the last background downloads to finish
				echo "sleep 60" >> $DFO_JOB_DIR/$MCALDOWNFILE
		       		writeLog "... $DFO_JOB_DIR/$MCALDOWNFILE created."
				chmod u+x $DFO_JOB_DIR/$MCALDOWNFILE
			else
	       			writeLog "    ... no mcals to download."
	        	fi
		else
	 		writeLog "    ... no mcals to download."
		fi
	fi

# =======================================================================
# 2.4 modify JOBS_FILE (only for COMPLETE_MODE=NO)
# =======================================================================

# include call of $MCALDOWNFILE
# splice the mcalib download between the rawDown part (everything up to and
# including the rawDown_* line) and the execAB part of the job file
	if [ -s $DFO_JOB_DIR/$MCALDOWNFILE ] && [ $COMPLETE_MODE = NO ]
	then
		FIRST_PART=`grep -n "rawDown_${MODE}_$DATE"      $JOBS_PHOENIX | sed "s/:.*//"`
		SECND_PART=`grep -n "execAB_${MODE}_$DATE" $JOBS_PHOENIX | sed "s/:.*//"`

		cat $JOBS_PHOENIX | awk '{if (NR <= first) print}' first=$FIRST_PART > $TMP_DIR/JOBS_NIGHT
		echo "echo \"starting mcalib data download ...\"; $DFO_JOB_DIR/$MCALDOWNFILE 1>/dev/null" >> $TMP_DIR/JOBS_NIGHT
# just to be sure, sometimes not all files are properly downloaded
# (the second pass re-fetches anything the first pass missed)
		echo "echo \"starting 2nd mcalib data download ...\"; $DFO_JOB_DIR/$MCALDOWNFILE 1>/dev/null" >> $TMP_DIR/JOBS_NIGHT	

		cat $JOBS_PHOENIX | awk '{if (NR >= secnd) print}' secnd=$SECND_PART >> $TMP_DIR/JOBS_NIGHT
		mv $TMP_DIR/JOBS_NIGHT $JOBS_PHOENIX
	fi

	cp $JOBS_PHOENIX $TMP_DIR/JOBS_NIGHT

# =======================================================================
# 2.5 Call JOB_PGI if configured
#     This gives us the chance to implement non-std things like 2stream processing,
#     and fine-tuning required behaviour for download patterns
# =======================================================================

	if [ Q$JOB_PGI != Q ]
	then
		writeLog "7.2 Apply configured JOB_PGI: \$DFO_BIN_DIR/$JOB_PGI ..."	
		$DFO_BIN_DIR/$JOB_PGI $DATE 
		writeLog "... done."
	else
		writeLog "7.2 JOB_PGI: not configured."
	fi

# if CALIB, add the qualityChecker to prepare for autoCertifier
# (also for COMPLETE_MODE, which processes CALIBs as well)
	#if [ $MCAL_MODE = YES ]
	if [ $MCAL_MODE = YES ] || ([ $COMPLETE_MODE = YES ] )
	then
		writeLog "7.3 Create qualityChecker_$DATE ..."
		qualityChecker
		echo "$DFO_JOB_DIR/qualityChecker_$DATE " >> $TMP_DIR/JOBS_NIGHT
	fi
	mv $TMP_DIR/JOBS_NIGHT $JOBS_PHOENIX

# if CALIB, add the cal_QC flags for each participating date (ex $DATE since already included)
	if [ $MCAL_MODE = YES ] && [ Q$MONTH != Q ]
	then
		for DD in `cat $TMP_DIR/ph_list_dates | grep -v $DATE`
		do
# note: the backticked date command is expanded NOW (unquoted heredoc), i.e.
# the DFO_STATUS entries carry the job-creation timestamp
			cat >> $JOBS_PHOENIX <<EOT
echo "cal_QC $DD `date +%Y-%m-%d"T"%H:%M:%S`" >> $DFO_MON_DIR/DFO_STATUS
EOT
		done
	fi

# add the logging
# every vultur_exec call in the job file additionally tees into $PLOG1
	PLOG1="$TMP_DIR/phoenix1.log"
	sed -i -e "/vultur_exec/s|^.*|& \| tee -a $PLOG1|" $JOBS_PHOENIX
	chmod u+x $JOBS_PHOENIX
	rm -f $PLOG1
	writeLog "... $JOBS_PHOENIX ready for execution."
	dfoMonitor -q -n &
	
# =======================================================================
# 2.6 Decide how to continue
# 2.6.1 for QCJOB_CONCENTRATION: turn off the QCSCIENCE jobs
#	Supported for COMPLETE_MODE = NO
# =======================================================================

	if [ $QCJOB_CONCENTRATION = YES ] && [ $MONTHLY_MODE = YES ] && [ $COMPLETE_MODE = NO ]
	then
# comment out the per-date QC jobs; they are collected into one monthly file
		sed -i -e "/QCSCIENCE/s|^.*|#&|" $JOBS_PHOENIX
		sed -i -e "/sci_QC/s|^.*|#&|"    $JOBS_PHOENIX
		MONTH=`echo $DATE | cut -c 1-7`
		cat $DFO_JOB_DIR/execQC_SCIENCE_$DATE | grep -v "^#" >> $DFO_JOB_DIR/execQC_SCIENCE_$MONTH
		writeLog "7.3 QCJOB_CONCENTRATION=YES: execQC job added to monthly execQC_SCIENCE_$MONTH, for later execution."
		writeLog "    Find the corresponding log in the last date of this month."
	fi

# TARGET_SELECT=YES: modify last sci_Queued entry by adding TARGET_SELECT; when later option -M is called, this will be used to ask the user about TARGET_SELECT
	if [ $DEEP_MODE = YES ] && [ $TARGET_SELECT = YES ]
	then
		LAST_ENTRY=`grep "^sci_Queued" $DFO_MON_DIR/DFO_STATUS | grep " $DATE " | sort -k3,3 | tail -1 | awk '{print $3}'`
		sed -i -e "/$LAST_ENTRY/s|^.*|& TARGET_SELECT|" $DFO_MON_DIR/DFO_STATUS
	fi

# =======================================================================
# 2.6.2 stop here if -C is set
# =======================================================================

# With -C (create-only) the prepared job file is left for manual execution;
# we register it once in JOBS_AUTO and terminate.
	if [ "$CREATE_ONLY" = "YES" ]
	then
		writeLog "Option -C is set, ABs and jobs have been created and are ready for execution by hand, listed in \$DFO_JOB_DIR/JOBS_AUTO. Exit."
		if [ "$DEEP_MODE" = "YES" ]
		then
			writeLog "This is DEEP_MODE=YES. We work on a dummy date $DATE, derived from the runID $RUN_ID."
			writeLog "The original DATE is preserved in the ABs as CIVIL_DATE."
		fi

# entry in JOBS_AUTO (avoid duplicates)
		if ! grep -q "$JOBS_PHOENIX" $DFO_JOB_DIR/JOBS_AUTO
		then
			echo "$JOBS_PHOENIX" >> $DFO_JOB_DIR/JOBS_AUTO
		fi
		exit
	fi

# =======================================================================
# 3.0 call $JOBS_PHOENIX 
# =======================================================================

	if [ $QCJOB_CONCENTRATION = YES ]
	then
		writeLog "8.1 Call $JOBS_PHOENIX (only pipeline part) ..."
	else
		writeLog "8. Call $JOBS_PHOENIX ..."
	fi
	TIMESTAMP1=`date +%Y-%m-%d" "%H:%M:%S`
	TIMESTAMP_START=`date +%s`	# for elapsed time

# enable overwrite of FITS files in $DFS_PRODUCT/*/$DATE since
# QC procedure may have set u-w for the proper flux scaling
	chmod u+w $DFS_PRODUCT/*/$DATE/*fits 2>/dev/null

# do it	
	$JOBS_PHOENIX 

# merge the job log (filled via the tee calls in the job file) into the
# phoenix log, indented by one tab
	cat $PLOG1 | sed "s/^.*/	&/" >> $PLOG
	
	writeLog "... finished."
	TIMESTAMP2=`date +%Y-%m-%d" "%H:%M:%S`
	TIMESTAMP_END=`date +%s`	# for elapsed time

# check if AB failed; add analysis like "RAW file unavailable" etc
	rm -f $TMP_DIR/ph_failures
	for AB in `cat $DFO_MON_DIR/$AB_LIST | awk '{print $1}'`
	do
		CHECK_FAILURE=`grep "^PROCESS_STATUS" $DFO_AB_DIR/$AB | grep FAILED`
		if [ "Q$CHECK_FAILURE" != Q ]
		then
			echo "$AB" >> $TMP_DIR/ph_failures
		fi
	done

# notify the operator about failed ABs by mail
	if [ -s $TMP_DIR/ph_failures ]
	then
		cat > $TMP_DIR/ph_mail <<EOT
The following AB(s) have failed processing:
EOT
		cat $TMP_DIR/ph_failures >> $TMP_DIR/ph_mail
		mail -s "PHOENIX: failed AB(s) for $DATE" $OP_ADDRESS <$TMP_DIR/ph_mail
	fi

# stop here if PROCESS_ONLY is set
	if [ $PROCESS_ONLY = YES ]
	then
		CHECK_EXIST=`grep "^$DATE" $STAT_FILE | head -1`

# write statistics only once per date; never overwrite existing entries
		if [ $PROCESS_PART = NO ] && [ "Q$CHECK_EXIST" = Q ]
		then
			writeStats PROCESS_ONLY
		elif [ "Q$CHECK_EXIST" != Q ]
		then
			echo "***WARNING: Entry for $DATE found in $STAT_FILE, no update of statistics done."
		fi

		writeLog "Option -P is set, jobs have been executed and are ready for inspection. Exit."

		if [ $DEEP_MODE = YES ]
		then
			mv $PLOG $DFO_MON_DIR/phoenix_deep_$DATE.log
		else
			mv $PLOG $DFO_MON_DIR/phoenix_$DATE.log
		fi
	
		dfoMonitor -n -q &
		exit
	fi
# preserve the log under its final, mode-specific name
	if [ $DEEP_MODE = YES ]
	then
		mv $PLOG $DFO_MON_DIR/phoenix_deep_$DATE.log
	else
		mv $PLOG $DFO_MON_DIR/phoenix_$DATE.log
	fi
fi

# =======================================================================
# 4. call moveProducts
# This is the entry point for option -M
# =======================================================================

# default: certification not done yet (the monthly driver loop exports
# CERTIFY_DONE=YES before re-invoking the tool per date)
CERTIFY_DONE=${CERTIFY_DONE:-NO}

# =======================================================================
# 4.0.1 Special for SHOOT/XSHOO
# We now check if $DFO_FILE_NAME is properly set by .dfosrc. This is required to have a warning for the case 
# SHOOT vs. XSHOO. All previous steps work fine without actually checking this!
# We cannot check this earlier since there are usually no raw files in the system at the begin.
# =======================================================================

FIRST=`ls $DFO_RAW_DIR/$DATE/*fits 2>/dev/null | head -1`
if [ Q$FIRST != Q ] 
then
# compare the instrument prefix (up to the first '.') of the first raw file
# with the configured $DFO_FILE_NAME
	FIRST1=`basename $FIRST | sed "s/\./ /g" | awk '{print $1}'`
	if [ $FIRST1 != $DFO_FILE_NAME ]
	then
		echo "***ERROR: There is an issue with your definition of \$DFO_FILE_NAME (set to $DFO_FILE_NAME)"
		echo "          while the files in $DFO_RAW_DIR/$DATE start with $FIRST1. Fix by sourcing the proper .dfosrc and try again. Exit."
		exit
	fi	
fi

# =======================================================================
# 4.0.2 for MCAL_MODE=YES, we call autoCertifier now
# =======================================================================

# monthly branch: MONTH has already been resolved into a master DATE here
if [ $MCAL_MODE = YES ] && [ $MONTHLY_MODE = YES ] && [ Q$MONTH = Q ] && [ Q$DATE != Q ] 
then
	export FORCE_CHECK=YES
# call qualityChecker again, to refresh all CERTIF entries
	writeLog "9.0 Refreshing qualityChecker_$DATE ..."
	$DFO_JOB_DIR/qualityChecker_$DATE	

	if [ $MOVEP_ONLY = NO ]
	then

# quick check if issues exist
		for AB in `cat $DFO_MON_DIR/$AB_LIST | awk '{print $1}'`
		do
			ATAB=`echo $AB | sed "s/.ab/.tab/"`
			TLOG=`echo $AB | sed "s/.ab/.tlog/"`
			HTML=`echo $AB | sed "s/.ab/.html/"`

# NOTE(review): DATE1, PROD_PATH, PROD_ROOT_NAME are derived here but not
# used in this loop -- possibly leftovers; confirm before removing
			DATE1=`grep "^DATE" $DFO_AB_DIR/$AB | awk '{print $2}'`
			PROD_PATH=`grep "^PROD_PATH" $DFO_AB_DIR/$AB | awk '{print $2}'`
			PROD_PATH=`eval "echo $PROD_PATH"`
			PROD_ROOT_NAME=`grep "^PROD_ROOT_NAME" $DFO_AB_DIR/$AB | awk '{print $2}'`

# no ATAB? force to review
			if [ ! -s $DFO_AB_DIR/$ATAB ]
			then
				echo "PHOENIX: issue with certification for $DATE, review needed." > $TMP_DIR/ph_mail
				mail -s "PHOENIX: issue with certification for $DATE, review needed." $OP_ADDRESS <$TMP_DIR/ph_mail
				exit
			fi

# not green? force to review
# NOTE(review): this branch fires when ' CERTIFGREEN ' IS present in the
# .tab, which looks inverted w.r.t. the comment above -- confirm against
# the .tab file format before touching
			CHECK_GREEN=`grep " CERTIFGREEN " $DFO_AB_DIR/$ATAB`
			if [ "Q$CHECK_GREEN" != Q ]
			then
				echo "PHOENIX: issue with certification for $DATE, review needed." > $TMP_DIR/ph_mail
				mail -s "PHOENIX: issue with certification for $DATE, review needed." $OP_ADDRESS <$TMP_DIR/ph_mail
				exit
			fi	

# failed? force to review	
			PROCESS_STATUS=`grep "^PROCESS_STATUS" $DFO_AB_DIR/$AB | awk '{print $2}'`
			if [ Q$PROCESS_STATUS = QFAILED ]
			then
				echo "PHOENIX: issue with certification for $DATE, review needed." > $TMP_DIR/ph_mail
				mail -s "PHOENIX: issue with certification for $DATE, review needed." $OP_ADDRESS <$TMP_DIR/ph_mail
				exit
			fi
		done
	fi

# ok, if no issues exists, autoCertifier can run in automatic mode
	writeLog "9.1 Call autoCertifier ... "
	autoCertifier
	writeLog "... done. Refresh AB monitor ..."
	getStatusAB -d $DATE -f
	if [ $MOVEP_ONLY = NO ]
	then
		writeLog "... done with autoCertifier. We continue with the moveProducts loop."
	else
		writeLog "We continue with the moveProducts loop."
		echo "			Hit return:"
		read input
	fi

# =======================================================================
# 4.0.3 for MCAL_MODE=YES, we have to go by date now. 
# =======================================================================
# create list of dates
	rm -f $TMP_DIR/ph_list_dates

# extract the daily AB_lists from the monthly $BATCH
	BATCH="AB_list_CALIB_`echo $DATE | cut -c1-7`_BATCH"
	if [ ! -s $DFO_MON_DIR/$BATCH ]
	then
		echo "***ERROR: no \$DFO_MON_DIR/$BATCH found. Can't run in monthly mode. Pls. check and fix."
		exit
	fi

	rm -f $TMP_DIR/ph_list_all_ABs
	cat $DFO_MON_DIR/${BATCH} | awk '{print ab_dir"/"$1}' ab_dir="$DFO_AB_DIR" > $TMP_DIR/ph_list_all_ABs

# walk backwards through the pool, one day at a time, starting at $DATE
	DAY=1
	VC_DATE=$DATE
	if [ Q$NEW_SIZE = Q ]
	then
		NEW_SIZE=$POOL_SIZE
	fi

	while [ $DAY -le $NEW_SIZE ]
	do
		rm -f $TMP_DIR/ph_list_ABs
		grep "^DATE" `cat $TMP_DIR/ph_list_all_ABs` | grep "[[:space:]]${VC_DATE}" | sed "s/:DATE.*//" > $TMP_DIR/ph_list_ABs

# extract AB_LIST per date (required for moveProducts)
		if [ -s $TMP_DIR/ph_list_ABs ]
		then
			echo $VC_DATE >> $TMP_DIR/ph_list_dates
			rm -f $DFO_MON_DIR/AB_list_CALIB_${VC_DATE}
			for AB1 in `cat $TMP_DIR/ph_list_ABs`
			do
				AB2=`basename $AB1`
				grep "$AB2" $DFO_MON_DIR/$BATCH >> $DFO_MON_DIR/AB_list_CALIB_$VC_DATE
			done
		fi

		VC_DATE=`qcdate $DATE -$DAY`
		DAY=`echo $DAY | awk '{print $1+1}'`
	done

# now call phoenix day by day
	cp $DFO_MON_DIR/phoenix_$DATE.log $DFO_MON_DIR/phoenix_FIRST_PART.log	#for later completion of log
	export REF_DATE=$DATE	
	
	for D in `cat $TMP_DIR/ph_list_dates | sort`
	do
# daily call; FORCE_CHECK to enforce updateAB to replace VIRTUAL by REAL names
		export FORCE_CHECK=YES
# CERTIFY_DONE to tell the tool that autoCertifier has already been done
		export CERTIFY_DONE=YES
		$TOOL_NAME -d $D -M -X 
	done

# only at the end, no value if in between
	$DFO_GUI_DIR/refresh_browser $DFO_MON_DIR/dfoMonitor.html
		
# final cleanups
	mv $DFO_MON_DIR/AB_list_CALIB_`echo $REF_DATE | cut -c1-7`_BATCH $DFO_MON_DIR/FINISHED/`echo $REF_DATE | cut -c1-4`/`echo $REF_DATE | cut -c1-7`
	rm $DFO_MON_DIR/phoenix_FIRST_PART.log
	exit

# MCAL_MODE = YES, day by day: check nevertheless that AB_list exists, could be an xceptional direct daily call
#was: elif ([ $MCAL_MODE = YES ] && [ $MONTHLY_MODE = NO ] && [ Q$MONTH = Q ] && [ Q$DATE != Q ]) 
elif ([ $MCAL_MODE = YES ] && [ $MONTHLY_MODE = NO ] && [ Q$MONTH = Q ] && [ Q$DATE != Q ]) || [ $COMPLETE_MODE = YES ]
then

# exceptional direct daily call
	if [ $CERTIFY_DONE = NO ] 
	then
		AB_LIST=AB_list_CALIB_$DATE
# check if existing, otherwise exit
		if [ ! -s $DFO_MON_DIR/$AB_LIST ]
		then
			echo "				*** INFO: no ABs found, exit."
			exit
		fi
		
# call qualityChecker again, to refresh all CERTIF entries
		export FORCE_CHECK=YES
		writeLog "9.0 Refreshing qualityChecker_$DATE ..."
		$DFO_JOB_DIR/qualityChecker_$DATE	

		if [ $MOVEP_ONLY = NO ]
		then
			echo "*** INFO: stopped after processing, before autoCertifier. Can't do in automatic mode."
			echo "		Call with option -M to review and proceed."
			exit
		fi

# ok, if no issues exists, autoCertifier can run in automatic mode
		writeLog "9.1 Call autoCertifier (CALIBs only) ... "

# warn about possibly obsolete product comments for this date
		CHECK_EXIST=`grep CALIB $DFO_LST_DIR/PRODCOM/prod_comment_$DATE.txt 2>/dev/null | head -1`
		if [ "Q$CHECK_EXIST" != Q ]
		then
			echo "    CALIB entries found in $DFO_LST_DIR/PRODCOM/prod_comment_$DATE.txt, please check, they might be obsolete. Hit return to continue:"
			read input
		fi

# warn if products already exist in the calib dir (danger of overwrite)
		if [ -d $DFO_CAL_DIR/$DATE ]
		then
			CHECK_EXIST=`ls $DFO_CAL_DIR/$DATE | grep fits | head -1`
			if [ "Q$CHECK_EXIST" != Q ]
			then
				echo "    Check $DFO_CAL_DIR/$DATE, danger to overwrite products. Check content and then hit return:"
				read input
			fi
		fi 
		autoCertifier
		writeLog "... done. Refresh AB monitor ..."
		getStatusAB -d $DATE -f
	fi

	if [ $MOVEP_ONLY = NO ] || [ $COMPLETE_MODE = YES ]
	then
		writeLog "... done with autoCertifier. We continue with the moveProducts loop."
	else
		writeLog "We continue with the moveProducts loop."
		echo "			Hit return:"
		read input
	fi

# reconstruct the daily AB list from the monthly BATCH file if missing
	AB_LIST=AB_list_CALIB_$DATE
	if [ ! -s $DFO_MON_DIR/$AB_LIST ]
	then
		BATCH="AB_list_CALIB_`echo $DATE | cut -c1-7`_BATCH"
		if [ ! -s $DFO_MON_DIR/$BATCH ]
		then
			echo "***ERROR: no \$DFO_MON_DIR/$BATCH found. Can't run in daily mode. Pls. check and fix."
			exit
		fi

		rm -f $TMP_DIR/ph_list_ABs
		cat $DFO_MON_DIR/${BATCH} | awk '{print ab_dir"/"$1}' ab_dir="$DFO_AB_DIR" > $TMP_DIR/ph_list_all_ABs
		grep "^DATE" `cat $TMP_DIR/ph_list_all_ABs` | grep "[[:space:]]${DATE}" | sed "s/:DATE.*//" > $TMP_DIR/ph_list_ABs
		if [ -s $TMP_DIR/ph_list_ABs ]
		then
			for AB1 in `cat $TMP_DIR/ph_list_ABs`
			do
				AB2=`basename $AB1`
				grep "$AB2" $DFO_MON_DIR/$BATCH >> $DFO_MON_DIR/AB_list_CALIB_$DATE
			done
		fi
	fi
fi

# Select the log file for this run: DEEP runs get their own log name.
if [ "$DEEP_MODE" = "YES" ]
then
	PLOG=$DFO_MON_DIR/phoenix_deep_$DATE.log
elif [ "$DEEP_MODE" = "NO" ]
then
	PLOG=$DFO_MON_DIR/phoenix_$DATE.log
fi

# =======================================================================
# 4.0.4 MCAL_MODE=NO: If configured, call CERTIF_PGI 
# =======================================================================
# In target-selection mode the dpc AB must exist before we can continue.
if [ $TARGET_SELECT = YES ]
then
	if [ ! -s $DFO_AB_DIR/$DPC_AB ]
	then
		echo "*** ERROR:   Target selection mode chosen, but $DPC_AB does not exist. Exit."
# was 'exit -1': not portable sh (and truncated to 255 anyway);
# any non-zero status signals the error to callers
		exit 1
	fi
fi

# avoid useless errors from getStatusAB
# kill any still-running getStatusAB refresher of this user for this date
# NOTE(review): ps|grep + kill -9 is heavy-handed (pgrep/TERM would be
# gentler) -- availability of pgrep on the operational hosts unconfirmed
WHOAMI=`whoami`
GET_PID=`ps -aef | grep $WHOAMI | grep "getStatusAB -d $DATE -r" | grep -v grep | awk '{print $2}'`
if [ Q$GET_PID != Q ]
then
	kill -9 $GET_PID
fi

echo "======================================================================================================" | tee -a $PLOG
if [ $MOVEP_ONLY = YES ]
then
	writeLog "phoenix called with option -M"
fi

# SCIENCE mode with mandatory certification: interactive dialogs before the
# CERTIF_PGI plugin is applied
if [ $MCAL_MODE = NO ] && [ $CERTIF_REQUIRED = YES ]
then
# check for existence of $DFS_PRODUCT/$CERTIF_PROCATG/$DATE
	if [ $DEEP_MODE = YES ]
	then
		if [ ! -d $DFS_PRODUCT/$CERTIF_PROCATG/$DATE ]
		then
			echo "***ERROR: no product directory \$DFS_PRODUCT/\$CERTIF_PROCATG/$DATE found, can't do. Check entered run_ID."
			exit
		fi
	fi

# last sci_Queued entry as TARGET_SELECT?
	LAST_QUEUED_ENTRY=`grep " $DATE " $DFO_MON_DIR/DFO_STATUS | grep sci_Queued | tail -1 | awk '{print $3}'`
	IS_TARGET_SELECT=`grep "$LAST_QUEUED_ENTRY" $DFO_MON_DIR/DFO_STATUS | grep TARGET_SELECT | awk '{print $4}'`
	if [ "Q$IS_TARGET_SELECT" = QTARGET_SELECT ]
	then
# we offer a dialog to explore if the user wants to move only the last AB or all for that pseudo-date (because we cannot know)
		echo "Last execution for pseudo-date $DATE (RUN_ID $RUN_ID) was flagged as TARGET_SELECT (called for one AB only).
We can move either products for exactly one AB		(L: the one processed last)
or ALL currently existing dpc ABs for $DATE	(A).

Please enter your choice (default is A):"
		read AB_SELECT
		if [ Q$AB_SELECT != QL ]
		then
# choice A: rebuild the AB list from all ABs currently on disk
			grep RAW_TYPE $DFO_AB_DIR/*.ab | sed "s/:RAW_TYPE//" | sed "s|$DFO_AB_DIR/||" | sed "s/^.*/& /" > $DFO_MON_DIR/AB_list_SCIENCE_${DATE}_new
			echo "	New AB list \$DFO_MON_DIR/AB_list_SCIENCE_${DATE}_new created, containing the following dpc ABs:"
			grep dpc.ab $DFO_MON_DIR/AB_list_SCIENCE_${DATE}_new | sed "s/^.*/	&/"
			echo "	We will use the -M option to move all these ABs and their products."
			echo "	OK Y/N (N)?"
			read IS_OK
			if [ Q$IS_OK != QY ]
			then
				echo "	Think about it."
				rm $DFO_MON_DIR/AB_list_SCIENCE_${DATE}_new
				exit
			else
				echo "	OK, we now use this list for the -M option."
				mv $DFO_MON_DIR/AB_list_SCIENCE_${DATE}_new $DFO_MON_DIR/AB_list_SCIENCE_${DATE}
			fi
		else
# choice L: keep the existing AB list; only the last dpc AB is moved
			grep dpc.ab $DFO_MON_DIR/AB_list_SCIENCE_${DATE} | sed "s/^.*/	&/"
			echo "	We will use the -M option for this DEEP AB only (and its dependents), all others remain untouched." 
			echo "	OK Y/N (N)?"
			read IS_OK
			if [ Q$IS_OK != QY ]
			then
				echo "	Think about it."
				exit
			else
				echo "	OK, we continue."
			fi
		fi
	fi

# last entry for $DATE?
	LAST_ENTRY=`grep " $DATE " $DFO_MON_DIR/DFO_STATUS | tail -1 | awk '{print $1}'`

# if already certified, re-run the plugin only on request
	if [ Q$LAST_ENTRY = Qsci_Certif ] 
	then
		echo "Last flag is 'sci_Certif', certification done already; do you want to repeat (r) or continue (hit return)?"
		read input
		if [ Q$input = Qr ]
		then
			writeLog "9.1 Certification configured. Apply \$DFO_BIN_DIR/$CERTIF_PGI ..."
			echo "			Hit return to continue:"
			read input
			$DFO_BIN_DIR/$CERTIF_PGI $DATE | sed "s/^.*/	&/"
			writeLog "... done."
		fi
	else
		writeLog "9.1 Certification configured. Apply \$DFO_BIN_DIR/$CERTIF_PGI ..."
		echo "			Hit return to continue:"
		read input
		$DFO_BIN_DIR/$CERTIF_PGI $DATE | sed "s/^.*/	&/"
		writeLog "... done."
	fi
fi

# =======================================================================
# 4.1 Normal part
# =======================================================================

# With -M: sanity-check the last DFO_STATUS entry for $DATE before moving
# products; interactive confirmation for the tolerated exceptions.
if [ $MOVEP_ONLY = YES ]
then
	if [ $MCAL_MODE = YES ] && [ Q$MONTH != Q ]
	then
		writeLog "[From now on the log applies to the specified date]"	
	fi

	LAST_ENTRY=`grep " $DATE " $DFO_MON_DIR/DFO_STATUS | tail -1 | awk '{print $1}'`
	if [ $COMPLETE_MODE = YES ]
	then
# cal_Renamed entries are expected in COMPLETE_MODE; look behind them
		LAST_ENTRY=`grep " $DATE " $DFO_MON_DIR/DFO_STATUS | grep -v cal_Renamed | tail -1 | awk '{print $1}'`
	fi
		
	if [ Q$LAST_ENTRY = Q ]
	then
		:
	elif  [ Q$CERTIF_REQUIRED != QYES ] && ([ Q$LAST_ENTRY != Qsci_QC ] && [ Q$LAST_ENTRY != Qcal_QC ])
	then
		case $MCAL_MODE in
		 #"NO" ) writeLog "***ERROR: Last entry for $DATE in DFO_STATUS is $LAST_ENTRY, expected is: sci_QC or sci_Certif. Check out and start again." ; exit;;
		 "NO" ) if [ $COMPLETE_MODE = YES ]
			then
				echo "***INFO: Last entry in DFO_STATUS is $LAST_ENTRY but should be sci_QC or sci_Certif. This is perhaps ok if you have run this DATE exceptionally. OK (Y/N) (N)?"
				read OK_YN
				if [ Q$OK_YN != QY ]
				then
# typo fixed: was "Check oout"
					writeLog "Check out and start again." 
					exit 
				fi	
			fi ;;
		 "YES") if [ $LAST_ENTRY = "cal_Renamed" ] 
			then
				echo "***INFO: Last entry in DFO_STATUS is $LAST_ENTRY, which is fine if you have just renamed. OK (Y/N) (N)?"
				read OK_YN
				if [ Q$OK_YN != QY ]
				then
					writeLog "***ERROR: Last entry for $DATE in DFO_STATUS is $LAST_ENTRY, expected is: cal_QC. Check out and start again."
					exit
				fi
			fi ;;
		esac
	elif [ Q$CERTIF_REQUIRED = QYES ] && [ Q$LAST_ENTRY != Qsci_Certif ] 
	then
# message fixed: the condition above requires sci_Certif (the old text
# claimed sci_QC was expected)
		writeLog "***ERROR: Last entry for $DATE in DFO_STATUS is $LAST_ENTRY, expected is: sci_Certif. Check out and start again."
		exit
	fi
fi

# UPDATE Y/N?
if [ $MCAL_MODE = NO ] && [ $UPDATE_YN = NO ] 
then
# NO: we delete all previous files, if any, in $DFO_SCI_DIR/$DATE
# guard: ${VAR:?} aborts if the variable is unset/empty, so this can never
# degenerate into 'rm -rf /<DATE>' or 'rm -rf /' in a broken environment
	rm -rf ${DFO_SCI_DIR:?}/${DATE:?}
fi

if [ $MCAL_MODE = YES ]
then
	writeLog "[9.1 autoCertifier: already called]"
fi

if [ $COMPLETE_MODE = YES ]
then
# redirect the calib dir to a NEW subtree so the freshly created mcalibs do
# not clash with the operational ones; restored further below
	ORIG_CAL_DIR=$DFO_CAL_DIR
	export DFO_CAL_DIR=${ORIG_CAL_DIR}/NEW
# just in case
	mkdir $DFO_CAL_DIR 2>/dev/null
	mkdir $DFO_CAL_DIR/VCAL 2>/dev/null
	cat > $DFO_CAL_DIR/VCAL/AAREADME <<EOT
This is a dummy directory, defined because moveProducts expects it.
EOT

	writeLog "===================================================================="
	writeLog "9.2.1 Moving CALIB products to $DFO_CAL_DIR/\${CIVIL_DATE} (as listed in the ABs)"
	writeLog "    [logs go to qcweb/$RELEASE]"
	writeLog "    ..."

	rm -f $TMP_DIR/ph_list_civil_dates
	for AB in `cat $DFO_MON_DIR/AB_list_CALIB_$DATE | awk '{print $1}'`
	do
		RBLOG=`echo $AB | sed "s/.ab/.rblog/"`
		AB_ROOT=`echo $AB | sed "s/\.ab//"`
		PROD_PATH=`grep "^PROD_PATH" 		$DFO_AB_DIR/$AB | awk '{print $2}'`
		PROD_PATH=`eval "echo $PROD_PATH"`
		PROD_ROOT_NAME=`grep "^PROD_ROOT_NAME"	$DFO_AB_DIR/$AB | awk '{print $2}'`
		CIVIL_DATE=`grep "^CIVIL_DATE"		$DFO_AB_DIR/$AB | awk '{print $2}'`
		if [ Q$CIVIL_DATE = Q ]
		then
			echo "***ERROR: CIVIL_DATE not found in AB. Moved to $DFO_CAL_DIR and left there."
		else
			mkdir $DFO_CAL_DIR/$CIVIL_DATE 	2>/dev/null
		fi
		mkdir $DFO_LOG_DIR/$DATE 	2>/dev/null
		mkdir $DFO_PLT_DIR/$DATE 	2>/dev/null

# products go to $CIVIL_DATE; suppress errors due to failed ABs
# ph_list_civil_dates records "<product path> <civil date>" pairs; the
# date column drives the rename loop in 9.2.2 below
		ls $PROD_PATH/${PROD_ROOT_NAME}*fits 2>/dev/null | sed "s|^.*|& $CIVIL_DATE|" >> $TMP_DIR/ph_list_civil_dates
		mv $PROD_PATH/${PROD_ROOT_NAME}*fits $DFO_CAL_DIR/$CIVIL_DATE/ 2>/dev/null

# logs go to $DATE
# check if associated to other BATCH_IDs
# if yes, cp instead of mv the AB etc.
		CHECK_OTHER=`grep "^BATCH_ID" 	$DFO_AB_DIR/$AB | awk '{print $2}' | grep -v $DATE | head -1`
		if [ "Q$CHECK_OTHER" = Q ]
		then
			mv $DFS_LOG/$RBLOG 	   $DFO_LOG_DIR/$DATE/ 2>/dev/null
			mv $DFO_AB_DIR/${AB_ROOT}* $DFO_LOG_DIR/$DATE/ 2>/dev/null
			rm $DFO_AB_DIR/HISTO/${AB_ROOT}* 2>/dev/null
		else
# AB products and RBLOG needed also for other batches, for completeness
			cp $DFS_LOG/$RBLOG 	   $DFO_LOG_DIR/$DATE/ 2>/dev/null
			cp $DFO_AB_DIR/${AB_ROOT}* $DFO_LOG_DIR/$DATE/ 2>/dev/null
			sed -i -e "/^BATCH_ID.*CALIB_$DATE/d" $DFO_AB_DIR/$AB
		fi

# QC plots go to $DFO_PLT_DIR
		mv $PROD_PATH/${PROD_ROOT_NAME}*gif $DFO_PLT_DIR/$DATE 2>/dev/null
		mv $PROD_PATH/${PROD_ROOT_NAME}*png $DFO_PLT_DIR/$DATE 2>/dev/null
		mv $PROD_PATH/${PROD_ROOT_NAME}*jpg $DFO_PLT_DIR/$DATE 2>/dev/null
	done

# export log files
	cd $DFO_LOG_DIR
	writeLog "  create logs.tar ..."
	tar cf logs.tar $DATE
	writeLog "  scp logs.tar to ${DFO_WEB_SERVER}:${INSTR_DIR}/logs/$DATE ..."
	scp -o $SCP_TIMEOUT -o BatchMode=yes logs.tar ${DFO_WEB_SERVER}:${INSTR_DIR}/logs/ 1>/dev/null

	writeLog "  untar logs.tar on ${DFO_WEB_SERVER} (background) ..."
	ssh -o $SCP_TIMEOUT -o BatchMode=yes $DFO_WEB_SERVER "bin/webTarUnpack ${INSTR_DIR}/logs/ logs.tar" 1>/dev/null &

# export histo plot files (to have the links right on the AB monitor)
	cd $DFO_PLT_DIR
	writeLog "  create plots.tar ..."
	tar cf plots.tar $DATE
	writeLog "  scp plots.tar to ${DFO_WEB_SERVER}:${INSTR_DIR}/plots/$DATE ..."
	scp -o $SCP_TIMEOUT -o BatchMode=yes plots.tar ${DFO_WEB_SERVER}:${INSTR_DIR}/plots/ 1>/dev/null

	writeLog "  untar plots.tar on ${DFO_WEB_SERVER} (background) ..."
	ssh -o $SCP_TIMEOUT -o BatchMode=yes $DFO_WEB_SERVER "bin/webTarUnpack ${INSTR_DIR}/plots/ plots.tar" 1>/dev/null &

# rename mcalibs to their original (old) name: disabled
# renaming: better done with new names!
	#CDATE_MIN=`cat $TMP_DIR/ph_list_civil_dates | awk '{print $2}' | sort -u | head -1`
	#CDATE_MAX=`cat $TMP_DIR/ph_list_civil_dates | awk '{print $2}' | sort -u | tail -1`
# make the time window larger than strictly necessary, to be on the safe side
	#MJD1=`qcdate $CDATE_MIN | awk '{print $1-2}'`
	#MJD2=`qcdate $CDATE_MAX | awk '{print $1+2}'`

	#cat > $TMP_DIR/sl_main_query <<EOT
#SELECT
#	origfile , 
#	mjd_obs ,
#	pro_catg ,
#	ins_mode ,
#	pipefile ,
#	dp_id 
#FROM
#	qc_metadata..qc_products
#WHERE
#	mjd_obs between $MJD1 and $MJD2
#AND
#	instrume = "$DFO_INSTRUMENT"
#AND
#	virtual != 1
#GO
#EOT

# find the archived mcalibs
	#writeLog "  Prepare for renaming ..."
	#rm -f $TMP_DIR/sl_main_results
	#isql -Uqc -S$ARCH_SERVER -w999 -P`cat $ARCH_PWD` -i $TMP_DIR/sl_main_query | sed "1,1 d" | grep -v "affected" | grep -v "\-\-\-" | sed "/^$/d" > $TMP_DIR/sl_main_results

	writeLog "9.2.2 Looping across all dates with new mcalibs (evaluating CIVIL_DATE in ABs) ..."
	writeLog "      (new mcalibs are distributed in $DFO_CAL_DIR, date by date)"
	for CDATE in `cat $TMP_DIR/ph_list_civil_dates | awk '{print $2}' | sort -u`
	do
		writeLog "     $CDATE ..."
		cd $DFO_CAL_DIR/$CDATE

# since we haven't used moveProducts directly, we call renameProducts now explicitly
		writeLog "	Rename back pre-existing mcalibs ..."
		CHECK_EXIST=`ls ${MCAL_CODE}_*.fits 2>/dev/null | head -1`
		if [ Q$CHECK_EXIST != Q ]
		then
# restore the pipeline names (PIPEFILE keyword) before the fresh renaming
			dfits ${MCAL_CODE}_*.fits | fitsort -d PIPEFILE | sed "s/^.*/mv &/" > rn_files_back
			source rn_files_back
		fi
		writeLog "	Call 'renameProducts -m CALIB -d $CDATE' ... "
		renameProducts -m CALIB -d $CDATE | sed "s/^.*/	[renameProducts]	&/" | tee -a $PLOG
# call the renaming file
		source rn_files | grep -v "are the same file"
# add entry in JOBS_INGEST
		echo "ingestProducts -m CALIB -d $CDATE" >> $DFO_JOB_DIR/JOBS_INGEST
	done

# make entries unique
	grep "^#" $DFO_JOB_DIR/JOBS_INGEST > $DFO_JOB_DIR/JOBS_INGEST1
	grep -v "^#" $DFO_JOB_DIR/JOBS_INGEST | sort -u | sort -k5,5 >> $DFO_JOB_DIR/JOBS_INGEST1
	mv $DFO_JOB_DIR/JOBS_INGEST1 $DFO_JOB_DIR/JOBS_INGEST
# update the MCALIB section of the SCIENCE ABs
	export FORCE_CHECK=YES

	writeLog "===================================================================="
	writeLog "9.2.3 Move SCIENCE XML and TXT files to \$DFO_LOG_DIR/$DATE ..." 
	mkdir $DFO_LOG_DIR/$DATE 2>/dev/null
	for AB in `cat $DFO_MON_DIR/AB_list_SCIENCE_$DATE | awk '{print $1}'`
	do
		AB_ROOT=`echo $AB | sed "s/.ab//"`
		XML=`echo $AB | sed "s/.ab/.xml/"`
		TXT=`echo $AB | sed "s/.ab/.txt/"`
		mv $DFO_AB_DIR/$XML $DFO_AB_DIR/$TXT $DFO_LOG_DIR/$DATE/ 2>/dev/null
# clean up HISTO
		rm -f $DFO_AB_DIR/HISTO/${AB_ROOT}*
	done
	writeLog "9.2.4 Call moveProducts -m SCIENCE -d $DATE ... (UPDATE_YN: $UPDATE_YN)"
else

	writeLog "===================================================================="
	writeLog "9.2 Call moveProducts -m $MODE -d $DATE ... (UPDATE_YN: $UPDATE_YN)"
fi
writeLog "    [logs and plots go to qcweb/$RELEASE]"
writeLog "    ..."

# impersonate the release: swap $DFO_INSTRUMENT for $RELEASE so that the
# output is distributed properly on DFO_WEB_SERVER (restored further down)
export ORIG_DFO_INSTRUMENT=$DFO_INSTRUMENT
export DFO_INSTRUMENT=$RELEASE
export COMPLETE_MODE=$COMPLETE_MODE	# for updateAB to know that it should also scan other $MCAL_DATEs

rm -f $TMP_DIR/ph_moveP_log
case $UPDATE_YN in
 "YES" )
# answer 'N' to the interactive question via here-document
	moveProducts -m $MODE -d $DATE > $TMP_DIR/ph_moveP_log <<EOT
N
EOT
	;;
 * )
	moveProducts -m $MODE -d $DATE > $TMP_DIR/ph_moveP_log
	;;
esac

# COMPLETE mode temporarily redirected $DFO_CAL_DIR; restore the original
if [ "$COMPLETE_MODE" = "YES" ]
then
	export DFO_CAL_DIR=$ORIG_CAL_DIR
fi

# DEEP mode: publish the run overview, then archive it with the night logs
if [ "$DEEP_MODE" = "YES" ]
then
	scp -o $SCP_TIMEOUT -o BatchMode=yes $DFO_AB_DIR/overview_${RUN_ID}.txt ${DFO_WEB_SERVER}:${INSTR_DIR}/logs/$DATE/ 
	mv $DFO_AB_DIR/overview_${RUN_ID}.txt $DFO_LOG_DIR/$DATE/
fi

# append the (tab-indented) moveProducts output to the processing log
sed "s/^.*/	&/" $TMP_DIR/ph_moveP_log >> $PLOG

# =======================================================================
# 4.2 For MCAL_MODE = YES, call renameP separately, to check the names to 
#     be replaced
# =======================================================================

if [ $MCAL_MODE = YES ] && [ $REPLACE_LOGIC = SELECTIVE ] && [ -d $DFO_CAL_DIR/$DATE ]
then
# NOTE(review): this branch is unreachable -- the outer condition above
# already requires '-d $DFO_CAL_DIR/$DATE'; kept as-is for safety
	if [ ! -d $DFO_CAL_DIR/$DATE ]
	then
		writeLog "    ... no products found, skipped." 
	else
# any fits products at all for this date?
		CHECK_EXIST=`ls $DFO_CAL_DIR/$DATE/*fits | head -1`
		if [ Q$CHECK_EXIST = Q ]
		then
			writeLog "    ... no products found, skipped."
		else	
# find proper config file
			checkRNConfig

# remove cal_Finished set by moveProducts ...
			sed -i -e "/^cal_Finished $DATE .*/d" $DFO_MON_DIR/DFO_STATUS

			writeLog "9.3 Call 'renameProducts -m $MODE -d $DATE -c ${RNCONFIG}' ... "
			renameProducts -m $MODE -d $DATE -c ${RNCONFIG} | sed "s/^.*/	&/" >> $PLOG
			writeLog "    Check predicted filenames against archived ones ..."
# checkSelect writes its findings to $TMP_DIR/ph_check_newnames
			checkSelect | tee -a $PLOG

# we stop here if an issue has been discovered
			CHECK_DIFF=`grep "DIFF" $TMP_DIR/ph_check_newnames | head -1`

			if [ "Q$CHECK_DIFF" != Q ]
			then
# name issue found: undo the 'cal_Updated' status, restore the ABs and
# leave this date to manual intervention (script exits here)
				sed -i -e "/^cal_Updated $DATE .*/d" $DFO_MON_DIR/DFO_STATUS
				writeLog "***ERROR: Issue with calib names for $DATE; check out \$DFO_CAL_DIR/$DATE/rn_files."
				writeLog "Product files have not been renamed."
				writeLog "ToDo: Decide about the proper names, edit and call rn_files by hand."
				writeLog "Then, call '$TOOL_NAME -d $DATE -X -M' again." 

				scp -o $SCP_TIMEOUT -o BatchMode=yes $DFO_CAL_DIR/$DATE/RENAMED_ISSUE ${DFO_WEB_SERVER}:${INSTR_DIR}/logs/$DATE/ 1>/dev/null
# bring ABs back to $DFO_AB_DIR
				cp $DFO_LOG_DIR/$DATE/*.ab $DFO_AB_DIR
				writeLog "$DATE: Left to manual treatment."
				writeLog "                         "
				exit
			else
				writeLog "9.4 No issues with new filenames --> rename ..."
				cd $DFO_CAL_DIR/$DATE
				rm -f $TMP_DIR/ph_rn_check

# suppress trivial errors "are the same file" (if we have renamed already)
				$DFO_CAL_DIR/$DATE/rn_files 2>$TMP_DIR/ph_rn_check
				if [ -s $TMP_DIR/ph_rn_check ]
				then
					cat $TMP_DIR/ph_rn_check | grep -v "are the same file"
				fi
# now we update the ABs
				writeLog "9.5 Call updateAB again ..."
				for AB in `cat $DFO_MON_DIR/AB_list_CALIB_$DATE | awk '{print $1}'`
				do
					cp $DFO_LOG_DIR/$DATE/$AB $DFO_AB_DIR
# since we run updateAB again: remove duplicate entry
					sed -i -e "/updated by/d" $DFO_AB_DIR/$AB
				done

				updateAB -m CALIB -d $DATE >> $PLOG
			fi

# for the final getStatusAB, replace the BATCH_ID by the normal DATE value
			for AB in `cat $DFO_MON_DIR/AB_list_CALIB_$DATE | awk '{print $1}'`
			do
				mv $DFO_AB_DIR/$AB  $DFO_LOG_DIR/$DATE/
				sed -i -e "s/BATCH_ID.*/BATCH_ID	CALIB_$DATE/" $DFO_LOG_DIR/$DATE/$AB
			done
		fi
	fi
fi

# =======================================================================
# 4.3 plots (not done by moveP for SCIENCE)
# =======================================================================

if [ $MCAL_MODE = NO ]
then
	writeLog "9.3 scp SCIENCE QC plots ..."
	if [ ! -d $DFO_PLT_DIR/$DATE ]
	then
		writeLog "  ... no plots found."
	else
		cd $DFO_PLT_DIR/$DATE

# pack all plot files (jpg/png/gif and *_cs.html) into one tarball, ship it
# to the web server and unpack it there in the background (as moveProducts does)
		writeLog "  create plots.tar ..."
		tar cf plots.tar `ls | egrep ".jpg|.png|.gif|_cs.html"`
		writeLog "  scp plots.tar to ${DFO_WEB_SERVER}:${INSTR_DIR}/plots/$DATE ..."
		scp -o $SCP_TIMEOUT -o BatchMode=yes plots.tar ${DFO_WEB_SERVER}:${INSTR_DIR}/plots/$DATE/ 1>/dev/null

		writeLog "  untar plots.tar on ${DFO_WEB_SERVER} (background) ..."
		ssh -o $SCP_TIMEOUT -o BatchMode=yes $DFO_WEB_SERVER "bin/webTarUnpack ${INSTR_DIR}/plots/$DATE/ plots.tar" 1>/dev/null &
	fi
fi

# =======================================================================
# 4.3 call getStatusAB, cleanup CAL directories
#     It might be useful to use the flag -n to force the tool to NOT cleanup.
# =======================================================================
# getStatusAB 
writeLog "  final update of AB monitor ..."

# check for $DFO_MON_DIR/FINISHED directories
if [ $DEEP_MODE = NO ]
then
# standard mode: finished ABs are filed per year/month
	FINAL_YR=`echo $DATE | cut -c1-4`
	FINAL_MN=`echo $DATE | cut -c1-7`

	mkdir $DFO_MON_DIR/FINISHED/$FINAL_YR 2>/dev/null
	mkdir $DFO_MON_DIR/FINISHED/$FINAL_YR/$FINAL_MN 2>/dev/null

	getStatusAB -d $DATE -H | sed "s/^.*/	&/"

# DEEP_MODE=YES: we sort into PERIOD directories (e.g. P94), then by run_ID; prepare for 'getStatusAB -H -R'
else
	PSHORT=`echo $DATE | cut -c3-4`
# pseudo-date digits 00,01 etc. mean P100, P101 ...;
# 60, 94, 95 etc. mean P60, P94, P95 ...
	if [ $PSHORT -lt 60 ]
	then
		PERIOD=P1$PSHORT
	else
		PERIOD=P$PSHORT
	fi
	mkdir $DFO_MON_DIR/FINISHED/$PERIOD 2>/dev/null
	mkdir $DFO_MON_DIR/FINISHED/$PERIOD/$RUN_ID 2>/dev/null
	mkdir $DFO_MON_DIR/FINISHED/RUN 2>/dev/null
	getStatusAB -d $DATE -H -R $RUN_ID | sed "s/^.*/	&/"

# fill the DEEP_MODE.txt file (pseudo-date <-> RUN_ID mapping); create on first use
	
	if [ ! -s $DFO_MON_DIR/FINISHED/RUN/DEEP_MODE.txt ]
	then
		cat > $DFO_MON_DIR/FINISHED/RUN/DEEP_MODE.txt <<EOT
# This table is used to map the pseudo date and the RUN_ID
# for the DEEP project $RELEASE. Do not delete.
# It is filled by $TOOL_NAME and used by phoenixMonitor.
#===================================================================
# Pseudo-Date   RUN_ID
EOT
	fi
# append the mapping only once per RUN_ID
	CHECK_EXIST=`grep $RUN_ID $DFO_MON_DIR/FINISHED/RUN/DEEP_MODE.txt | grep -v "^#" | head -1`
	if [ "Q$CHECK_EXIST" = Q ]
	then
		echo "$DATE	$RUN_ID" >>  $DFO_MON_DIR/FINISHED/RUN/DEEP_MODE.txt
	fi
fi

# restore the instrument name changed above for the web distribution
export DFO_INSTRUMENT=$ORIG_DFO_INSTRUMENT

# cleanup CAL: only for MCAL_MODE=NO
if [ $MCAL_MODE = NO ] && [ $CLEANUP = NO ]
then
	writeLog "9.4 Cleanup \$DFO_CAL_DIR: skipped ..." 
elif [ $MCAL_MODE = NO ] && [ $CLEANUP = YES ]
then
	rm -f $TMP_DIR/ph_list_caldir

# NOTE: usually ABs have the proper DATE for mcalibs; 
# but for reprocessing projects there might exist the 2099-99-99 dummy date
# for DEEP, we delete the specific dummy date directory

	cd $DFO_CAL_DIR
# NOTE(review): duplicates the 'rm -f' a few lines above; harmless
	rm -f $TMP_DIR/ph_list_caldir
# collect all real date directories (20*), excluding the dummy date
	ls -d 20* 2>/dev/null | grep -v 2099-99-99 > $TMP_DIR/ph_list_caldir

	if [ $DEEP_MODE = YES ]
	then
# cwd is $DFO_CAL_DIR here, so this removes $DFO_CAL_DIR/$DATE
		writeLog "9.4 DEEP mode: Cleanup \$DFO_CAL_DIR/$DATE (background) ..."
		rm -rf $DATE &
	elif [ -s $TMP_DIR/ph_list_caldir ]
	then
# check for outdated: outside +/- 4 weeks around current DATE
		MIN=`qcdate $DATE -28`
		MAX=`qcdate $DATE +28`
# strip the dashes for numeric comparison (YYYYMMDD)
		MIN1=`echo $MIN | sed "s/-//g"`
		MAX1=`echo $MAX | sed "s/-//g"`

		writeLog "9.4 Cleanup \$DFO_CAL_DIR (outside $MIN ... $MAX) ..."
		for D in `cat $TMP_DIR/ph_list_caldir`
		do
			D1=`echo $D | sed "s/-//g"`
			if [ $D1 -lt $MIN1 ] || [ $D1 -gt $MAX1 ]
			then
				writeLog "removed: $D"
				rm -r $D
			fi
		done
	elif [ -d 2099-99-99 ]
	then
# dummy-date directory: empty it only when it has accumulated many files
		CHECK_MCAL=`ls 2099-99-99 | wc -l`
		if [ $CHECK_MCAL -gt 1000 ]
		then
			writeLog "9.4 Cleanup \$DFO_CAL_DIR/2099-99-99 (more than 1000 found) ..."
			rm -f 2099-99-99/*
		else
			writeLog "9.4 Cleanup \$DFO_CAL_DIR/2099-99-99: $CHECK_MCAL files found, no need to cleanup."
		fi
	else
		writeLog "9.4 Cleanup \$DFO_CAL_DIR: none found."
	fi	
fi

# save the execution log to AUTO_DAILY, otherwise it would be deleted by finishNight
[ -d $DFO_MON_DIR/AUTO_DAILY ] || mkdir $DFO_MON_DIR/AUTO_DAILY

# MCALIB runs come in two parts: prepend the saved first-part log if present
if [ $MCAL_MODE = YES ] && [ -s $DFO_MON_DIR/phoenix_FIRST_PART.log ]
then
	PLOG1=`basename $PLOG`
	cat $DFO_MON_DIR/phoenix_FIRST_PART.log $PLOG > $DFO_MON_DIR/AUTO_DAILY/$PLOG1
	rm $PLOG 
else
	mv $PLOG $DFO_MON_DIR/AUTO_DAILY/
fi

# from now on PLOG points to its new location in AUTO_DAILY
if [ "$DEEP_MODE" = "NO" ]
then
	PLOG="$DFO_MON_DIR/AUTO_DAILY/phoenix_$DATE.log"
elif [ "$DEEP_MODE" = "YES" ]
then
	PLOG="$DFO_MON_DIR/AUTO_DAILY/phoenix_deep_$DATE.log"
fi

# =======================================================================
# 5.0 Finish
# 5.1 provide statistics
# =======================================================================

if [ $MOVEP_ONLY = NO ]
then
	if [ $PROCESS_PART = NO ]
	then
		writeStats FINISHED
	fi
fi

# =======================================================================
# 5.2 finishNight
# =======================================================================
writeLog "10. Call finishNight"

# feed the interactive answers (C, Y, <empty>, N) via here-document
finishNight -d $DATE -c <<EOT
C
Y

N
EOT

# QCJOB_CONCENTRATION=YES: remove remaining job files
# bug fix: the variable was tested without '$' (literal string, always false)
# and the second file was missing the $DFO_JOB_DIR prefix
if [ "Q$QCJOB_CONCENTRATION" = QYES ] && [ $MONTHLY_MODE = YES ]
then
	rm -f $DFO_JOB_DIR/conc_JOBS_PHOENIX_$MONTH $DFO_JOB_DIR/execQC_SCIENCE_$MONTH
fi

# =======================================================================
# 5.3 call phoenixMonitor
# =======================================================================

case $DEEP_MODE in
 "NO" )
# standard mode: refresh the monitor for the month of DATE
	MON_MONTH=`echo $DATE | cut -c 1-7`
	writeLog "11.1 Call phoenixMonitor -m $MON_MONTH ..."
	phoenixMonitor -m $MON_MONTH 1>/dev/null
	;;
 * )
# DEEP mode: refresh per period
	writeLog "11.1 Call phoenixMonitor -P ..."
	phoenixMonitor -P 1>/dev/null
	;;
esac

# =======================================================================
# 5.4 call MON_PGI if configured
# =======================================================================

if [ Q$MON_PGI = Q ]
then
	writeLog "11.2 MON_PGI: not configured."	
else
# configured but not installed: refuse to continue
	if [ ! -s $DFO_BIN_DIR/$MON_PGI ]
	then
		writeLog "ERROR: \$MON_PGI $MON_PGI defined in config file but not found under $DFO_BIN_DIR. Check and start again."
		exit
	fi

	# BWo, v4.1.3:
	if [ $DEEP_MODE = NO ]
	then
		writeLog "11.2 Apply configured MON_PGI: \$DFO_BIN_DIR/$MON_PGI -d $DATE ..."
		$DFO_BIN_DIR/$MON_PGI -d $DATE | sed "s/^.*/	&/" 
	else
		writeLog "11.2 Apply configured MON_PGI: \$DFO_BIN_DIR/$MON_PGI -d $RUN_ID ..."
		$DFO_BIN_DIR/$MON_PGI -d $RUN_ID | sed "s/^.*/	&/" 
	fi
	writeLog "... done."
fi

# refresh the dfoMonitor GUI in the background
$DFO_GUI_DIR/refresh_browser $DFO_MON_DIR/dfoMonitor.html &

# drop the finished date from the auto-job queue (if the queue exists)
[ -s $DFO_JOB_DIR/JOBS_AUTO ] && sed -i -e "/$DATE/d" $DFO_JOB_DIR/JOBS_AUTO

# notify the operator by mail if configured
if [ $MAIL_YN = YES ]
then
	echo "phoenix finished $DATE" > $TMP_DIR/ph_mail
	mail -s "PHOENIX finished $DATE" $OP_ADDRESS <$TMP_DIR/ph_mail
fi

# remove date-related leftovers from the GUI directory
rm -f $DFO_GUI_DIR/*${DATE}*

# =======================================================================
# 5.5 Update call of phoenixMonitor in JOBS_INGEST
# =======================================================================

case $DEEP_MODE in
 "YES" )
# keep exactly one 'phoenixMonitor -P' entry, at the end of the file:
# delete any earlier occurrence, then append a fresh one
	sed -i -e "/^phoenixMonitor -P/d" $DFO_JOB_DIR/JOBS_INGEST
	echo "phoenixMonitor -P" >> $DFO_JOB_DIR/JOBS_INGEST
	;;
esac

# =======================================================================
# 5.6 upload processing info from SLAVE_ACCOUNT to MASTER_ACCOUNT
#	(for multi-account processing)
# =======================================================================
# convention in the ssh command strings below: unescaped $VAR expands
# locally (on the SLAVE), \$VAR expands remotely (on the MASTER)

if [ $IS_SLAVE = YES ]
then
	writeLog "12. Export process info to $MASTER_ACCOUNT ..."

# add entry in master $STAT_FILE
	writeLog "   scp entry for $STAT_FILE to $MASTER_ACCOUNT"
	grep "^$DATE" $STAT_FILE > $TMP_DIR/NEW_LINE

# don't replace by STAT_FILE here
	ssh -o $SCP_TIMEOUT -o BatchMode=yes $MASTER_ACCOUNT "scp -o BatchMode=yes $SLAVE_ACCOUNT:$TMP_DIR/NEW_LINE \$DFO_MON_DIR; cat \$DFO_MON_DIR/NEW_LINE >> \$DFO_MON_DIR/PHOENIX_DAILY_$RELEASE" 2>/dev/null

# manage DFO_MON_DIR/FINISHED directories
# snapshot the local (slave) directory values for use inside remote commands
	SLV_MON_DIR=`echo $DFO_MON_DIR`
	SLV_LST_DIR=`echo $DFO_LST_DIR`
	SLV_LOG_DIR=`echo $DFO_LOG_DIR`
	SLV_PLT_DIR=`echo $DFO_PLT_DIR`
	SLV_SCI_DIR=`echo $DFO_SCI_DIR`
	
# NOTE(review): FINAL_YR/FINAL_MN are set only in the DEEP_MODE=NO branch
# further up -- TODO confirm IS_SLAVE never combines with DEEP_MODE=YES
	writeLog "   scp comments to $MASTER_ACCOUNT ..."
	ssh -o $SCP_TIMEOUT -o BatchMode=yes $MASTER_ACCOUNT "mkdir \$DFO_MON_DIR/FINISHED/$FINAL_YR; mkdir \$DFO_MON_DIR/FINISHED/$FINAL_YR/$FINAL_MN" 2>/dev/null
	ssh -o $SCP_TIMEOUT -o BatchMode=yes $MASTER_ACCOUNT "scp $SLAVE_ACCOUNT:$SLV_MON_DIR/FINISHED/$FINAL_YR/$FINAL_MN/*${DATE}* \$DFO_MON_DIR/FINISHED/$FINAL_YR/$FINAL_MN" 2>/dev/null
# prodcom and rawcom comments
	ssh -o $SCP_TIMEOUT -o BatchMode=yes $MASTER_ACCOUNT "scp -o BatchMode=yes $SLAVE_ACCOUNT:$SLV_LST_DIR/PRODCOM/*_$DATE.txt \$DFO_LST_DIR/PRODCOM" 2>/dev/null
	ssh -o $SCP_TIMEOUT -o BatchMode=yes $MASTER_ACCOUNT "scp -o BatchMode=yes $SLAVE_ACCOUNT:$SLV_LST_DIR/RAWCOM/*_$DATE.txt  \$DFO_LST_DIR/RAWCOM" 2>/dev/null
	ssh -o $SCP_TIMEOUT -o BatchMode=yes $MASTER_ACCOUNT "scp -o BatchMode=yes $SLAVE_ACCOUNT:$SLV_LST_DIR/*_$DATE*  \$DFO_LST_DIR" 2>/dev/null
	ssh -o $SCP_TIMEOUT -o BatchMode=yes $MASTER_ACCOUNT "scp -o BatchMode=yes $SLAVE_ACCOUNT:$SLV_LST_DIR/REPORT/*_$DATE*  \$DFO_LST_DIR/REPORT" 2>/dev/null

# put logs and plots to MASTER
	ssh -o $SCP_TIMEOUT -o BatchMode=yes $MASTER_ACCOUNT "mkdir \$DFO_PLT_DIR/$DATE; mkdir \$DFO_LOG_DIR/$DATE" 2>/dev/null

	if [ -s $DFO_PLT_DIR/$DATE/plots.tar ]
	then
		writeLog "   scp plots.tar to $MASTER_ACCOUNT \$DFO_PLT_DIR/$DATE ..."
		ssh -o $SCP_TIMEOUT -o BatchMode=yes $MASTER_ACCOUNT "scp -o BatchMode=yes $SLAVE_ACCOUNT:$SLV_PLT_DIR/$DATE/plots.tar \$DFO_PLT_DIR/$DATE" 2>/dev/null
		ssh -o $SCP_TIMEOUT -o BatchMode=yes $MASTER_ACCOUNT "cd  \$DFO_PLT_DIR/$DATE; tar xvf plots.tar; rm plots.tar" 1>/dev/null 2>/dev/null
	fi

	if [ -d $DFO_LOG_DIR/$DATE ]
	then
		writeLog "   scp logs.tar to $MASTER_ACCOUNT \$DFO_LOG_DIR/$DATE ..."
		cd $DFO_LOG_DIR/$DATE
# pack everything except existing tarballs
		tar cf logs.tar `ls | grep -v tar`
		ssh -o $SCP_TIMEOUT -o BatchMode=yes $MASTER_ACCOUNT "scp -o BatchMode=yes  $SLAVE_ACCOUNT:$SLV_LOG_DIR/$DATE/logs.tar \$DFO_LOG_DIR/$DATE" 2>/dev/null
		ssh -o $SCP_TIMEOUT -o BatchMode=yes $MASTER_ACCOUNT "cd  \$DFO_LOG_DIR/$DATE; tar xvf logs.tar; rm logs.tar" 1>/dev/null 2>/dev/null
		rm -f logs.tar
		cd $DFO_PLT_DIR/$DATE
	fi

# leave a marker so the MASTER knows this date was processed on the slave
	writeLog "   Mark \$DFO_SCI_DIR on $MASTER_ACCOUNT ..."
	echo "processed on $SLAVE_ACCOUNT" > $DFO_SCI_DIR/$DATE/PROCESSED_SLAVE
       	ssh -o $SCP_TIMEOUT -o BatchMode=yes $MASTER_ACCOUNT "mkdir \$DFO_SCI_DIR/$DATE; scp -o BatchMode=yes $SLAVE_ACCOUNT:$SLV_SCI_DIR/$DATE/PROCESSED_SLAVE \$DFO_SCI_DIR/$DATE" 2>/dev/null

	writeLog "   Now call phoenixMonitor -d $DATE on $MASTER_ACCOUNT to refresh links ..."
	ssh -o $SCP_TIMEOUT -o BatchMode=yes $MASTER_ACCOUNT "phoenixMonitor -d $DATE" 1>/dev/null 2>/dev/null
	writeLog "   Update links of exported score HTML files ..."
	ssh -o $SCP_TIMEOUT -o BatchMode=yes $MASTER_ACCOUNT "sed -i -e \"s|${SLV_ROOT}|${MST_ROOT}|\"g  \$DFO_LOG_DIR/$DATE/*.html" 2>/dev/null

	writeLog "   scp processing log to $MASTER_ACCOUNT"
       	ssh -o $SCP_TIMEOUT -o BatchMode=yes $MASTER_ACCOUNT "scp -o BatchMode=yes $SLAVE_ACCOUNT:$PLOG \$DFO_MON_DIR/AUTO_DAILY" 2>/dev/null
fi

# final log message, then remove the shipped tarball and leave
case $DEEP_MODE in
 "NO" ) writeLog "... phoenix for $DATE terminated." ;;
 *    ) writeLog "... phoenix (DEEP mode) for pseudo-date $DATE (runID $RUN_ID) terminated." ;;
esac
echo ""
rm -f $DFO_PLT_DIR/$DATE/plots.tar

exit
