
Lucia specs integration

Pierre-Yves Barriat, 1 year ago
parent
commit
09f45e96b2

+ 1957 - 0
runtime/classic/EC00.sh

@@ -0,0 +1,1957 @@
+#!/bin/bash
+
+#
+# Job options 
+#
+#SBATCH --job-name=EC00
+#SBATCH --time=12:00:00
+#SBATCH --account=ecearth
+#
+#SBATCH --nodes=4
+#SBATCH --exclusive
+#SBATCH --ntasks-per-node=102
+#SBATCH --partition=batch
+#
+set -ueo pipefail
+#
+LOCAL_NODES=4
+LOCAL_TASKS=408
+#
+stdout_file=${SLURM_SUBMIT_DIR-$PWD}/${SLURM_JOB_NAME-"local"}_${SLURM_JOB_ID-"id"}.log
+exec > ${stdout_file}
+echo "------------------ Job Info --------------------"
+echo "jobid : ${SLURM_JOB_ID-"id"}"
+echo "jobname : ${SLURM_JOB_NAME-"local"}"
+echo "nodename : ${SLURMD_NODENAME-"local"}"
+echo "# nodes : ${SLURM_JOB_NUM_NODES-$LOCAL_NODES}"
+echo "# tasks : ${SLURM_NTASKS-$LOCAL_TASKS}"
+echo "submit dir : ${SLURM_SUBMIT_DIR-$PWD}"
+
+set -ue
+#
+# Cluster variables
+#
+NB_CORES_PER_NODES=128
+MAX_CORES_PER_NODES=100
+LIST_CORES_SOCKET=$(seq -s',' 0 $((NB_CORES_PER_NODES-1)))
+#
+# Directories
+start_dir=${SLURM_SUBMIT_DIR-$PWD}
+
+# librunscript defines some helper functions
+. ${start_dir}/librunscript.sh
+
+# =============================================================================
+# *** BEGIN User configuration
+# =============================================================================
+
+# -----------------------------------------------------------------------------
+# *** General configuration
+# -----------------------------------------------------------------------------
+# Component configuration (for syntax of the $config variable, see librunscript.sh)
+#
+# Currently maintained:
+#     config="ifs amip oasis"                            # "GCM forced-SST" : IFS + AMIP
+#     config="ifs amip oasis lpjg:fdbck"                 # "Veg"            : forced-GCM + LPJ-Guess
+#     config="ifs amip oasis tm5:chem,o3fb,ch4fb,aerfb"  # "AerChem"        : forced-GCM + TM5
+#
+#     config="ifs nemo lim3 rnfmapper xios:detached oasis"                                 # "GCM"     : IFS+NEMO
+#     config="ifs nemo lim3 rnfmapper xios:detached oasis lpjg:fdbck"                      # "Veg"     : GCM+LPJ-Guess
+#     config="ifs nemo lim3 rnfmapper xios:detached oasis pisces lpjg:fdbck tm5:co2,co2fb" # "C-cycle" : GCM+LPJG+TM5
+#     config="ifs nemo lim3 rnfmapper xios:detached oasis tm5:chem,o3fb,ch4fb,aerfb"       # "AerChem" : GCM+TM5
+#
+#     config="ifs nemo pisces lim3 rnfmapper xios:detached oasis"     # "GCM"     : IFS+NEMO+PISCES
+#
+
+config="ifs nemo lim3 rnfmapper xios:detached oasis"
+
+# minimum sanity
+has_config amip nemo && error "Cannot have both nemo and amip in config!!"
+! has_config ifs && error "The ESM script requires ifs in config"
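+# Illustration only (assuming the has_config helper from librunscript.sh
+# behaves as it is used throughout this script): has_config succeeds when
+# every listed component appears in $config, so with the GCM config above
+#   has_config ifs nemo    -> true
+#   has_config lpjg:fdbck  -> false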
+
+# Experiment name (exactly 4 letters!)
+export exp_name=EC00
+
+# Simulation start and end date. Use any (reasonable) syntax you want.
+run_start_date="1850-01-01"
+run_end_date="${run_start_date} + 10 years"
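+# Illustration only: both date strings are normalised later with GNU date,
+# which accepts this free-form syntax, e.g.
+#   date -uR -d "1850-01-01 + 10 years"   # an RFC-2822 timestamp for 1860-01-01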
+
+# Set $force_run_from_scratch to 'true' if you want to force this run to start
+# from scratch, possibly ignoring any restart files present in the run
+# directory. Leave set to 'false' otherwise.
+# NOTE: If set to 'true' the run directory $run_dir is cleaned!
+force_run_from_scratch=true
+special_restart=false
+special_restart_from=ECE3
+special_restart_date="1995-01-01"
+
+# Resolution (TM5 resolution is set at compilation)
+ifs_grid=T255L91
+nem_grid=ORCA1L75
+
+# Restart frequency. Use any (reasonable) number and time unit you want.
+# For runs without restart, leave this variable empty
+rst_freq="1 year"
+
+# Number of restart legs to be run in one go
+run_num_legs=2
+
+# Coupling frequencies
+has_config ifs tm5  && cpl_freq_atm_ctm_hrs=6
+has_config ifs lpjg && cpl_freq_atm_lpjg_hrs=24
+
+# Don't change the coupling frequency because UPDCLIE (where SST and SIC
+# are updated) is called every 24 hours (hardcoded in ifs-36r4/src/ifs/utility/updtim.F90)
+has_config amip && cpl_freq_amip_sec=86400
+
+# Directories
+#start_dir=${PWD}
+ctrl_file_dir=${start_dir}/ctrl
+output_control_files_dir=${start_dir}/ctrl
+
+# Architecture
+build_arch=ecconf
+use_machinefile=%USE_MACHINEFILE%
+
+# This file is used to store information about restarts
+ece_info_file="ece.info"
+
+# -----------------------------------------------------------------------------
+# *** Read platform dependent configuration
+# -----------------------------------------------------------------------------
+. ${start_dir}/ecconf.cfg
+
+configure
+
+# -----------------------------------------------------------------------------
+# *** Time step settings
+# -----------------------------------------------------------------------------
+if has_config ifs
+then
+    case "${ifs_grid}" in
+
+        T159L*) ifs_time_step_sec=3600 ;;
+        T255L*) ifs_time_step_sec=2700 ;;
+        T511L*) ifs_time_step_sec=900  ;;
+
+        *)  error "Can't set time steps for unknown horizontal grid: ${ifs_grid}"
+            ;;
+    esac
+fi
+
+if has_config nemo
+then
+    case "${nem_grid}" in
+
+        ORCA1L*)   nem_time_step_sec=2700; lim_time_step_sec=2700 ;;
+        ORCA025L*) nem_time_step_sec=900 ; lim_time_step_sec=900  ;;
+
+        *)  error "Can't set time steps for unknown horizontal grid: ${nem_grid}"
+            ;;
+    esac
+fi
+
+if has_config ifs nemo
+then
+    case "${ifs_grid}--${nem_grid}" in
+
+        T159L*--ORCA1L*)
+            ifs_time_step_sec=3600; nem_time_step_sec=2700; lim_time_step_sec=2700; cpl_freq_atm_oce_sec=10800
+            ;;
+        T255L*--ORCA1L*)
+            ifs_time_step_sec=2700; nem_time_step_sec=2700; lim_time_step_sec=2700; cpl_freq_atm_oce_sec=2700
+            ;;
+        T511L*--ORCA025L*)
+            ifs_time_step_sec=900 ; nem_time_step_sec=900 ; lim_time_step_sec=900 ; cpl_freq_atm_oce_sec=2700
+            ;;
+
+        *)  error "Can't set time steps for unknown combination of horizontal grids: ${ifs_grid}-${nem_grid}"
+            ;;
+    esac
+fi
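+
+# Optional sanity check (a minimal sketch, using only the has_config/error
+# helpers from librunscript.sh): the atmosphere-ocean coupling period must be
+# an integer multiple of both component time steps, which the settings above
+# already satisfy.
+if has_config ifs nemo
+then
+    (( cpl_freq_atm_oce_sec % ifs_time_step_sec == 0 )) || \
+        error "cpl_freq_atm_oce_sec is not a multiple of ifs_time_step_sec"
+    (( cpl_freq_atm_oce_sec % nem_time_step_sec == 0 )) || \
+        error "cpl_freq_atm_oce_sec is not a multiple of nem_time_step_sec"
+fi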
+
+# -----------------------------------------------------------------------------
+# *** IFS configuration
+# -----------------------------------------------------------------------------
+
+ifs_version=36r4
+
+ifs_di_freq=$(( 24 * 3600 / ifs_time_step_sec ))
+ifs_ddh_freq=$(( 120 * 3600 / ifs_time_step_sec ))
+
+export ifs_res_hor=$(echo ${ifs_grid} | sed 's:T\([0-9]\+\)L\([0-9]\+\):\1:')
+ifs_res_ver=$(echo ${ifs_grid} | sed 's:T\([0-9]\+\)L\([0-9]\+\):\2:')
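+# Worked example with the defaults above (T255L91, ifs_time_step_sec=2700):
+#   ifs_res_hor=255, ifs_res_ver=91
+#   ifs_di_freq  = 24*3600/2700  = 32  time steps
+#   ifs_ddh_freq = 120*3600/2700 = 160 time steps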
+
+ifs_numproc=320
+
+ifs_exe_file=${ecearth_src_dir}/ifs-${ifs_version}/bin/ifsmaster-${build_arch}
+
+ifs_lastout=false
+
+# USE FORCING FROM CMIP5 (SOLAR, GHG, AEROSOL, O3)
+ifs_cmip5=TRUE
+# SWITCH FOR RCP AND HISTORICAL RUNS FOR CMIP5 (0=HISTO 1=RCP 3-PD, 2=RCP 4.5, 3=RCP 6.0, 4=RCP 8.5)
+ifs_cmip5_rcp=0
+# 1PCTCO2 and A4XCO2 should be used together with ifs_cmip_fixyear, i.e. the baseline GHG levels are taken from the year given by ifs_cmip_fixyear
+export ifs_cmip_fixyear=0
+export ifs_cmip_fixyear_ch4=0
+
+# Repeat trap from ifs/suecrad.F90 for early catch
+if ! has_config tm5:ch4fb && (( $ifs_cmip_fixyear != $ifs_cmip_fixyear_ch4 ))
+then
+    error 'CH4 in IFS is not provided by TM5, NCMIPFIXYR_CH4 should be set equal to NCMIPFIXYR'
+fi
+
+# USE FORCING FROM CMIP6 (HAS PRIORITY OVER LCMIP5)
+ifs_cmip6=TRUE
+# MAC-SP anthropogenic simple plume model (parameterization of anthropogenic aerosol optical properties)
+ifs_mac2sp=TRUE
+# Use CMIP6 prescribed preindustrial aerosol
+ifs_cmip6piaer=TRUE
+
+# !! scenario 'historical' max 2014
+# 1850          (o3_pi)
+# 1850 --> 2014 (o3_histo)
+# !! other scenarios (> 2014), default: SSP3-7.0
+# SSP1-1.9, SSP1-2.6, SSP1-2.6-Ext, SSP2-4.5, SSP3-7.0, SSP3-LowNTCF, SSP4-3.4, SSP5-3.4-OS, SSP4-6.0, SSP5-3.4-OS-Ext, SSP5-8.5, SSP5-8.5-Ext
+ifs_cmip6_scenario=historical
+
+# Enable optional COVID-19 scenarios, will enforce ifs_cmip6_scenario=SSP2-4.5
+ifs_covid19=FALSE
+# Choose one scenario : Base TwoYearBlip ModerateGreen StrongGreen FossilFuel
+ifs_covid19scen=Base
+# Basic sanity checks
+if [ ${ifs_covid19} == TRUE ] ; then
+    [ ${ifs_cmip6_scenario} != SSP2-4.5 ] && echo "*WARNING* with ifs_covid19=TRUE IFS uses ifs_cmip6_scenario=SSP2-4.5 not ${ifs_cmip6_scenario}"
+    has_config any lpjg pisces tm5 && error "ifs_covid19=TRUE is not supported with LPJ-GUESS, PISCES or TM5"
+fi
+
+# Read CMIP6 stratospheric aerosol data file, vertically integrated version
+lcmip6_strataer_simp=FALSE
+lcmip6_strataer_full=TRUE
+lcmip6_strataer_bckgd=FALSE
+
+# for an abrupt increase of CO2 to 4x its value in the starting year (ifs_cmip_fixyear)
+export ifs_A4xCO2=FALSE
+# for a 1% per year increase of CO2 until reaching 4x its value in the starting year (ifs_cmip_fixyear)
+export ifs_1PCTCO2=FALSE
+export bgc_1PCTCO2=FALSE
+
+# Time-varying orbital forcing (Qiong Zhang, SU-2013-09)
+# https://dev.ec-earth.org/projects/ecearth3/wiki/Orbital_forcing_in_EC-Earth_3
+#
+#   ifs_orb_switch=false, no orbital calculations applied
+#   ifs_orb_switch=true, use orbital calculations according to ifs_orb_mode
+#   ifs_orb_mode="fixed_year", or "variable_year", or "fixed_parameters"
+#     fixed_year: calculate the orbital parameters at ifs_orb_iyear, e.g.,1850
+#     variable_year: calculate orbital parameters annually start from ifs_orb_iyear
+#     fixed_parameters: prescribe orbital parameters for given year
+case "${ifs_grid}" in
+    T159*) ifs_orb_switch=true ;;
+    *)     ifs_orb_switch=false ;;
+esac
+ifs_orb_mode="variable_year"
+ifs_orb_iyear=$(date -u -d "${run_start_date}" +%Y)
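+# Illustration only: with run_start_date="1850-01-01" this gives
+# ifs_orb_iyear=1850, and for the T255 grid used here ifs_orb_switch stays
+# false, i.e. no orbital calculations are applied.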
+
+# Relaxation of soil moisture (Wilhelm May, LU; October 2017)
+#  
+# LRXSM: Parameter indicating the levels to be nudged 
+#
+#   LRXSM =  0: no nudging 
+#   LRXSM = 12: 4xdaily data and 3 levels (excluding level 1)      
+#   LRXSM = 13: 4xdaily data and 4 levels
+#
+# LRXSMTx: time scale of the relaxation for level X (in hours)
+#  
+#   LRXSMTx =   0: actual values replaced by external ones
+#   LRXSMTx =  24: 1 day
+#   LRXSMTx = 120: 5 days
+#
+# LRXSMS: indicates when the relaxation is done 
+#
+#   LRXSMS = 0: before the time step
+#   LRXSMS = 1:  after the time step
+#
+has_config soilnudg && ifs_lrxsm=13 || ifs_lrxsm=0
+ifs_lrxsmt1=96
+ifs_lrxsmt2=72
+ifs_lrxsmt3=48
+ifs_lrxsmt4=24
+ifs_lrxsms=1
+
+# IFS tuning parameters
+variant=
+has_config tm5:chem && variant=-AerChem
+ifs_tuning_parameter_file=${ctrl_file_dir}/ifs-tuning-parameters-${ifs_grid}${variant}.sh
+if [ -f ${ifs_tuning_parameter_file} ]
+then
+    source ${ifs_tuning_parameter_file}
+else
+    error "Sorry, ${ifs_tuning_parameter_file} not found, exiting."
+fi
+
+
+# Select source of vegetation data:
+#  ifs       climatology from IFS
+#  era20c    vegetation from an off-line LPJ-Guess run forced with ERA20C
+#            (currently available only for T255 and T159)
+#  cmip6     vegetation from an EC-Earth3-Veg (interactive LPJ-Guess) run 
+#            (currently available only for T255)
+#  custom_exp vegetation from any EC-Earth3-Veg run 'exp'
+#            (must contain the same variables as era20c & cmip6 and be located in the icmcl_exp folder)
+#  none      don't create an ICMCL file with vegetation data (this is set
+#            automatically if LPJG is used with feedback)
+#
+ifs_veg_source="cmip6"
+
+has_config lpjg:fdbck && ifs_veg_source="none"
+
+case ${ifs_veg_source} in
+"ifs" )
+    # Use Lambert-Beer to compute effective vegetation cover
+    n_compute_eff_veg_fraction=2
+    ;;
+"era20c" )
+    # LPJG vegetation is provided as effective cover
+    # Don't use Lambert-Beer
+    n_compute_eff_veg_fraction=0
+
+    case "${ifs_grid}" in
+        T159L*) veg_version=v29 ;;
+        T255L*) veg_version=v16 ;;
+        *)  error "Vegetation from off-line LPJ-Guess not available for ${ifs_grid}" ;;
+    esac    
+    ;;
+"cmip6" )
+    # LPJG vegetation is provided as effective cover
+    # Don't use Lambert-Beer
+    n_compute_eff_veg_fraction=0
+
+    case "${ifs_grid}" in
+        T255L*) veg_version=v32 ;;
+        *)  error "Vegetation from CMIP6 EC-Earth3-Veg not available for ${ifs_grid}" ;;
+    esac    
+    ;;
+"custom_"* )
+    # LPJG vegetation is provided as effective cover
+    # Don't use Lambert-Beer
+    n_compute_eff_veg_fraction=0
+
+    veg_version=${ifs_veg_source:7}
+    if [ ! -d ${ini_data_dir}/ifs/${ifs_grid}/icmcl_${veg_version} ]
+    then
+        error "requested IFS_VEG_SOURCE = ${ifs_veg_source} but not found in ${ini_data_dir}/ifs/${ifs_grid}/icmcl_${veg_version}"
+    fi
+    ;;
+"none" )
+    # LPJG with feedback
+    n_compute_eff_veg_fraction=0
+    ! has_config lpjg:fdbck && error "IFS requires an offline source of vegetation"
+    ;;
+* )
+    error "Vegetation from ${ifs_veg_source} not implemented"
+    ;;
+esac
+
+# use DMI land ice physics and varying snow albedo
+case "${ifs_grid}" in
+    T159*) ifs_landice=true ;;
+    *)     ifs_landice=false ;;
+esac
+
+# -----------------------------------------------------------------------------
+# *** NEMO/LIM configuration
+# -----------------------------------------------------------------------------
+
+# This is only needed if the experiment is started from an existing set of NEMO
+# restart files
+nem_restart_file_path=${start_dir}/nemo-rst
+
+nem_restart_offset=0
+
+nem_res_hor=$(echo ${nem_grid} | sed 's:ORCA\([0-9]\+\)L[0-9]\+:\1:')
+
+nem_config=${nem_grid}
+has_config lim3           && nem_config=${nem_config}_LIM3
+
+if has_config pisces tm5:co2
+then
+    nem_config=${nem_config}_CarbonCycle
+elif has_config pisces
+then
+    nem_config=${nem_config}_PISCES
+fi
+
+# TODO - nemo standalone configs are not accounted for in this script, but this would set the required nem_config
+! has_config ifs && nem_config=${nem_config}_standalone
+
+nem_exe_file=${ecearth_src_dir}/nemo-3.6/CONFIG/${nem_config}/BLD/bin/nemo.exe
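+# Illustration only: with nem_grid=ORCA1L75 and the GCM config above (lim3,
+# no pisces, no tm5:co2), nem_config resolves to ORCA1L75_LIM3 and the
+# executable to nemo-3.6/CONFIG/ORCA1L75_LIM3/BLD/bin/nemo.exe.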
+
+nem_numproc=86
+
+# Thermal conductivity of snow, see comment in ctrl/namelist.lim3.ref.sh
+case "${ifs_grid}" in
+    T159L* ) nem_rn_cdsn=0.25 ;;
+    * )      nem_rn_cdsn=0.27 ;;
+esac
+
+# -----------------------------------------------------------------------------
+# *** Runoff mapper configuration
+# -----------------------------------------------------------------------------
+
+rnf_exe_file=${ecearth_src_dir}/runoff-mapper/bin/runoff-mapper.exe
+rnf_numproc=1
+
+# -----------------------------------------------------------------------------
+# *** LPJ-GUESS configuration
+# -----------------------------------------------------------------------------
+
+lpjg_time_step_sec=86400
+lpjg_numproc=32
+
+has_config lpjg       && lpjg_on=1
+has_config lpjg:fdbck && lpjg_fdbck=1
+has_config tm5:co2    && lpjg_fdbck_tm5=1 || lpjg_fdbck_tm5=0
+
+export lpjg_fixNdepafter=-1
+export lpjg_fixLUafter=-1
+
+info '!!!! CMIP FIX YEAR SETTINGS:'
+info "ifs_cmip_fixyear:  $ifs_cmip_fixyear"
+info "lpjg_fixNDepAfter: $lpjg_fixNdepafter"
+info "lpjg_fixLUAfter:   $lpjg_fixLUafter"
+info '!!!!'  
+
+lpjg_res=T${ifs_res_hor}
+lpjg_exe_file=${ecearth_src_dir}/lpjg/build/guess_${lpjg_res}
+
+# -----------------------------------------------------------------------------
+# *** AMIP-reader configuration
+# -----------------------------------------------------------------------------
+
+amip_exe_file=${ecearth_src_dir}/amip-forcing/bin/amip-forcing.exe
+amip_numproc=1
+
+# -----------------------------------------------------------------------------
+# *** TM5 configuration
+# -----------------------------------------------------------------------------
+
+if $(has_config tm5)
+then
+    # With TM5, NPRTRV is set to 1 in the namelist. To avoid out-of-bounds
+    # array accesses in IFS, we must limit the number of cores for IFS
+    if (( ifs_numproc > (ifs_res_hor+1) ))
+    then
+        error "too much cores requested for IFS, max is $((ifs_res_hor+1))"
+    fi
+
+    # TM5 settings
+    has_config tm5:co2 && tmversion="co2" || tmversion="cb05"
+    has_config tm5:co2 && export tm5_co2=1 || export tm5_co2=0
+    export tm5_exch_nlevs=10
+    tm5_time_step_sec=3600
+    export tm5_numproc_x=2
+    export tm5_numproc_y=45
+    tm5_numproc=$(( tm5_numproc_x * tm5_numproc_y ))
+    export tm5_emiss_fixyear=0
+
+    # limited number of levels for feedback (aerosols, currently set to lmax_conv in TM5)
+    case ${tm5_exch_nlevs} in
+        34) export tm5_exch_nlevs_cutoff=23 ;;
+        10) export tm5_exch_nlevs_cutoff=10 ;;
+         4) export tm5_exch_nlevs_cutoff=4  ;;
+         *) error "not supported number of levels for TM5"
+    esac
+
+    # executable
+    tm5_exe_file=${ecearth_src_dir}/tm5mp/build-${tmversion}-ml${tm5_exch_nlevs}/appl-tm5-${tmversion}.x
+
+    # path to initial conditions, modify as needed
+    tm5_restart_file_path=${ini_data_dir}/tm5/restart/${tmversion}-ml${tm5_exch_nlevs}
+
+    # fields sent back to IFS
+    has_config tm5:o3fb   && tm5_to_ifs=O3 || tm5_to_ifs=
+    has_config tm5:ch4fb  && tm5_to_ifs=${tm5_to_ifs},CH4
+    has_config tm5:aerfb && tm5_to_ifs=${tm5_to_ifs},"\
+N2,SU2,BC2,OC2,N3,SU3,BC3,OC3,SS3,DU3,\
+N4,SU4,BC4,OC4,SS4,DU4,N5,BC5,OC5,N6,DU6,N7,DU7,\
+NO3,MSA,\
+AOD_01,AOD_02,AOD_03,AOD_04,AOD_05,AOD_06,AOD_07,AOD_08,AOD_09,AOD_10,AOD_11,AOD_12,AOD_13,AOD_14,\
+SSA_01,SSA_02,SSA_03,SSA_04,SSA_05,SSA_06,SSA_07,SSA_08,SSA_09,SSA_10,SSA_11,SSA_12,SSA_13,SSA_14,\
+ASF_01,ASF_02,ASF_03,ASF_04,ASF_05,ASF_06,ASF_07,ASF_08,ASF_09,ASF_10,ASF_11,ASF_12,ASF_13,ASF_14"
+    has_config tm5:co2fb && tm5_to_ifs=${tm5_to_ifs},CO2
+
+    export tm5_to_ifs=$(echo ${tm5_to_ifs} | sed "s/^,//")
+
+    # coupled to LPJ-Guess and/or PISCES?
+    has_config tm5:co2 lpjg   && export cpl_tm_guess=T  || export cpl_tm_guess=F
+    has_config tm5:co2 pisces && export cpl_tm_pisces=T || export cpl_tm_pisces=F
+fi
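+# Illustration only (applies when tm5 is in $config): the decomposition above
+# gives tm5_numproc = 2 * 45 = 90 tasks, and the guard at the top of this
+# block caps IFS at ifs_res_hor+1 = 256 tasks for T255.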
+
+# -----------------------------------------------------------------------------
+# *** OASIS configuration
+# -----------------------------------------------------------------------------
+
+# Restart files for the coupling fields (note 8 character limit in OASIS)
+#   rstas.nc : atmosphere single-category fields
+#   rstam.nc : atmosphere multi-category fields
+#   rstos.nc : ocean single-category fields
+#   rstom.nc : ocean multi-category fields
+oas_rst_ifs_nemo="rstas.nc rstos.nc"
+
+oas_rst_ifs_lpjg="vegin.nc lpjgv.nc"
+
+# Met fields from IFS to TM (always required)
+oas_rst_ifs_tm5="r_hum.nc r_g2d.nc r_udr.nc r_div.nc r_vor.nc \
+                 r_ddr.nc r_tmp.nc r_dmf.nc r_s2d.nc r_umf.nc"
+
+has_config tm5:chem && \
+    oas_rst_ifs_tm5=${oas_rst_ifs_tm5}' r_cc_.nc r_clw.nc r_cco.nc r_ciw.nc r_ccu.nc'
+
+has_config tm5:o3fb || has_config tm5:ch4fb && oas_rst_ifs_tm5=$oas_rst_ifs_tm5' o3ch4.nc'
+has_config tm5:aerfb && oas_rst_ifs_tm5=$oas_rst_ifs_tm5' C???????'
+
+# C-cycle configuration
+has_config tm5:co2 lpjg   && oas_rst_ifs_tm5=$oas_rst_ifs_tm5' l_co2.nc rlpjg.nc'
+has_config tm5:co2 pisces && oas_rst_ifs_tm5=$oas_rst_ifs_tm5' o_co2.nc pisce.nc'
+has_config tm5:co2fb      && oas_rst_ifs_tm5=$oas_rst_ifs_tm5' co2mx.nc'
+
+# final list of files depends on the activated components - this is used in save_ic as well
+#oas_rst_files="${oas_rst_ifs_nemo} ${oas_rst_ifs_tm5} vegin.nc lpjgv.nc"
+oas_rst_files=""
+has_config ifs nemo && oas_rst_files+=" ${oas_rst_ifs_nemo}"
+has_config ifs lpjg && oas_rst_files+=" ${oas_rst_ifs_lpjg}"
+has_config ifs tm5 && oas_rst_files+=" ${oas_rst_ifs_tm5}"
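+# Illustration only: with the GCM config above (ifs nemo, no lpjg, no tm5)
+# oas_rst_files resolves to the IFS/NEMO pair only: rstas.nc rstos.nc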
+
+# Decide whether the OASIS weight files for interpolation should be linked from
+# the setup directory (true) or not (false). In the latter case, the weights
+# are re-computed at the start of the run.
+oas_link_weights=true
+
+# Flux correction for runoff (not calving) sent from Oasis to ocean.
+# 1.07945 is computed to compensate for a P-E=-0.016 mm/day (valid for std res)
+case "${ifs_grid}" in
+    T159L* ) has_config nemo && oas_mb_fluxcorr=1.08652 ;;
+    * ) has_config nemo && oas_mb_fluxcorr=1.07945 ;;
+esac
+
+# -----------------------------------------------------------------------------
+# *** XIOS configuration
+# -----------------------------------------------------------------------------
+
+xio_exe_file=${ecearth_src_dir}/xios-2.5/bin/xios_server.exe
+
+xio_numproc=1
+
+# -----------------------------------------------------------------------------
+# *** Extra initial conditions saved during the run
+# -----------------------------------------------------------------------------
+if has_config save_ic
+then
+    source ./libsave_ic.sh
+    declare -a save_ic_date save_ic_date1 save_ic_sec save_ic_day save_ic_ppt_file save_ic_nemo_ts
+    oas_rst_files="${oas_rst_ifs_nemo} ${oas_rst_ifs_tm5} vegin.nc lpjgv.nc"
+fi
+
+# -----------------------------------------------------------------------------
+# *** Carbon cycle configuration
+# -----------------------------------------------------------------------------
+# set to true to write co2 fluxes sent to TM5
+ccycle_debug_fluxes=true
+
+# =============================================================================
+# *** END of User configuration
+# =============================================================================
+
+# =============================================================================
+# *** This is where the code begins ...
+# =============================================================================
+
+# -----------------------------------------------------------------------------
+# *** Create the run dir if necessary and go there
+#     Everything is done from here.
+# -----------------------------------------------------------------------------
+if [ ! -d ${run_dir} ]
+then
+    mkdir -p ${run_dir}
+    if $special_restart
+    then
+        force_run_from_scratch=false
+        echo 'rsync -av --delete ${run_dir}/../${special_restart_from}/ --exclude log --exclude output --exclude restart --exclude="${special_restart_from}_*" --exclude="srf*" --exclude="restart_*" --exclude="debug.*" --exclude="output.*" ${run_dir}'
+        rsync -av --delete ${run_dir}/../${special_restart_from}/ --exclude log --exclude output --exclude restart --exclude="${special_restart_from}_*" --exclude="srf*" --exclude="restart_*" --exclude="debug.*" --exclude="output.*" ${run_dir}
+        cp -f ${nem_exe_file} ${run_dir}
+        cp -f ${ifs_exe_file} ${run_dir}
+        cp -f ${rnf_exe_file} ${run_dir}
+        cp -f ${xio_exe_file} ${run_dir}
+        special_year=${special_restart_date:0:4}
+        sed -i "/$special_year/q" ${run_dir}/ece.info
+        . ${run_dir}/ece.info
+        special_restart_leg=$(printf %03d $((leg_number+1)))
+        special_restart_leg_oasis=$(printf %03d $((leg_number+2)))
+        # PUT HERE THE INSTRUCTIONS TO COPY THE restart files
+        rsync -av ${run_dir}/../../archive/${special_restart_from}/restart/ifs/${special_restart_leg}/ ${run_dir}
+        rsync -av ${run_dir}/../../archive/${special_restart_from}/restart/oasis/${special_restart_leg_oasis}/ ${run_dir}
+        cd ${run_dir}/../../archive/${special_restart_from}/restart/nemo/${special_restart_leg}
+        for f in *.nc; do
+            nf=${exp_name}${f:4}
+            cp $f ${run_dir}/$nf
+        done
+        cd -
+        cd ${run_dir}
+        for f in ${exp_name}_????????_restart_???_????.nc; do
+            nf=${f:14}
+            ln -s $f $nf
+        done
+        cd -
+        rm -f ${run_dir}/ICMCL${special_restart_from}INIT
+        mv ${run_dir}/ICMGG${special_restart_from}INIUA ${run_dir}/ICMGG${exp_name}INIUA
+        mv ${run_dir}/ICMGG${special_restart_from}INIT ${run_dir}/ICMGG${exp_name}INIT
+        mv ${run_dir}/ICMSH${special_restart_from}INIT ${run_dir}/ICMSH${exp_name}INIT
+    fi
+else
+    force_run_from_scratch=false
+    special_restart=false
+fi
+cd ${run_dir}
+
+# -----------------------------------------------------------------------------
+# *** Determine the time span of this run and whether it's a restart leg
+# -----------------------------------------------------------------------------
+
+# Regularise the format of the start and end date of the simulation
+run_start_date=$(date -uR -d "${run_start_date}")
+run_end_date=$(date -uR -d "${run_end_date}")
+
+
+# -----------------------------------------------------------------------------
+# *** Set path to grib_set
+# -----------------------------------------------------------------------------
+
+grib_set=${GRIB_BIN_PATH}${GRIB_BIN_PATH:+/}grib_set
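+# The ${GRIB_BIN_PATH:+/} expansion adds the "/" separator only when
+# GRIB_BIN_PATH is set and non-empty, e.g. (hypothetical path)
+#   GRIB_BIN_PATH=/opt/eccodes/bin  ->  /opt/eccodes/bin/grib_set
+#   GRIB_BIN_PATH empty             ->  grib_set (resolved via $PATH)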
+
+# Loop over the number of legs
+for (( ; run_num_legs>0 ; run_num_legs-- ))
+do
+
+    # Check for restart information file and set the current leg start date
+    #   Ignore restart information file if force_run_from_scratch is true
+    if ${force_run_from_scratch} || ! [ -r ${ece_info_file} ]
+    then
+        leg_is_restart=false
+        leg_start_date=${run_start_date}
+        leg_number=1
+    else
+        leg_is_restart=true
+        . ./${ece_info_file}
+        leg_start_date=${leg_end_date}
+        leg_number=$((leg_number+1))
+    fi
+
+    # Compute the end date of the current leg
+    if [ -n "${rst_freq}" ]
+    then
+        leg_end_date=$(date -uR -d "${leg_start_date} + ${rst_freq}")
+    else
+        leg_end_date=${run_end_date}
+    fi
+
+    # Check if legs are integer multiples of full years if LPJG is used
+    if has_config lpjg
+    then
+        
+        if [[ $(date +%m%d%T -u -d "${leg_start_date}") != "010100:00:00" || \
+            $(date +%m%d%T -u -d "${leg_start_date} + ${rst_freq}") != "010100:00:00" ]]
+        then
+            error "LPJ-GUESS runs must start on Jan 1 and end on Dec 31. Multi-year legs are allowed."
+        fi
+    fi              
+
+    if [ $(date -u -d "${leg_end_date}" +%s) -ge $(date -u -d "${run_end_date}" +%s) ]
+    then
+        leg_end_date=${run_end_date}
+        ifs_lastout=true
+    fi
+
+    # Some time variables needed later
+    leg_length_sec=$(( $(date -u -d "${leg_end_date}" +%s) - $(date -u -d "${leg_start_date}" +%s) ))
+    leg_start_sec=$(( $(date -u -d "${leg_start_date}" +%s) - $(date -u -d "${run_start_date}" +%s) ))
+    leg_end_sec=$(( $(date -u -d "${leg_end_date}" +%s) - $(date -u -d "${run_start_date}" +%s) ))
+    leg_start_date_yyyymmdd=$(date -u -d "${leg_start_date}" +%Y%m%d)
+    leg_start_date_yyyy=$(date -u -d "${leg_start_date}" +%Y)
+    leg_end_date_yyyy=$(date -u -d "${leg_end_date}" +%Y)
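+    # Worked example for the first leg (1850-01-01 to 1851-01-01, not a leap
+    # year): leg_length_sec = 365 * 86400 = 31536000, leg_start_sec = 0 and
+    # leg_start_date_yyyymmdd = 18500101.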
+
+    # Check whether there's actually time left to simulate - exit otherwise
+    if [ ${leg_length_sec} -le 0 ]
+    then
+        info "Leg start date equal to or after end of simulation."
+        info "Nothing left to do. Exiting."
+        exit 0
+    fi
+
+    # Initial conditions saved during the run
+    do_save_ic=false
+    has_config save_ic && save_ic_get_config
+    # if you do not use an option with save_ic, you must define 'do_save_ic' and
+    # 'save_ic_date_offset' here or in ../libsave_ic.sh/save_ic_get_config()
+    ${do_save_ic} && save_ic_define_vars
+
+    # -------------------------------------------------------------------------
+    # *** Prepare the run directory for a run from scratch
+    # -------------------------------------------------------------------------
+    if ! $leg_is_restart
+    then
+        # ---------------------------------------------------------------------
+        # *** Check if run dir is empty. If not, and if we are allowed to do so
+        #     by ${force_run_from_scratch}, remove everything
+        # ---------------------------------------------------------------------
+        if $(ls * >& /dev/null)
+        then
+            if ${force_run_from_scratch}
+            then
+                rm -fr ${run_dir}/*
+            else
+                error "Run directory not empty and \$force_run_from_scratch not set."
+            fi
+        fi
+
+        # ---------------------------------------------------------------------
+        # *** Copy executables of model components
+        # *** Additionally, create symlinks to the original place for reference
+        # ---------------------------------------------------------------------
+        cp    ${ifs_exe_file} .
+        ln -s ${ifs_exe_file} $(basename ${ifs_exe_file}).lnk
+
+        if $(has_config amip)
+        then
+            cp    ${amip_exe_file} .
+            ln -s ${amip_exe_file} $(basename ${amip_exe_file}).lnk
+        fi
+
+        if $(has_config nemo)
+        then
+            cp    ${nem_exe_file} .
+            ln -s ${nem_exe_file} $(basename ${nem_exe_file}).lnk
+
+            cp    ${rnf_exe_file} .
+            ln -s ${rnf_exe_file} $(basename ${rnf_exe_file}).lnk
+
+            cp    ${xio_exe_file} .
+            ln -s ${xio_exe_file} $(basename ${xio_exe_file}).lnk
+        fi
+
+        if $(has_config lpjg)
+        then
+            cp    ${lpjg_exe_file} .
+            ln -s ${lpjg_exe_file} $(basename ${lpjg_exe_file}).lnk
+        fi
+
+        if $(has_config tm5)
+        then
+            cp    ${tm5_exe_file} .
+            ln -s ${tm5_exe_file} $(basename ${tm5_exe_file}).lnk
+        fi
+
+        # ---------------------------------------------------------------------
+        # *** Files needed for IFS (linked)
+        # ---------------------------------------------------------------------
+
+        # Initial data
+        ln -s \
+        ${ini_data_dir}/ifs/${ifs_grid}/${leg_start_date_yyyymmdd}/ICMGGECE3INIUA \
+                                                            ICMGG${exp_name}INIUA
+        ln -s \
+        ${ini_data_dir}/ifs/${ifs_grid}/${leg_start_date_yyyymmdd}/ICMSHECE3INIT \
+                                                            ICMSH${exp_name}INIT
+        rm -f ICMGG${exp_name}INIT
+        cp ${ini_data_dir}/ifs/${ifs_grid}/${leg_start_date_yyyymmdd}/ICMGGECE3INIT \
+                                                            ICMGG${exp_name}INIT
+
+        # add bare_soil_albedo to ICMGG*INIT
+        tempfile=tmp.$$
+        ${grib_set} -s dataDate=$(date -u -d "$run_start_date" +%Y%m%d) \
+            ${ini_data_dir}/ifs/${ifs_grid}/climate/bare_soil_albedos.grb \
+            ${tempfile}
+
+        cat ${tempfile} >> ICMGG${exp_name}INIT
+        rm -f ${tempfile}
+
+        # add land ice mask if needed
+        if ${ifs_landice}
+        then
+            tempfile=tmp.$$
+            cdo divc,10 -setcode,82 -selcode,141 ICMGG${exp_name}INIT ${tempfile}
+            ${grib_set} -s gridType=reduced_gg ${tempfile} ${tempfile}
+            cat ${tempfile} >> ICMGG${exp_name}INIT
+            rm -f ${tempfile}
+        fi
+
+        # Other stuff
+        ln -s ${ini_data_dir}/ifs/rtables/* .
+
+        # Output control (ppt files)
+        if [ ! -f ${output_control_files_dir}/pptdddddd0600 ] &&  [ ! -f ${output_control_files_dir}/pptdddddd0300 ];then
+           echo "Error from ece-esm.sh: Neither the file pptdddddd0600 or pptdddddd0300 exists in the directory:"
+           echo " " ${output_control_files_dir}
+           exit -1
+        fi
+        mkdir postins
+        cp ${output_control_files_dir}/ppt* postins/
+        if [ -f postins/pptdddddd0600 ];then
+           ln -s pptdddddd0600 postins/pptdddddd0000
+           ln -s pptdddddd0600 postins/pptdddddd1200
+           ln -s pptdddddd0600 postins/pptdddddd1800
+        fi
+        if [ -f postins/pptdddddd0300 ];then
+           ln -s pptdddddd0300 postins/pptdddddd0900
+           ln -s pptdddddd0300 postins/pptdddddd1500
+           ln -s pptdddddd0300 postins/pptdddddd2100
+           if [ ! -f postins/pptdddddd0600 ];then
+               ln -s pptdddddd0300 postins/pptdddddd0000
+               ln -s pptdddddd0300 postins/pptdddddd0600
+               ln -s pptdddddd0300 postins/pptdddddd1200
+               ln -s pptdddddd0300 postins/pptdddddd1800
+           fi
+        fi
+        /bin/ls -1 postins/* > dirlist
+
+        # ---------------------------------------------------------------------
+        # *** Files needed for LPJ-GUESS
+        # ---------------------------------------------------------------------
+        if $(has_config lpjg)
+        then
+            # Check for valid grid
+            if [ $lpjg_res != "T255" -a $lpjg_res != "T159" ]
+            then
+                error "LPJG-gridlist doesn't exist for ifs-grid: ${ifs_grid}" 
+            fi
+            # Initial data - saved state for LPJ-GUESS (.bin format)
+            lpjgstartdir=$(printf "lpjg_state_%04d" $leg_start_date_yyyy)
+            ln -sf ${ini_data_dir}/lpjg/ini_state/${lpjg_res}/${lpjgstartdir} ${run_dir}/${lpjgstartdir}
+
+            # Control files (i.e. .ins, landuse, N deposition, soil type files etc.)
+            cp  -f ${ecearth_src_dir}/lpjg/data/ins/*.ins .
+            # activate the new litterfall scheme for C4MIP - for the coupled model this is done when both pisces and lpjg are activated
+            has_config pisces lpjg && echo -e "!override for EC-Earth-CC in runscript\nifpftlitterfall 1\ncalc_phen_after_restart 0" >> global.ins
+            mkdir -p ${run_dir}/landuse
+
+        fi
+
+        # ---------------------------------------------------------------------
+        # *** Files needed for NEMO (linked)
+        # ---------------------------------------------------------------------
+        if $(has_config nemo)
+        then
+            # Link initialisation files for matching ORCA grid
+            for f in \
+                bathy_meter.nc coordinates.nc \
+                ahmcoef.nc \
+                K1rowdrg.nc M2rowdrg.nc mask_itf.nc \
+                decay_scale_bot.nc decay_scale_cri.nc \
+                mixing_power_bot.nc mixing_power_cri.nc mixing_power_pyc.nc \
+                runoff_depth.nc subbasins.nc
+            do
+                [ -f ${ini_data_dir}/nemo/initial/${nem_grid}/$f ] && ln -s ${ini_data_dir}/nemo/initial/${nem_grid}/$f
+            done
+
+            # Copying the time-independent NEMO files for the matching ORCA grid in order to facilitate cmorisation:
+            for f in \
+                bathy_meter.nc subbasins.nc
+            do
+                mkdir -p output/nemo/ofx-data
+                [ -f ${ini_data_dir}/nemo/initial/${nem_grid}/$f ] && cp -f ${ini_data_dir}/nemo/initial/${nem_grid}/$f output/nemo/ofx-data/
+            done
+
+            # Link geothermal heating file (independent of grid) and matching weight file
+            ln -s ${ini_data_dir}/nemo/initial/Goutorbe_ghflux.nc
+            ln -s ${ini_data_dir}/nemo/initial/weights_Goutorbe1_2_orca${nem_res_hor}_bilinear.nc
+
+            # Link the salinity climatology file (needed for diagnostics)
+            ln -s ${ini_data_dir}/nemo/climatology/${nem_grid}/sali_ref_clim_monthly.nc
+
+            # Link either restart files or climatology files for the initial state
+            if $(has_config nemo:start_from_restart)
+            then
+                # When linking restart files, we accept three options:
+                # (1) Merged files for ocean and ice, i.e.
+                #     restart_oce.nc and restart_ice.nc
+                # (2) One-file-per-MPI-rank, i.e.
+                #     restart_oce_????.nc and restart_ice_????.nc
+                #     No check is done whether the number of restart files agrees
+                #     with the number of MPI ranks for NEMO!
+                # (3) One-file-per-MPI-rank with a prefix, i.e.
+                #     <exp_name>_<time_step>_restart_oce_????.nc (similar for the ice)
+                #     The prefix is ignored.
+                # The code assumes that one of the options can be applied! If more
+                # options are applicable, the first is chosen. If none of the
+                # options apply, NEMO will crash with missing restart file.
+                if   ls -U ${nem_restart_file_path}/restart_[oi]ce.nc > /dev/null 2>&1
+                then
+                    ln -s ${nem_restart_file_path}/restart_[oi]ce.nc ./
+
+                elif ls -U ${nem_restart_file_path}/restart_[oi]ce_????.nc > /dev/null 2>&1
+                then
+                    ln -s ${nem_restart_file_path}/restart_[oi]ce_????.nc ./
+
+                else
+                    for f in ${nem_restart_file_path}/????_????????_restart_[oi]ce_????.nc
+                    do
+                        ln -s $f $(echo $f | sed 's/.*_\(restart_[oi]ce_....\.nc\)/\1/')
+                    done
+                fi
+            else
+
+                # Temperature and salinity files for initialisation
+                ln -s ${ini_data_dir}/nemo/climatology/absolute_salinity_WOA13_decav_Reg1L75_clim.nc
+                ln -s ${ini_data_dir}/nemo/climatology/conservative_temperature_WOA13_decav_Reg1L75_clim.nc
+                ln -s ${ini_data_dir}/nemo/climatology/weights_WOA13d1_2_orca${nem_res_hor}_bilinear.nc
+
+                # Grid dependent runoff files
+                case ${nem_grid} in
+                    ORCA1*)   ln -s ${ini_data_dir}/nemo/climatology/runoff-icb_DaiTrenberth_Depoorter_ORCA1_JD.nc ;;
+                    ORCA025*) ln -s ${ini_data_dir}/nemo/climatology/ORCA_R025_runoff_v1.1.nc ;;
+                esac
+            fi
+
+            # for ocean_nudging
+            if $(has_config nemo:ocenudg) ; then
+                ln -s ${ini_data_dir}/nemo/oce_nudg/resto.nc
+            fi
+
+            # XIOS files
+            . ${ctrl_file_dir}/iodef.xml.sh > iodef.xml
+            ln -s ${ctrl_file_dir}/context_nemo.xml
+            ln -s ${ctrl_file_dir}/domain_def_nemo.xml
+            ln -s ${ctrl_file_dir}/axis_def_nemo.xml
+            ln -s ${ctrl_file_dir}/grids_def_nemo.xml
+            ln -s ${ctrl_file_dir}/field_def_nemo-lim.xml
+            ln -s ${ctrl_file_dir}/field_def_nemo-opa.xml
+            ln -s ${ctrl_file_dir}/field_def_nemo-pisces.xml
+            ln -s ${ctrl_file_dir}/field_def_nemo-inerttrc.xml
+            ln -s ${output_control_files_dir}/file_def_nemo-lim3.xml file_def_nemo-lim.xml
+            ln -s ${output_control_files_dir}/file_def_nemo-opa.xml
+            ln -s ${output_control_files_dir}/file_def_nemo-pisces.xml
+
+            if [ -f ${ini_data_dir}/xios/ORCA${nem_res_hor}/coordinates_xios.nc ]
+            then
+                cp ${ini_data_dir}/xios/ORCA${nem_res_hor}/coordinates_xios.nc ./
+            else
+                info "File 'coordinates_xios.nc' not found. NEMO can not be run with land domain removal!"
+            fi
+
+            # Files needed for TOP/PISCES
+            if $(has_config pisces)
+            then
+                ln -fs ${ini_data_dir}/nemo/pisces/dust_INCA_ORCA_R1.nc
+                ln -fs ${ini_data_dir}/nemo/pisces/ndeposition_Duce_ORCA_R1.nc
+                ln -fs ${ini_data_dir}/nemo/pisces/pmarge_etopo_ORCA_R1.nc
+                ln -fs ${ini_data_dir}/nemo/pisces/river_global_news_ORCA_R1.nc
+                ln -fs ${ini_data_dir}/nemo/pisces/Solubility_T62_Mahowald_ORCA_R1.nc
+
+                ln -fs ${ini_data_dir}/nemo/pisces/par_fraction_gewex_clim90s00s_ORCA_R1.nc
+                ln -fs ${ini_data_dir}/nemo/pisces/DIC_GLODAP_annual_ORCA_R1.nc
+                ln -fs ${ini_data_dir}/nemo/pisces/Alkalini_GLODAP_annual_ORCA_R1.nc
+                ln -fs ${ini_data_dir}/nemo/pisces/O2_WOA2009_monthly_ORCA_R1.nc
+                ln -fs ${ini_data_dir}/nemo/pisces/PO4_WOA2009_monthly_ORCA_R1.nc
+                ln -fs ${ini_data_dir}/nemo/pisces/Si_WOA2009_monthly_ORCA_R1.nc
+                ln -fs ${ini_data_dir}/nemo/pisces/DOC_PISCES_monthly_ORCA_R1.nc
+                ln -fs ${ini_data_dir}/nemo/pisces/Fer_PISCES_monthly_ORCA_R1.nc
+                ln -fs ${ini_data_dir}/nemo/pisces/NO3_WOA2009_monthly_ORCA_R1.nc
+
+                # create co2 concentration file atcco2.txt if required
+                if { [ $ifs_cmip_fixyear -gt 0 ] || [[ "${ifs_A4xCO2}" = "TRUE" ]]; } && [[ "${bgc_1PCTCO2}" = "FALSE" ]]
+                then
+                    rm -f atcco2.txt
+                elif [[ "${bgc_1PCTCO2}" = "TRUE" ]]
+                then
+                    cp -f ${ini_data_dir}/nemo/pisces/mole-fraction-of-carbon-dioxide-in-air_1pctCO2_1849-2016.txt atcco2.txt
+                else
+                    # determine scenario-name and co2-file middle-fix
+                    case $(echo ${ifs_cmip6_scenario} | tr '[:upper:]' '[:lower:]') in
+                        hist*)     pis_scen="ssp585"; pis_sco2_mfix="REMIND-MAGPIE-ssp585-1-2-1";;
+                        ssp2-4.5*) pis_scen="ssp245"; pis_sco2_mfix="MESSAGE-GLOBIOM-ssp245-1-2-1";;
+                        ssp5-3.4*) pis_scen="ssp534os"; pis_sco2_mfix="REMIND-MAGPIE-ssp534-over-1-2-1";;
+                        ssp5-8.5*) pis_scen="ssp585"; pis_sco2_mfix="REMIND-MAGPIE-ssp585-1-2-1";;
+                        *)  error "Scenario ${ifs_cmip6_scenario} not defined for PISCES" ;;
+                    esac
+
+                    # concatenate historic and scenario (2015+) co2 concentration file
+                    pis_sco2_pfix="${ini_data_dir}/nemo/pisces/mole-fraction-of-carbon-dioxide-in-air_input4MIPs_GHGConcentrations"
+                    cat ${pis_sco2_pfix}_CMIP_UoM-CMIP-1-2-0_gr1-GMNHSH_1849-2014.txt ${pis_sco2_pfix}_ScenarioMIP_UoM-${pis_sco2_mfix}_gr1-GMNHSH_2015-2500.txt > atcco2.txt
+                fi
+            fi
+
+            # linking surface boundary conditions for CFCs (even if CFCs are not included)
+            ln -fs ${ini_data_dir}/nemo/cfc/CFCs_CDIAC_extension_1637_2019.dat CFCs_CDIAC.dat
+
+            if $(has_config pisces:start_from_restart)
+            then
+            # Same three options as for nemo:start_from_restart
+                if   ls -U ${nem_restart_file_path}/restart_trc.nc > /dev/null 2>&1
+                then
+                    ln -s ${nem_restart_file_path}/restart_trc.nc ./
+
+                elif ls -U ${nem_restart_file_path}/restart_trc_????.nc > /dev/null 2>&1
+                then
+                    ln -s ${nem_restart_file_path}/restart_trc_????.nc ./
+
+                else
+                    for f in ${nem_restart_file_path}/????_????????_restart_trc_????.nc
+                    do
+                        ln -s $f $(echo $f | sed 's/.*_\(restart_trc_....\.nc\)/\1/')
+                    done
+                fi
+            fi
+
+        fi
+
+        # ---------------------------------------------------------------------
+        # *** Files needed for the Runoff mapper (linked)
+        # ---------------------------------------------------------------------
+
+        has_config rnfmapper && \
+            ln -s ${ini_data_dir}/runoff-mapper/runoff_maps.nc
+
+        # -------------------------------------------------------------------------
+        # *** File and dir needed for TM5
+        # -------------------------------------------------------------------------
+        if $(has_config tm5)
+        then
+            tm5_istart=9
+
+            case ${tm5_istart} in
+                33|32) ln -s \
+                    ${tm5_restart_file_path}/TM5_restart_${leg_start_date_yyyymmdd}_0000_glb300x200.nc
+                    ;;
+                31) ln -s ${tm5_restart_file_path}/tm5_save.hdf
+                    ;;
+                5)  ln -s ${tm5_restart_file_path}/tm5_mmix.hdf
+                    ;;
+                2|9) 
+                    ;;
+                *)  error "Unsupported initial fields option (TM5): ${tm5_istart}"
+                    ;;
+            esac
+
+            # spectral info
+            ln -s ${ini_data_dir}/tm5/TM5_INPUT/T${ifs_res_hor}_info.txt
+
+            # Profiling dir for TM5
+            mkdir -p ${run_dir}/tm5_profile
+        fi
+
+        # ---------------------------------------------------------------------
+        # *** Files needed for OASIS (linked)
+        # ---------------------------------------------------------------------
+
+        # Name table file
+        ln -s ${ini_data_dir}/oasis/cf_name_table.txt
+
+        # -- Get grid definition and weight files for IFS/NEMO or IFS/AMIP coupling
+        has_config nemo && \
+            oas_grid_dir=${ini_data_dir}/oasis/T${ifs_res_hor}-ORCA${nem_res_hor} && \
+            mycp='cp'
+
+        has_config amip && \
+            oas_grid_dir=${ini_data_dir}/oasis/AMIP && \
+            mycp='cdo -f nc copy' # to enforce nc format, needed for 'cdo merge' to work (have nc4c with Primavera files)
+
+        # Grid definition files
+        if $(has_config tm5)
+        then
+            ${mycp} ${oas_grid_dir}/areas.nc gcm_areas.nc
+            ${mycp} ${oas_grid_dir}/grids.nc gcm_grids.nc
+            ${mycp} ${oas_grid_dir}/masks.nc gcm_masks.nc
+        else
+            ln -s ${oas_grid_dir}/areas.nc
+            ln -s ${oas_grid_dir}/grids.nc
+            ln -s ${oas_grid_dir}/masks.nc
+        fi
+
+        # Weight files
+        case ${ifs_res_hor} in
+            159)  oas_agrd=080
+                  ;;
+            255)  oas_agrd=128
+                  ;;
+            511)  oas_agrd=256
+                  ;;
+            799)  oas_agrd=400
+                  ;;
+            *)    error "Unsupported horizontal resolution (IFS): ${ifs_res_hor}"
+                  ;;
+        esac
+
+        if $(has_config nemo)
+        then
+            case ${nem_res_hor} in
+                1)  oas_ogrd=O1t0
+                    ;;
+                025)  oas_ogrd=Ot25
+                    ;;
+                *)  error "Unsupported horizontal resolution (NEMO): ${nem_res_hor}"
+                    ;;
+            esac
+        fi
+
+        if ${oas_link_weights}
+        then
+            for f in ${oas_grid_dir}/rmp_????_to_????_GAUSWGT.nc
+            do
+                ln -s $f
+            done
+        fi
+
+        if $(has_config ifs nemo)
+        then
+            for f in ${oas_rst_ifs_nemo}
+            do
+                cp ${oas_grid_dir}/rst/$f .
+            done
+        fi
+
+        # -- Get grid definition, weight and restart files for TM5 coupling
+        if $(has_config tm5)
+        then
+            oas_grid_dir=${ini_data_dir}/oasis/T${ifs_res_hor}-TM5-LPJG
+
+            cp ${oas_grid_dir}/tm5_areas.nc .
+            cp ${oas_grid_dir}/tm5_grids.nc .
+            cp ${oas_grid_dir}/tm5_masks.nc .
+
+            if ${oas_link_weights}
+            then
+                for f in ${oas_grid_dir}/rmp_????_to_????_*.nc
+                do
+                    ln -s $f
+                done
+            fi
+
+            # -- Get restart files for TM5-IFS/LPJG/PISCES 
+            for f in ${oas_rst_ifs_tm5}
+            do
+                cp ${oas_grid_dir}/rst/${tm5_exch_nlevs}-levels/$f .
+            done
+
+            # -- Merge grid definition files
+            cdo merge gcm_areas.nc tm5_areas.nc areas.nc
+            cdo merge gcm_grids.nc tm5_grids.nc grids.nc
+            cdo merge gcm_masks.nc tm5_masks.nc masks.nc
+        fi
+
+    else # i.e. $leg_is_restart == true
+
+        # ---------------------------------------------------------------------
+        # *** Remove all leftover output files from previous legs
+        # ---------------------------------------------------------------------
+
+        # IFS files
+        rm -f ICM{SH,GG}${exp_name}+??????
+
+        # NEMO files
+        rm -f ${exp_name}_??_????????_????????_{grid_U,grid_V,grid_W,grid_T,icemod,SBC,scalar,SBC_scalar,diad_T}.nc
+
+        # TM5 restart file type
+        tm5_istart=33
+
+        if [ $tm5_istart -eq 31 ] && $(has_config tm5)
+        then
+            ln -sf save_${leg_start_date_yyyymmdd}00_glb300x200.hdf tm5_save.hdf
+        fi
+
+    fi # ! $leg_is_restart
+
+    #--------------------------------------------------------------------------
+    # *** Surface restoring and ocean nudging options
+    #--------------------------------------------------------------------------
+    if $(has_config nemo:ocenudg) ; then
+        ln -fs ${ini_data_dir}/nemo/oce_nudg/temp_sal*.nc ./
+    fi
+
+    if $(has_config nemo:surfresto) ; then
+        ln -fs ${ini_data_dir}/nemo/surface_restoring/sss_restore_data*.nc  ./
+        ln -fs ${ini_data_dir}/nemo/surface_restoring/sst_restore_data*.nc  ./
+        ln -fs ${ini_data_dir}/nemo/surface_restoring/mask_restore*.nc ./
+    fi
+
+    # -------------------------------------------------------------------------
+    # *** Remove land grid-points
+    # -------------------------------------------------------------------------
+    if $(has_config nemo:elpin)
+    then
+        if [ ! -f coordinates_xios.nc ]
+        then
+            error "ELpIN requested, but file 'coordinates_xios.nc' was not found"
+        fi
+        jpns=($(${ecearth_src_dir}/util/ELPiN/ELPiNv2.cmd ${nem_numproc}))
+        info "nemo domain decompostion from ELpIN: ${jpns[@]}"
+        nem_numproc=${jpns[0]}
+        nem_jpni=${jpns[1]}
+        nem_jpnj=${jpns[2]}
+    elif has_config nemo
+    then
+        info "nemo original domain decomposition (not using ELPiN)"
+    fi
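+    # Illustration only: the ELPiN helper is expected to print three numbers
+    # (ocean ranks, jpni, jpnj); a hypothetical "38 8 6" would set
+    # nem_numproc=38, nem_jpni=8 and nem_jpnj=6.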
+
+    # -------------------------------------------------------------------------
+    # *** Initial conditions saved during the run
+    # -------------------------------------------------------------------------
+    ${do_save_ic} && save_ic_prepare_output
+
+    # -------------------------------------------------------------------------
+    # *** Create some control files
+    # -------------------------------------------------------------------------
+
+    # Create TM5 runtime rcfile
+    tm5_start_date=$(date -u -d "${leg_start_date}" +%F\ %T)
+    tm5_end_date=$(date -u -d "${leg_end_date}" +%F\ %T)
+
+    if $(has_config tm5)
+    then
+        cp -f ${ctrl_file_dir}/tm5-config-run.rc ${run_dir}
+        ${ecearth_src_dir}/tm5mp/setup_tm5 --no-compile \
+            --time-start="${tm5_start_date}" --time-final="${tm5_end_date}" \
+            --istart=${tm5_istart} ${run_dir}/tm5-config-run.rc
+    fi
+
+    # IFS frequency output for namelist
+    if [ -f postins/pptdddddd0300 ]
+    then
+        ifs_output_freq=$(( 3 * 3600 / ifs_time_step_sec ))
+    elif [ -f postins/pptdddddd0600 ]
+    then
+        ifs_output_freq=$(( 6 * 3600 / ifs_time_step_sec ))
+    else
+        error "IFS output frequency undefined."
+    fi
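+    # Worked example with ifs_time_step_sec=2700 (T255):
+    #   3-hourly ppt files -> ifs_output_freq = 3*3600/2700 = 4 time steps
+    #   6-hourly ppt files -> ifs_output_freq = 6*3600/2700 = 8 time steps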
+
+    # IFS, NEMO, LIM, AMIP namelist and OASIS namcouple files
+    has_config ifs       && . ${ctrl_file_dir}/namelist.ifs.sh                          > fort.4
+    has_config nemo      && . ${ctrl_file_dir}/namelist.nemo.ref.sh                     > namelist_ref
+    has_config ifs nemo  && . ${ctrl_file_dir}/namelist.nemo-${nem_grid}-coupled.cfg.sh > namelist_cfg
+    has_config lim3      && . ${ctrl_file_dir}/namelist.lim3.ref.sh                     > namelist_ice_ref
+    has_config lim3      && . ${ctrl_file_dir}/namelist.lim3-${nem_grid}.cfg.sh         > namelist_ice_cfg
+    has_config rnfmapper && . ${ctrl_file_dir}/namelist.runoffmapper.sh                 > namelist.runoffmapper
+    has_config amip      && . ${ctrl_file_dir}/namelist.amip.sh                         > namelist.amip
+    has_config pisces    && . ${ctrl_file_dir}/namelist.nemo.top.ref.sh                 > namelist_top_ref
+    has_config pisces    && . ${ctrl_file_dir}/namelist.nemo.top.cfg.sh                 > namelist_top_cfg
+    has_config pisces    && . ${ctrl_file_dir}/namelist.nemo.pisces.ref.sh              > namelist_pisces_ref
+    has_config pisces    && . ${ctrl_file_dir}/namelist.nemo.pisces.cfg.sh              > namelist_pisces_cfg
+    has_config nemo      && . ${ctrl_file_dir}/namelist.nemo.age.ref.sh                 > namelist_age_ref
+    has_config nemo      && . ${ctrl_file_dir}/namelist.nemo.age.cfg.sh                 > namelist_age_cfg
+    # include the CFCs namelist even if CFCs are not included
+    has_config nemo      && . ${ctrl_file_dir}/namelist.nemo.cfc.ref.sh                 > namelist_cfc_ref
+    has_config nemo      && . ${ctrl_file_dir}/namelist.nemo.cfc.cfg.sh                 > namelist_cfc_cfg
+    # C-cycle - overwrite coupled nemo namelist
+    has_config pisces tm5:co2  && \
+        . ${ctrl_file_dir}/namelist.nemo-${nem_grid}-carboncycle.cfg.sh > namelist_cfg
+
+    lucia=0
+    . ${ctrl_file_dir}/namcouple.sh > namcouple
+
+    # -------------------------------------------------------------------------
+    # *** LPJ-GUESS initial data
+    # -------------------------------------------------------------------------
+    if $(has_config lpjg)
+    then
+        # LPJG runtime rcfile - update with leg dates
+        . ${ctrl_file_dir}/namelist.lpjg.sh > lpjg_steps.rc
+
+        # determine lpjg scenario-name and co2-file middle-fix
+        case $(echo ${ifs_cmip6_scenario} | tr '[:upper:]' '[:lower:]') in
+            hist*)     lpjg_scen="ssp370"; lu_src="AIM"    ; lpjg_sco2_mfix="AIM-ssp370-1-2-1"            ; lu_file_posfix="2018_10_08.txt";;
+            ssp1-1.9*) lpjg_scen="ssp119"; lu_src="IMAGE"  ; lpjg_sco2_mfix="IMAGE-ssp119-1-2-1"          ; lu_file_posfix="2019_03_13.txt";;
+            ssp1-2.6*) lpjg_scen="ssp126"; lu_src="IMAGE"  ; lpjg_sco2_mfix="IMAGE-ssp126-1-2-1"          ; lu_file_posfix="2018_10_08.txt";;
+            ssp2-4.5*) lpjg_scen="ssp245"; lu_src="MESSAGE"; lpjg_sco2_mfix="MESSAGE-GLOBIOM-ssp245-1-2-1"; lu_file_posfix="2018_10_08.txt";;
+            ssp3-7.0*) lpjg_scen="ssp370"; lu_src="AIM"    ; lpjg_sco2_mfix="AIM-ssp370-1-2-1"            ; lu_file_posfix="2018_10_08.txt";;
+            ssp4-3.4*) lpjg_scen="ssp434"; lu_src="GCAM"   ; lpjg_sco2_mfix="GCAM4-ssp434-1-2-1"            ; lu_file_posfix="2018_10_08.txt";;
+            ssp5-3.4*) lpjg_scen="ssp534os"; lu_src="MAGPIE"; lpjg_sco2_mfix="REMIND-MAGPIE-ssp534-over-1-2-1"            ; lu_file_posfix="2019_05_10.txt";;
+            ssp5-8.5*) lpjg_scen="ssp585"; lu_src="MAGPIE" ; lpjg_sco2_mfix="REMIND-MAGPIE-ssp585-1-2-1"  ; lu_file_posfix="2018_10_08.txt";;
+            *)  error "Scenario ${ifs_cmip6_scenario} not defined for LPJ-GUESS" ;;
+        esac
+
+        lpjg_scenario_new="historical + $lpjg_sco2_mfix"
+        lpjg_scenario_info=${run_dir}/lpjg_scenario.info
+
+        if [ -f $lpjg_scenario_info ]
+        then
+            source $lpjg_scenario_info
+            if [ "$lpjg_scenario_new" = "$lpjg_scenario" ]
+            then
+                lpjg_copy_rte=false
+            else
+                lpjg_copy_rte=true
+            fi
+        else
+            lpjg_copy_rte=true
+        fi
+
+        # copy RTE only if necessary (at beginning of a run or when scenario changes)
+        if $lpjg_copy_rte
+        then
+            # write info about installed scenarios to file
+            echo "lpjg_scenario=\"historical + $lpjg_sco2_mfix\"" > $lpjg_scenario_info
+            # set file prefixes depending on scenario
+            lu_file_prefix="1850_2100_luh2_Hist_ScenarioMIP_UofMD"
+            lu_file_midfix="2_1_f"
+
+            lu_path="${ini_data_dir}/lpjg/landuse/${lpjg_res}/${lpjg_scen}"
+
+            # copy and reference-link landuse,  gross transitions, crops, n-fertilisation
+            for inp in lu gross crop nfert
+            do
+                if [ $inp == "crop" -o $inp == "nfert" ]
+                then
+                    lu_src_file="${lu_path}/${inp}_rfirr_${lu_file_prefix}_${lu_src}_${lpjg_scen}_${lu_file_midfix}_${lpjg_res}_${lu_file_posfix}"
+                else
+                    lu_src_file="${lu_path}/${inp}_${lu_file_prefix}_${lu_src}_${lpjg_scen}_${lu_file_midfix}_${lpjg_res}_${lu_file_posfix}"
+                fi
+                cp -f $lu_src_file ${run_dir}/landuse/${inp}_luh2.txt
+                ln -fs $lu_src_file ${run_dir}/landuse/${inp}_luh2.txt.lnk
+            done
+            
+            # nitrogen deposition files
+            mkdir -p ${run_dir}/ndep
+            for inp in drynhx2 drynoy2 wetnhx2 wetnoy2
+            do
+                ndep_src_file="${ini_data_dir}/lpjg/ndep/${lpjg_res}/${lpjg_scen}/${lpjg_scen}_${lpjg_res}_${inp}.nc"
+                cp -f  $ndep_src_file ${run_dir}/ndep/${inp}.nc
+                ln -fs  $ndep_src_file ${run_dir}/ndep/${inp}.nc.lnk
+            done
+
+            # concatenate historic and scenario (2015+) co2 concentration file
+            # hist co2 file 
+            lpjg_hco2_file="${ini_data_dir}/ifs/cmip6-data/mole-fraction-of-carbon-dioxide-in-air_input4MIPs_GHGConcentrations_CMIP_UoM-CMIP-1-2-0_gr1-GMNHSH_0000-2014.nc"
+            # scenario co2 file 
+            lpjg_sco2_file="${ini_data_dir}/ifs/cmip6-data/mole-fraction-of-carbon-dioxide-in-air_input4MIPs_GHGConcentrations_ScenarioMIP_UoM-${lpjg_sco2_mfix}_gr1-GMNHSH_2015-2500.nc"
+            # combined file
+            lpjg_co2_file="${run_dir}/mole_fraction_of_carbon_dioxide_in_air_input4MIPs_lpjg.nc"
+            rm -f $lpjg_co2_file
+            cdo mergetime $lpjg_hco2_file $lpjg_sco2_file $lpjg_co2_file
+        fi
+
+        # Populate or update LPJG run directories
+        for (( n=1; n<=${lpjg_numproc}; n++ ))
+        do
+            # if run from scratch or number of procs has been extended
+            if ! $leg_is_restart || [ ! -d ${run_dir}/run${n} ]
+            then
+                # Make output directories
+                mkdir -p ${run_dir}/run${n}/output
+
+                # Copy *.ins, lpjg_steps.rc and OASIS-MCT restart files
+                cp ${run_dir}/*.ins ${run_dir}/run${n}
+
+                # Copy output control files
+                cp ${output_control_files_dir}/lpjg_cmip6_output.ins ${run_dir}/run${n}
+                ln -s ${output_control_files_dir}/lpjg_cmip6_output.ins ${run_dir}/run${n}/lpjg_cmip6_output.ins.lnk
+
+                cp ${ini_data_dir}/lpjg/oasismct/ghg*.txt ${run_dir}/run${n}
+                cp ${ini_data_dir}/lpjg/oasismct/${lpjg_res}/ece_gridlist_${lpjg_res}.txt ${run_dir}/run${n}/ece_gridlist.txt
+                ln -s ${ini_data_dir}/lpjg/oasismct/${lpjg_res}/ece_gridlist_${lpjg_res}.txt ${run_dir}/run${n}/ece_gridlist.txt.lnk
+
+                # Data only needed by master
+                if [ $n == 1 ]
+                then
+                    cp ${ini_data_dir}/lpjg/oasismct/${lpjg_res}/*.nc ${run_dir}
+                    cp ${ini_data_dir}/lpjg/oasismct/lpjgv.txt ${run_dir}/run${n}
+                fi
+            fi
+            # Refresh output dirs after they have been removed at the end of the last leg
+            mkdir -p ${run_dir}/run${n}/output/CMIP6
+            mkdir -p ${run_dir}/run${n}/output/CRESCENDO
+        done
+
+        if $leg_is_restart
+        then
+            lpjg_restart_dir="restart/lpjg/$(printf %03d $((leg_number-1)))"
+            lpjg_rst_state="${lpjg_restart_dir}/lpjg_state_${leg_start_date_yyyy}"
+            if [ -d "$lpjg_rst_state" ]
+            then
+                ln -sf $lpjg_rst_state
+            else
+                echo "lpjg restart dir $lpjg_rst_state not available"
+                exit -1
+            fi
+            # get oasis restart files
+            oasis_restart_dir="restart/oasis/$(printf %03d $((leg_number)))"
+            cp -f ${oasis_restart_dir}/*.nc ${run_dir}/
+
+        fi
+    fi
+
+
+    # -------------------------------------------------------------------------
+    # *** Create ICMCL file with vegetation fields
+    #     not needed if LPJG is used with feedback
+    # -------------------------------------------------------------------------
+    tempfile=tmp.$$
+
+    case ${ifs_veg_source} in
+    "ifs" )
+        # Vegetation from IFS (climatology)
+
+        icmclfile=${ini_data_dir}/ifs/${ifs_grid}/climate/ICMCL_ONLY_VEG_PD
+
+        # Create data for december, the year before the leg starts
+        ${grib_set} \
+            -s dataDate=$(printf "%04d" $((leg_start_date_yyyy-1)))1215 \
+            ${icmclfile}-12 ICMCL${exp_name}INIT
+
+        # Create data for all months in the years of the leg
+        for (( y=${leg_start_date_yyyy} ; y<=${leg_end_date_yyyy} ; y++ ))
+        do
+            yy=$(printf "%04d" $y)
+            for m in {1..12}
+            do
+                mm=$(printf "%02d" $m)
+                ${grib_set} -s dataDate=${yy}${mm}15 ${icmclfile}-${mm} ${tempfile}
+                cat ${tempfile} >> ICMCL${exp_name}INIT
+            done
+        done
+
+        # Create data for january, the year after the leg ends
+        ${grib_set} \
+            -s dataDate=$(printf "%04d" $((leg_end_date_yyyy+1)))0115 \
+            ${icmclfile}-01 ${tempfile}
+        cat ${tempfile} >> ICMCL${exp_name}INIT
+        ;;
+    "era20c"|"cmip6"|"custom_"* )
+        # Vegetation from a LPJG run (off-line or EC-Earth3-Veg)
+
+        rm -f ICMCL${exp_name}INIT
+
+        # Create data for all years of the leg, including one year
+        # before and one year after
+        for (( yr=leg_start_date_yyyy-1 ; yr<=leg_end_date_yyyy+1 ; yr+=1 ))
+        do
+
+            case ${ifs_veg_source} in
+            'era20c' )
+                # no scenario needed with era20c
+                icmcl_scenario="" ;;
+            'custom_'* )
+                # no scenario implemented yet with custom_dir
+                icmcl_scenario="" ;;
+            'cmip6' )
+                # select scenario, use SSP3-7.0 as default
+                # if not otherwise specified
+                icmcl_scenario="historical"
+                if ( [ $ifs_cmip_fixyear -le 0 ] && [ $yr -ge 2015 ] ) || \
+                     [ $ifs_cmip_fixyear -ge 2015 ]
+                then
+                    [[ ${ifs_cmip6_scenario} =~ ^SSP ]] \
+                        && icmcl_scenario=${ifs_cmip6_scenario} \
+                        || if [ ${ifs_covid19^^}  == TRUE ]
+                           then
+                               icmcl_scenario='SSP2-4.5'
+                           else
+                               icmcl_scenario='SSP3-7.0'
+                           fi
+                fi ;;
+            esac
+            
+            if [ $ifs_cmip_fixyear -le 0 ] || [[ ${ifs_veg_source} == custom_* ]]
+            then
+                cat ${ini_data_dir}/ifs/${ifs_grid}/icmcl_${veg_version}/${icmcl_scenario}/icmcl_$yr.grb >> ICMCL${exp_name}INIT
+            else
+                # Fixed-year forcing, requires cdo! (only when not using ifs_veg_source=custom_exp*)
+                # If cdo is not available at runtime, you need to prepare proper
+                # icmcl files beforehand and use them here
+                cdo setyear,$yr ${ini_data_dir}/ifs/${ifs_grid}/icmcl_${veg_version}/${icmcl_scenario}/icmcl_${ifs_cmip_fixyear}.grb ${tempfile}
+                cat ${tempfile} >> ICMCL${exp_name}INIT
+            fi
+        done
+        ;;
+    "none" )
+        info "no ICMCL file is created"
+        ;;
+    * )
+        error "Vegetation from ${ifs_veg_source} not implemented"
+        ;;
+    esac
+
+    # Clean up
+    rm -f ${tempfile}
+
+    # -------------------------------------------------------------------------
+    # *** Link the appropriate NEMO restart files of the previous leg
+    # -------------------------------------------------------------------------
+    if $leg_is_restart && $(has_config nemo) && ! $special_restart
+    then
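+        # Restart-file index: the NEMO time-step counter at the start of this leg
+        # (leg_start_sec divided by the ocean time step), optionally shifted back
+        # by nem_restart_offset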
+        ns=$(printf %08d $(( leg_start_sec / nem_time_step_sec - nem_restart_offset )))
+        for (( n=0 ; n<nem_numproc ; n++ ))
+        do
+            np=$(printf %04d ${n})
+            ln -fs ${exp_name}_${ns}_restart_oce_${np}.nc restart_oce_${np}.nc
+            ln -fs ${exp_name}_${ns}_restart_ice_${np}.nc restart_ice_${np}.nc
+            has_config pisces && \
+                ln -fs ${exp_name}_${ns}_restart_trc_${np}.nc restart_trc_${np}.nc
+        done
+
+        # Make sure there are no global restart files
+        # If links are found, they will be removed. We are cautious and do
+        # _not_ remove real files! However, if real global restart files are
+        # present, NEMO/LIM will stop because time stamps will not match.
+        [ -h restart_oce.nc ] && rm restart_oce.nc
+        [ -h restart_ice.nc ] && rm restart_ice.nc
+        [ -h restart_trc.nc ] && rm restart_trc.nc
+    fi
+
+    # -------------------------------------------------------------------------
+    # *** Remove some OASIS files of the previous leg
+    # -------------------------------------------------------------------------
+    if $leg_is_restart
+    then
+        rm -f anaisout_*
+    fi
+
+    # -------------------------------------------------------------------------
+    # *** Remove any ccycle debug output files
+    # -------------------------------------------------------------------------
+    if ${ccycle_debug_fluxes} && $leg_is_restart && $(has_config tm5:co2)
+    then
+      if $(has_config lpjg)
+      then
+          rm -f GUE_{CNAT,CANT,CNPP}_*.nc run1/GUE_{CNAT,CANT,CNPP}_*.nc
+          rm -f TM5_Land{CNAT,CANT,CNPP}_*.nc
+      fi
+      if $(has_config pisces)
+      then
+          rm -f O_CO2FLX_*.nc
+          rm -f TM5_OceCFLX_*.nc
+      fi
+    fi
+
+    # -------------------------------------------------------------------------
+    # *** Check consistency of OASIS restarts and IFS rcf with ece_info_file
+    # -------------------------------------------------------------------------
+    if $leg_is_restart
+    then
+        info "Check OASIS restarts and IFS rcf before launch"
+        leg_nb_3d=$(printf %03d $((leg_number)))
+
+        for oasis_restart_file in restart/oasis/${leg_nb_3d}/*
+        do
+            [ ! -f "${oasis_restart_file}" ] && break
+
+            if ! diff -q $(basename ${oasis_restart_file}) ${oasis_restart_file} >/dev/null
+            then
+                echo "*WARNING* Overwriting OASIS restart (${oasis_restart_file})"
+                cp -f ${oasis_restart_file} .
+            fi
+        done
+
+        rcf_restart_file=restart/ifs/${leg_nb_3d}/rcf
+        if [ -f "${rcf_restart_file}" ]
+        then
+            if ! diff -q rcf ${rcf_restart_file} >/dev/null
+            then
+                echo "*WARNING* Overwriting IFS rcf file (${rcf_restart_file})"
+                cp -f ${rcf_restart_file} .
+            fi
+        fi
+    fi
+
+    # -------------------------------------------------------------------------
+    # *** Start the run
+    # -------------------------------------------------------------------------
+    export DR_HOOK_IGNORE_SIGNALS='-1'
+    export CPLNG='active'
+
+    # Use the launch function from the platform configuration file
+    has_config nemo && \
+        cmd="${xio_numproc} ${xio_exe_file} -- \
+             ${nem_numproc} ${nem_exe_file} -- \
+             ${ifs_numproc} ${ifs_exe_file} -v ecmwf -e ${exp_name}" || \
+        cmd="${ifs_numproc} ${ifs_exe_file} -v ecmwf -e ${exp_name}"
+
+    has_config lpjg && cmd=${cmd}" -- ${lpjg_numproc} ${lpjg_exe_file} guess.ins -parallel"
+    has_config tm5  && cmd=${cmd}" -- ${tm5_numproc}  ${tm5_exe_file} tm5-run.rc"
+    has_config amip && cmd=${cmd}" -- ${amip_numproc} ${amip_exe_file}"
+    has_config nemo && cmd=${cmd}" -- ${rnf_numproc} ${rnf_exe_file}" 
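+
+    # For illustration only (nothing below is executed): the assembled command is a
+    # sequence of "nranks executable [args]" groups separated by "--", which the
+    # launch function in ecconf.cfg turns into an MPMD mpirun call. Binary names and
+    # process counts in this example are hypothetical, not values from this setup:
+    #   launch 1 xios_server.exe -- 380 nemo.exe -- 336 ifs_master -v ecmwf -e ${exp_name} -- 1 rnfmap.exe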
+
+    t1=$(date +%s)
+    launch $cmd
+    t2=$(date +%s)
+
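+    # Wall-clock duration of the leg, formatted as HH:MM:SS (wraps around for
+    # legs that take longer than 24 hours)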
+    tr=$(date -d "0 -$t1 sec + $t2 sec" +%T)
+
+    # -------------------------------------------------------------------------
+    # *** Check for signs of success
+    #     Note the tests provide no guarantee that things went fine! They are
+    #     just based on the IFS, NEMO and TM5 log files. More tests (e.g. checking
+    #     restart files) could be implemented.
+    # -------------------------------------------------------------------------
+
+    # Checking for IFS success
+    if [ -f ifs.stat ]
+    then
+        if [ "$(awk 'END{print $3}' ifs.stat)" == "CNT0" ]
+        then
+            info "Leg successfully completed according to IFS log file 'ifs.stat'."
+        else
+            error "Leg not completed according to IFS log file 'ifs.stat'."
+        fi
+    else
+        error "IFS log file 'ifs.stat' not found after run."
+    fi
+
+    # Check for NEMO success
+    if $(has_config nemo)
+    then
+        if [ -f ocean.output ]
+        then
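+            # Extract the date of the last 'New day' line in ocean.output and
+            # compare it with the last model day of the leg (leg_end_date - 1 day)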
+            if [ "$(sed -n '/New day/h; ${g;s:.*\([0-9/]\{10\}\).*:\1:;p;}' ocean.output)" == "$(date -u -d "${leg_end_date} - 1 day" +%Y/%m/%d)" ]
+            then
+                info "Leg successfully completed according to NEMO log file 'ocean.output'."
+            else
+                error "Leg not completed according to NEMO log file 'ocean.output'."
+            fi
+        else
+            error "NEMO log file 'ocean.output' not found after run."
+        fi
+    fi
+
+    # Check for TM5 success
+    if $(has_config tm5)
+    then
+        if [ -f tm5.ok ]
+        then
+            info "Leg successfully completed according to existing TM5 file 'tm5.ok'."
+        else
+            error "Leg not completed according to non-existing TM5 file 'tm5.ok'."
+        fi
+    fi
+
+    # -------------------------------------------------------------------------
+    # *** Post-process initial conditions saved during the run if requested
+    # -------------------------------------------------------------------------
+    ${do_save_ic} && save_ic_postproc
+
+    # -------------------------------------------------------------------------
+    # *** Move IFS output files to archive directory
+    # -------------------------------------------------------------------------
+    outdir="${archive_dir}/output/ifs/$(printf %03d $((leg_number)))"
+    mkdir -p ${outdir}
+
+    prv_leg=$(printf %03d $((leg_number-1)))
+
+    # This takes care of a special IFS feature: The output for the last time
+    # step of each leg is written at the first time step of the new leg. The
+    # following code makes sure that the output is appended to the appropriate
+    # file. Since GRIB files are just streams, it's done with a simple cat
+    # command.
+    for f in ICMSH${exp_name}+?????? ICMGG${exp_name}+??????
+    do
+        if [ -f output/ifs/${prv_leg}/${f} ]
+        then
+            cat ${f} >> output/ifs/${prv_leg}/${f}
+            rm -f ${f}
+        else
+            mv ${f} ${outdir}
+        fi
+    done
+
+    # -------------------------------------------------------------------------
+    # *** Move NEMO output files to archive directory
+    # -------------------------------------------------------------------------
+    if $(has_config nemo)
+    then
+        outdir="${archive_dir}/output/nemo/$(printf %03d $((leg_number)))"
+        mkdir -p ${outdir}
+
+        for v in grid_U grid_V grid_W grid_T icemod SBC scalar SBC_scalar diad_T ptrc_T bioscalar \
+                 grid_T_2D grid_U_2D grid_V_2D grid_W_2D grid_T_3D grid_U_3D grid_V_3D grid_W_3D \
+                 grid_T_SFC grid_1point grid_T_3D_ncatice vert_sum \
+                 grid_ptr_W_3basin_3D grid_ptr_T_3basin_2D grid_ptr_T_2D \
+                 zoom_700_sum zoom_300_sum zoom_2000_sum
+        do
+            for f in ${exp_name}_*_????????_????????_*${v}.nc
+            do
+                test -f $f && mv $f $outdir/
+            done
+        done
+    fi
+
+    # -------------------------------------------------------------------------
+    # *** Move LPJ-GUESS output files to archive directory
+    # -------------------------------------------------------------------------
+    if $(has_config lpjg)
+    then
+        outdir="output/lpjg/$(printf %03d $((leg_number)))"
+        if [ -d ${outdir} ]
+        then
+            rm -rf  ${outdir}
+        fi
+        mkdir -p ${outdir}
+
+        # LPJG run directories
+        # concatenate *.out (or compressed *.out.gz) files from each run* into output dir
+        flist=$(cd ${run_dir}/run1/output && find *.out.gz -type f 2>/dev/null || true)
+        if [ "$flist" = "" ]
+        then
+            lpjg_compress_output=false
+            flist=$(cd ${run_dir}/run1/output && find *.out -type f 2>/dev/null)
+        else
+            lpjg_compress_output=true
+        fi
+
+        mkdir ${outdir}/CMIP6
+
+        for (( n=1; n<=${lpjg_numproc}; n++ ))
+        do
+            for ofile in $flist
+            do
+                if $lpjg_compress_output
+                then
+                    [ $n == 1 ] && gzip -c ${run_dir}/run${n}/output/`basename ${ofile} .gz`.hdr > ${outdir}/$ofile
+                    cat ${run_dir}/run${n}/output/${ofile} >> ${outdir}/$ofile
+                else
+                    if (( n == 1 ))
+                    then
+                        cat ${run_dir}/run${n}/output/${ofile} > ${outdir}/$ofile
+                    else
+                        awk '(FNR!=1){print $0}' ${run_dir}/run${n}/output/${ofile} >> ${outdir}/$ofile
+                    fi
+                fi
+            done
+            rm -rf ${run_dir}/run${n}/output
+        done
+        
+        # move monthly file if available
+        if [ -f ${run_dir}/LPJ-GUESS_monthlyoutput.txt ]
+        then
+            mv ${run_dir}/LPJ-GUESS_monthlyoutput.txt ${outdir}
+        fi
+    fi
+
+    # -------------------------------------------------------------------------
+    # *** Move TM5 output files to archive directory
+    # -------------------------------------------------------------------------
+    if $(has_config tm5)
+    then
+        outdir="output/tm5/$(printf %03d $((leg_number)))"
+        mkdir -p ${outdir}
+
+        set +e
+        mv budget_??????????_??????????_global.hdf      ${outdir}
+        mv j_statistics_??????????_??????????.hdf       ${outdir}
+        mv mmix_??????????_??????????_glb???x???.hdf    ${outdir}
+        mv aerocom?_TM5_*_????????_daily.nc             ${outdir}
+        mv aerocom?_TM5_*_??????_monthly.nc             ${outdir}
+        mv AOD_????_??_??.nc                            ${outdir}
+        mv -f TM5MP_${exp_name}_griddef.nc              ${outdir}
+        mv TM5MP_${exp_name}_TP_????_??_??.nc           ${outdir}
+        mv TM5MP_${exp_name}_vmr3_????_??_??.nc         ${outdir}
+        mv general_TM5_${exp_name}_??????????_hourly.nc ${outdir}
+        mv general_TM5_${exp_name}_??????_monthly.nc    ${outdir}
+        mv *EC-Earth3-*_${exp_name}_*.nc                ${outdir}
+        set -e
+
+        # move profiling files if any
+        if [ "$(ls -A ${run_dir}/tm5_profile)" ]
+        then
+            outdir="output/tm5/profile_$(printf %03d $((leg_number)))"
+            mkdir -p ${outdir}
+
+            for f in ${run_dir}/tm5_profile/*
+            do
+                test -f ${f} && mv $f ${outdir}
+            done
+        fi
+    fi
+
+    # -------------------------------------------------------------------------
+    # *** Move IFS restart files to archive directory
+    # -------------------------------------------------------------------------
+    if $leg_is_restart
+    then
+        outdir="${archive_dir}/restart/ifs/$(printf %03d $((leg_number)))"
+        mkdir -p ${outdir}
+
+        # Figure out the time part of the restart files (cf. CTIME on rcf files)
+        # NOTE: Assuming that restarts are at full days (time=0000) only!
+        nd="$(printf %06d $((leg_start_sec/(24*3600))))0000"
+
+        mv srf${nd}.???? ${outdir}
+
+    fi
+
+    # -------------------------------------------------------------------------
+    # *** Move ccycle debug output files to archive directory
+    # -------------------------------------------------------------------------
+    if ${ccycle_debug_fluxes} && $(has_config tm5:co2)
+    then
+      outdir="output/tm5/$(printf %03d $((leg_number)))"
+      mkdir -p ${outdir}
+      if $(has_config lpjg)
+      then
+          for f in CNAT CANT CNPP ; do
+              mv TM5_Land${f}_*.nc ${outdir}
+              rm -f GUE_${f}_*.nc run1/GUE_${f}_*.nc
+              #gf=`ls -1 GUE_${f}_*.nc | head -n 1`
+              #cdo mergetime ${gf} run1/${gf} ${outdir}/${gf}
+          done
+      fi
+      if $(has_config pisces)
+      then
+          mv TM5_OceCFLX_*.nc ${outdir}
+          rm -f O_CO2FLX_*.nc
+          #mv O_CO2FLX_*.nc ${outdir}
+      fi
+    fi
+
+    # -------------------------------------------------------------------------
+    # *** Move LPJ-GUESS restart files to archive directory
+    # -------------------------------------------------------------------------
+    if $(has_config lpjg)
+    then
+        outdir="restart/lpjg/$(printf %03d $((leg_number)))"
+        if [ -d ${outdir} ]
+        then
+            rm -rf  ${outdir}
+        fi
+        mkdir -p ${outdir}
+
+        state_dir="./lpjg_state_$(printf %04d $((leg_end_date_yyyy)))"
+        mv ${state_dir} ${outdir}
+        # LPJG writes into run1 dir, so mv to main rundir
+        mv -f run1/lpjgv.nc .
+        has_config tm5:co2 lpjg && mv -f run1/rlpjg.nc .
+
+        # remove restart link
+        if $leg_is_restart
+        then
+            old_state_dir="./lpjg_state_$(printf %04d $((leg_start_date_yyyy)))"
+            if [ -L $old_state_dir ]
+            then
+                rm -f "$old_state_dir"
+            fi
+        fi
+    fi
+
+    # -------------------------------------------------------------------------
+    # *** Move NEMO restart files to archive directory
+    # -------------------------------------------------------------------------
+    if $leg_is_restart && $(has_config nemo)
+    then
+        outdir="${archive_dir}/restart/nemo/$(printf %03d $((leg_number)))"
+        mkdir -p ${outdir}
+
+        ns=$(printf %08d $(( leg_start_sec / nem_time_step_sec - nem_restart_offset )))
+        for f in oce ice
+        do
+            mv ${exp_name}_${ns}_restart_${f}_????.nc ${outdir}
+        done
+
+        if has_config pisces
+        then
+            mv ${exp_name}_${ns}_restart_trc_????.nc ${outdir}
+        fi
+    fi
+
+    # -------------------------------------------------------------------------
+    # *** Move TM5 restart file to archive directory
+    # -------------------------------------------------------------------------
+    if $leg_is_restart && $(has_config tm5)
+    then
+        outdir="restart/tm5/$(printf %03d $((leg_number)))"
+        mkdir -p ${outdir}
+
+        case ${tm5_istart} in
+            33|32) f=TM5_restart_${leg_start_date_yyyymmdd}_0000_glb300x200.nc
+                ;;
+            31) f=save_${leg_start_date_yyyymmdd}00_glb300x200.hdf
+                ;;
+        esac
+
+        mv $f ${outdir}
+    fi
+
+    # -------------------------------------------------------------------------
+    # *** Copy OASIS restart files to archive directory
+    #     NOTE: These files are copied and not moved as they are used in the
+    #           next leg!
+    #           Note also that the OASIS restart files present at the end of
+    #           the leg correspond to the start of the next leg!
+    # -------------------------------------------------------------------------
+    outdir="${archive_dir}/restart/oasis/$(printf %03d $((leg_number+1)))"
+    mkdir -p ${outdir}
+
+    for f in ${oas_rst_files}
+    do
+        test -f ${f} && cp ${f} ${outdir}
+    done
+
+    # -------------------------------------------------------------------------
+    # *** Copy rcf files to the archive directory (of the next leg!)
+    # -------------------------------------------------------------------------
+    outdir="${archive_dir}/restart/ifs/$(printf %03d $((leg_number+1)))"
+    mkdir -p ${outdir}
+
+    for f in rcf
+    do
+        test -f ${f} && cp ${f} ${outdir}
+    done
+
+    # -------------------------------------------------------------------------
+    # *** Move log files to archive directory
+    # -------------------------------------------------------------------------
+    outdir="${archive_dir}/log/$(printf %03d $((leg_number)))"
+    mkdir -p ${outdir}
+
+    for f in \
+        ifs.log ifs.stat fort.4 ocean.output \
+        time.step solver.stat guess.log run1/guess0.log \
+        amip.log namelist.amip \
+        nout.000000 debug.root.?? \
+        ctm.tm5.log.0
+    do
+        test -f ${f} && mv ${f} ${outdir}
+    done
+    has_config pisces && cp ocean.carbon ${outdir}
+
+    for f in ctm.tm5.log.*
+    do
+        if [[ -f ${f} ]]
+        then
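+            # Keep non-empty per-rank TM5 logs, silently discard empty ones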
+            [[ -s ${f} ]] && mv ${f} ${outdir} || \rm -f ${f}
+        fi
+    done
+
+    # -------------------------------------------------------------------------
+    # *** Write the restart control file
+    # -------------------------------------------------------------------------
+
+    # Compute CPMIP performance
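+    # CPMIP metrics as assumed here (the cpmip_sypd/cpmip_chpsy helpers are
+    # expected to implement the usual CPMIP definitions):
+    #   SYPD  : simulated years per wall-clock day
+    #           ~ (leg_length_sec / sec_per_year) / ((t2 - t1) / 86400)
+    #   CHPSY : core-hours per simulated year
+    #           ~ ncores * ((t2 - t1) / 3600) / (leg_length_sec / sec_per_year)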
+    sypd="$(cpmip_sypd $leg_length_sec $(($t2 - $t1)))"
+    ncores=0
+    has_config nemo      && (( ncores+=${nem_numproc}  )) || :
+    has_config ifs       && (( ncores+=${ifs_numproc}  )) || :
+    has_config xios      && (( ncores+=${xio_numproc}  )) || :
+    has_config rnfmapper && (( ncores+=${rnf_numproc}  )) || :
+    has_config lpjg      && (( ncores+=${lpjg_numproc} )) || :
+    has_config tm5       && (( ncores+=${tm5_numproc}  )) || :
+    has_config amip      && (( ncores+=${amip_numproc} )) || :
+    chpsy="$(cpmip_chpsy  $leg_length_sec $(($t2 - $t1)) $ncores)"
+
+    echo "#"                                             | tee -a ${ece_info_file}
+    echo "# Finished leg at `date '+%F %T'` after ${tr} (hh:mm:ss)" \
+                                                         | tee -a ${ece_info_file}
+    echo "# CPMIP performance: $sypd SYPD   $chpsy CHPSY"| tee -a ${ece_info_file}
+    echo "leg_number=${leg_number}"                      | tee -a ${ece_info_file}
+    echo "leg_start_date=\"${leg_start_date}\""          | tee -a ${ece_info_file}
+    echo "leg_end_date=\"${leg_end_date}\""              | tee -a ${ece_info_file}
+
+    # Need to reset force_run_from_scratch in order to avoid destroying the next leg
+    force_run_from_scratch=false
+    special_restart=false
+
+done # loop over legs
+
+# -----------------------------------------------------------------------------
+# *** Platform dependent finalising of the run
+# -----------------------------------------------------------------------------
+finalise
+
+exit 0

+ 299 - 0
runtime/classic/ecconf.cfg

@@ -0,0 +1,299 @@
+#!/bin/bash 
+
+# Platform dependent configuration functions for the 'lucia' machine
+# (Lucia cluster at CENAERO)
+
+function configure()
+{
+    # This function should configure all settings/modules needed to
+    # later prepare the EC-Earth run directory and set variables used
+    # in the run script
+
+    # SCRATCH is not defined on this machine, so define it here
+    # and also make sure it is defined when compiling
+    export SCRATCH=/gpfs/scratch/acad/ecearth/${USER}
+
+    # Configure paths for building/running EC-Earth
+    ecearth_src_dir=${HOME}/models/ecearth_3.3.3.2/sources
+    run_dir=/gpfs/scratch/acad/ecearth/${USER}/ecearth/run/${exp_name}
+    ini_data_dir=/gpfs/scratch/acad/ecearth/data/bsc32/v3.3.3.2/inidata
+    archive_dir=/gpfs/scratch/acad/ecearth/${USER}/ecearth/archive/${exp_name}
+
+    # File for standard output.
+    # NOTE: This will be modified for restart jobs!
+    stdout_file=${SLURM_SUBMIT_DIR-$PWD}/${SLURM_JOB_NAME-"local"}_${SLURM_JOB_ID-"id"}.log
+
+    # Resubmit this job for automatic restarts? [true/false]
+    # Also, add options for the resubmit command here.
+    resubmit_job=true
+    resubmit_opt=""
+
+    module load craype-x86-milan
+    module load PrgEnv-intel/8.3.3
+    MODULEPATH=$MODULEPATH:/gpfs/projects/acad/ecearth/softs/easybuild/modules/all
+    module load netCDF-Fortran/4.6.0-iompi-2022.05
+    module load imkl/2022.1.0 OpenJPEG/2.5.0-GCCcore-11.3.0 grib_api/1.24.0-iompi-2022.05
+
+    # Configure grib api paths
+    export GRIB_DEFINITION_PATH=${HOME}/models/ecearth_3.3.3.2/sources/util/grib_table_126:${EBROOTGRIB_API}/share/grib_api/definitions
+    export GRIB_SAMPLES_PATH=${EBROOTGRIB_API}/share/grib_api/ifs_samples/grib1
+    export GRIB_BIN_PATH=${EBROOTGRIB_API}/bin
+
+    # Configure number of processors per node
+    proc_per_node=128
+
+    # Use machinefiles or not
+    [[ `echo "$use_machinefile" | tr '[:upper:]' '[:lower:]'` == true ]] && use_machinefile=true || use_machinefile=false
+
+    ulimit -s unlimited
+
+    # Load specific MPI environment configuration
+    configure_mpi
+}
+
+function configure_python()
+{
+    # specific for python+eccodes setup - used for OSM pre/post-processing
+    # it would be simple to do the following in configure
+    # module load eccodes/2.8.0 python/2.7.13
+    module load eccodes/2.8.0 python/2.7.13
+    unset GRIB_DEFINITION_PATH
+    unset GRIB_SAMPLES_PATH
+    unset GRIB_BIN_PATH
+    export GRIB_BIN_PATH=/apps/ECCODES/2.8.0/INTEL/bin
+}
+
+function configure_mpi()
+{
+    [ -z "${OMP_NUM_THREADS-}" ] && export OMP_NUM_THREADS=1
+    #export I_MPI_DEBUG=5
+    #export I_MPI_ADJUST_BCAST=3
+    #export PSM2_MTU=8196
+    #export PSM2_MEMORY=large
+    #export PSM2_MQ_RNDV_HFI_THRESH=1
+    #export I_MPI_FABRIC=ofi
+    #unset I_MPI_PMI_LIBRARY
+    #export I_MPI_JOB_RESPECT_PROCESS_PLACEMENT=0
+    #export I_MPI_FABRICS=shm:ofi
+}
+
+function get_hosts()
+{
+    # This function uses a scheduler command to get the hosts allocated for the current job
+    hosts=(`scontrol show hostname | paste -s`)
+}
+
+function machinefile_config()
+{
+    # User configuration starts here
+    # hard-coded c4mip configurations, must use the proper _numproc settings
+    if has_config ifs nemo pisces rnfmapper xios lpjg ; then
+        if ! has_config tm5 ; then
+            ifs_ppn=48 ; [[ ${ifs_numproc}  != 336 ]] && info "wrong numproc setting for ifs in machinefile_config" || true
+            nem_ppn=43 ; [[ ${nem_numproc}  != 380 ]] && info "wrong numproc setting for nemo in machinefile_config" || true
+            xio_ppn=5  ; [[ ${xio_numproc}  !=   5 ]] && info "wrong numproc setting for xios in machinefile_config" || true
+            lpjg_ppn=5 ; [[ ${lpjg_numproc} !=  40 ]] && info "wrong numproc setting for lpjg in machinefile_config" || true
+        else
+            ifs_ppn=48 ; [[ ${ifs_numproc}  != 256 ]] && info "wrong numproc setting for ifs in machinefile_config" || true
+            nem_ppn=46 ; [[ ${nem_numproc}  != 192 ]] && info "wrong numproc setting for nemo in machinefile_config" || true
+            xio_ppn=2  ; [[ ${xio_numproc}  !=   2 ]] && info "wrong numproc setting for xios in machinefile_config" || true
+            lpjg_ppn=2 ; [[ ${lpjg_numproc} !=   8 ]] && info "wrong numproc setting for lpjg in machinefile_config" || true
+            tm5_ppn=4  ; [[ ${tm5_numproc}  !=   4 ]] && info "wrong numproc setting for tm5 in machinefile_config" || true
+        fi
+    else
+        # Add any new exclusive binary here
+        ifs_exc=TRUE
+        nem_exc=TRUE
+        xio_exc=TRUE
+        lpjg_exc=TRUE
+        tm5_exc=TRUE
+
+        # Modify the allocation to each binary using more than one process here
+        ifs_ppn=48
+        nem_ppn=48
+        xio_ppn=48
+        lpjg_ppn=48
+        tm5_ppn=45
+    fi
+}
+
+function machinefile_init()
+{
+    # Get max processes per node from the platform variable
+    max_ppn=$proc_per_node
+
+    components=( ifs nem xio rnf amip lpjg )
+    if $(has_config tm5)
+    then
+      components=( "${components[@]}" "tm5" )
+    fi
+    
+    for component in ${components[@]}
+    do
+      eval ${component}_exc=FALSE
+      eval ${component}_ppn=1
+    done
+  
+    # Call user configuration and get_host functions
+    machinefile_config
+    get_hosts
+ 
+    # Declare array to store the processes as they are assigned
+    declare -a -g processes_hosts
+    for n in `seq 0 ${#hosts[@]}`
+    do
+      processes_hosts[$n]=0
+    done
+
+    > machinefile
+    
+    current_hostid=0
+}
+
+machinefile_find_available_node()
+{
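+  # Advance current_hostid until the current host can still take ${!ppn} more
+  # ranks without exceeding max_ppn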
+  while [ $((${processes_hosts[$current_hostid]} + ${!ppn})) -gt $max_ppn ]
+  do
+    let "current_hostid += 1"
+  done
+}
+
+machinefile_add()
+{
+  total_proc=$2
+  # Iterate through all the possible binaries
+  for component in ${components[@]} 
+  do
+    binary="${component}_exe_file"
+    exclusive="${component}_exc"
+    # Check if the current binary matches the input executable
+    if [ ./$(basename ${!binary}) = "$1" ]
+    then
+       ppn="${component}_ppn"
+       # Exclusive mode: start allocation at the first empty node
+       if [[ ${!exclusive} == "TRUE" ]]
+       then
+         while [ ${processes_hosts[$current_hostid]} -gt 0 ]
+         do
+           let "current_hostid += 1"
+         done
+       # Shared mode: start allocation at the first node with enough free cores.
+       # Note that the search restarts from the first node, so if a previous
+       # binary had "exc=TRUE", free space is not ensured on subsequent nodes
+       else
+         current_hostid=0
+         machinefile_find_available_node
+       fi
+
+       # Allocate ppn cores in each of the subsequent nodes till there are no more processes to assign
+       count=0
+       while [ ${total_proc} -gt 0 ]
+       do
+         if [ ${current_hostid} -ge ${#hosts[@]} ]
+         then
+           echo "Not enough computing nodes"
+           exit 1
+         fi
+         current_hostname=${hosts[$current_hostid]}
+         while [[ ${total_proc} -gt 0 && ${count} -lt ${!ppn} ]] 
+         do 
+           echo ${hosts[$current_hostid]} >> machinefile
+           let "count += 1"
+           let "processes_hosts[$current_hostid] += 1"
+           let "total_proc -= 1" || true 
+         done 
+         if [ ${count} -eq ${!ppn} ]
+         then
+           let "current_hostid += 1"
+           machinefile_find_available_node
+           count=0
+         fi
+       done
+    fi
+  done
+}
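+# Worked example (illustration only, all values hypothetical): with hosts=(node1 node2),
+# max_ppn=128, ifs_exc=TRUE, ifs_ppn=48 and ifs_exe_file pointing at ./ifs_binary, a call
+# "machinefile_add ./ifs_binary 96" appends 48 lines of "node1" followed by 48 lines of
+# "node2" to ./machinefile, i.e. at most ifs_ppn ranks of that binary are placed per node.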
+
+function launch()
+{
+    # Compute and check the node distribution
+    info "======================="
+    info "Node/proc distribution:"
+    info "-----------------------"
+    info "IFS:    ${ifs_numproc}"
+    info "NEMO:   ${nem_numproc}"
+    info "XIOS:   ${xio_numproc}"
+    info "RUNOFF: ${rnf_numproc}"
+    info "======================="
+
+    cmd="mpirun"
+    cat /dev/null > prog.conf
+    proc_id=0
+
+    if [ "$use_machinefile" = "true" ]
+    then
+      cmd="mpirun -machinefile machinefile"
+      machinefile_init
+    fi
+
+    while (( "$#" ))
+    do
+        # Get number of MPI ranks and executable name
+        nranks=$1
+        executable=./$(basename $2)
+        
+        if [ "$use_machinefile" = "true" ]
+        then
+          machinefile_add $executable $nranks
+        fi
+
+        shift
+        shift
+
+        cmd+=" -n $nranks $executable"
+
+        # Add any arguments to executable
+        while (( "$#" )) && [ "$1" != "--" ]
+        do
+            cmd+=" $1"
+            shift
+        done
+        shift || true
+
+        for i in $(eval echo "{1..${nranks}}")
+        do
+            echo "$proc_id ${executable}" >> prog.conf
+            proc_id=$(($proc_id+1))
+        done
+
+        # Add a colon if more executables follow
+        (( "$#" )) && cmd+=" :"
+    done
+
+    #cmd="srun --kill-on-bad-exit=1 --multi-prog prog.conf"
+
+    pwd
+    echo $cmd
+    #exit
+    $cmd
+}
+
+function finalise()
+{
+    # This function should execute any post-run functionality, e.g.
+    # platform dependent cleaning or a resubmit
+
+    if ${resubmit_job} && [ $(date -d "${leg_end_date}" +%s) -lt $(date -d "${run_end_date}" +%s) ]
+    then
+        info "Resubmitting job for leg $((leg_number+1))"
+        # Need to go to start_dir to find the run script
+        cd ${start_dir}
+        # Submit command
+        # Note: This does not work if you specify a job name with sbatch -J jobname!
+        sbatch -N ${SLURM_JOB_NUM_NODES}                                                 \
+               -o ${run_dir}/$(basename ${stdout_file}).$(printf %03d $((leg_number+1))) \
+               -e ${run_dir}/$(basename ${stdout_file}).$(printf %03d $((leg_number+1))) \
+               -d ${SLURM_JOB_ID}                                                        \
+               ${resubmit_opt}                                                           \
+               ./${SLURM_JOB_NAME}.sh
+    fi
+}

+ 51 - 0
sources/amip-forcing/src/Makefile

@@ -0,0 +1,51 @@
+executable = ../bin/amip-forcing.exe
+
+FC      = mpif90
+FFLAGS  = -r8 -O3 -march=core-avx2 -ip -fp-model precise -extend-source 132 
+
+LD      = mpif90
+LDFLAGS = -O3 -fp-model precise -march=core-avx2
+
+OASIS_BASE_DIR = ${HOME}/models/ecearth_3.3.3.2/sources/oasis3-mct/ecconf
+OASIS_MOD_DIR  = $(OASIS_BASE_DIR)/build/lib/psmile.MPI1
+OASIS_INC_DIR  = $(OASIS_BASE_DIR)/lib/psmile/include
+OASIS_LIB_DIR  = $(OASIS_BASE_DIR)/lib
+OASIS_LIBS     = -lpsmile.MPI1 -lmct -lmpeu -lscrip
+
+MPI_BASE_DIR            := $(EBROOTIMPI)/mpi/latest
+ifneq ($(strip $(MPI_BASE_DIR)),)
+MPI_INC_DIR             := $(MPI_BASE_DIR)$(addprefix /,include)
+MPI_LIB_DIR             := $(MPI_BASE_DIR)$(addprefix /,lib)
+endif
+MPI_LIBS                := mpi mpifort
+
+ifneq ($(strip /),)
+NETCDF_LIB_DIR          := //$(EBROOTNETCDF)/lib64 $(EBROOTNETCDFMINFORTRAN)/lib
+NETCDF_INC_DIR          := //$(EBROOTNETCDF)/include $(EBROOTNETCDFMINFORTRAN)/include
+endif
+NETCDF_LIBS             := netcdff netcdf
+
+VPATH = cplng
+
+cplng_srcs = \
+    cplng_data_mod.F90 \
+    cplng_init_mod.F90 \
+    cplng_config_mod.F90 \
+    cplng_exchange_mod.F90 \
+    cplng_finalize_mod.F90 \
+    cplng_mod.F90
+
+amipforc_srcs = amip_forcing_mod.F90 amip_forcing.F90
+
+$(executable): config_mod.o $(cplng_srcs:.F90=.o) $(amipforc_srcs:.F90=.o)
+	$(LD) $(LDFLAGS) -o $@ $^ -L$(OASIS_LIB_DIR) $(OASIS_LIBS) \
+	    $(addprefix -L,$(NETCDF_LIB_DIR))  $(addprefix -l,$(NETCDF_LIBS))  \
+	    $(addprefix -L,$(MPI_LIB_DIR))     $(addprefix -l,$(MPI_LIBS))
+
+
+%.o: %.F90
+	$(FC) $(FFLAGS) -c -I$(OASIS_MOD_DIR) $(addprefix -I,$(NETCDF_INC_DIR)) $(addprefix -I,$(MPI_INC_DIR)) $<
+
+.PHONY: clean
+clean:
+	@rm -f *.o *.mod

+ 1 - 0
sources/config-build.xml

@@ -154,5 +154,6 @@
     <xi:include href="platform/nsc-tetralith-intel-intelmpi.xml" />
     <xi:include href="platform/pdc-beskow-intel-craympi.xml" />
     <xi:include href="platform/surfsara-cartesius.xml" />
+    <xi:include href="platform/lucia-cray-iompi.xml" />
 
 </Configuration>

+ 65 - 0
sources/ifs-36r4/Makefile.d/Makefile.config.ecconf

@@ -0,0 +1,65 @@
+$(info --> Reading $(lastword $(MAKEFILE_LIST)))
+
+comma := ,
+
+ECEARTH_SRC_DIR         := ${HOME}/models/ecearth_3.3.3.2/sources
+
+MPI_BASE_DIR            := 
+ifneq ($(strip $(MPI_BASE_DIR)),)
+MPI_INC_DIR             := $(MPI_BASE_DIR)$(addprefix /,)
+MPI_LIB_DIR             := $(MPI_BASE_DIR)$(addprefix /,)
+endif
+MPI_LIBS                := 
+
+OASIS_BASE_DIR          := $(ECEARTH_SRC_DIR)/oasis3-mct
+OASIS_ARCH              := ecconf
+OASIS_MPI_CHAN          := MPI1
+OASIS_MOD_DIR           := $(OASIS_BASE_DIR)/$(OASIS_ARCH)/build/lib/psmile.$(OASIS_MPI_CHAN)
+OASIS_LIB_DIR           := $(OASIS_BASE_DIR)/$(OASIS_ARCH)/lib
+OASIS_LIBS              := psmile.MPI1 mct mpeu scrip
+
+ifneq ($(strip /),)
+NETCDF_LIB_DIR          := $(EBROOTNETCDF)/lib64 $(EBROOTNETCDFMINFORTRAN)/lib
+NETCDF_INC_DIR          := $(EBROOTNETCDF)/include $(EBROOTNETCDFMINFORTRAN)/include
+endif
+NETCDF_LIBS             := netcdff netcdf
+
+LAPACK_BASE_DIR         := $(EBROOTIMKL)/mkl/latest
+ifneq ($(strip $(LAPACK_BASE_DIR)),)
+LAPACK_LIB_DIR          := $(LAPACK_BASE_DIR)$(addprefix /,lib/intel64)
+endif
+LAPACK_LIBS             := mkl_intel_lp64 mkl_core mkl_sequential
+
+ifneq ($(strip $(EBROOTGRIB_API)),)
+GRIBAPI_INC_DIR         := $(EBROOTGRIB_API)/include
+GRIBAPI_LIB_DIR         := $(EBROOTGRIB_API)/lib
+endif
+GRIBAPI_LIBS            := grib_api_f90 grib_api
+
+ifneq ($(strip /gpfs/projects/acad/ecearth/softs/gribex/iompi-2022.05),)
+GRIBEX_LIB_DIR          := /gpfs/projects/acad/ecearth/softs/gribex/iompi-2022.05/lib
+endif
+GRIBEX_LIBS             := gribexR64
+
+MAKEDEPF90              := $(ECEARTH_SRC_DIR)/util/makedepf90/bin/makedepf90
+
+F90                     := mpif90
+F90FLAGS                := -r8 -O3 -march=core-avx2 -ip -fp-model precise -extend-source 132 
+
+IFSAUX_ADD_F90FLAGS     := 
+
+FC                      := mpif90
+FFLAGS                  := -r8 -O3 -march=core-avx2 -ip -fp-model precise -extend-source 132 
+
+FPPDEFS                 := $(addprefix -D,linux LINUX LITTLE LITTLE_ENDIAN POINTER_64 BLAS)
+
+CC                      := mpicc
+CFLAGS                  := -fp-model precise -march=core-avx2
+CPPDEFS                 := $(addprefix -D,linux LINUX LITTLE LITTLE_ENDIAN POINTER_64 BLAS)
+
+LD                      := $(F90)
+LDFLAGS                 := -fp-model precise -march=core-avx2
+
+AR                      := ar
+ARFLAGS                 := curv
+ARFLAGS_EXTRACT         := p

+ 7 - 0
sources/ifs-36r4/src/surf/offline/fcm-inc.cfg

@@ -0,0 +1,7 @@
+$OSM_FC      = mpif90
+$OSM_FFLAGS  = -r8 -O3 -march=core-avx2 -ip -fp-model precise -extend-source 132 
+$OSM_LFLAGS  = -O3 -fp-model precise -march=core-avx2
+$OSM_INCLUDE_PATH = //$(EBROOTNETCDF)/include $(EBROOTNETCDFMINFORTRAN)/include ${HOME}/models/ecearth_3.3.3.2/sources/oasis3-mct/ecconf/build/lib/psmile.MPI1 $(EBROOTIMPI)/mpi/latest/include
+$OSM_LIBS = netcdff netcdf psmile.MPI1 mct mpeu scrip mpi mpifort
+$OSM_LIB_PATHS = //$(EBROOTNETCDF)/lib64 $(EBROOTNETCDFMINFORTRAN)/lib ${HOME}/models/ecearth_3.3.3.2/sources/oasis3-mct/ecconf/lib $(EBROOTIMPI)/mpi/latest/lib
+$OSM_FOMP = -qopenmp

+ 232 - 0
sources/lpjg/CMakeLists.txt

@@ -0,0 +1,232 @@
+################################################################################
+# CMake configuration file for building LPJ-GUESS
+#
+# To build LPJ-GUESS with this build system cmake needs to be installed.
+# If it's not installed it can be downloaded for free from www.cmake.org.
+#
+
+cmake_minimum_required(VERSION 2.8)
+if (UNIX)
+        set(CMAKE_Fortran_COMPILER "mpif90")
+endif(UNIX)
+
+project(guess)
+
+#select grid resolution
+#set (GRID "T255" CACHE STRING "Grid <T159/T255>")
+set (GRID T255)
+if (GRID MATCHES "T255")
+   add_definitions(-DGRID_T255)
+elseif (GRID MATCHES "T159")
+   add_definitions(-DGRID_T159)
+else()
+   message(FATAL_ERROR "Unsupported GRID setting: ${GRID}")
+endif()
+
+
+# should we compress output?
+set(COMPRESS_OUTPUT true)
+if(COMPRESS_OUTPUT)
+   set(LIBS ${LIBS} z)
+   add_definitions(-DCOMPRESS_OUTPUT)
+   message(STATUS "Output compression has been enabled.")
+endif(COMPRESS_OUTPUT)
+
+# Compiler flags for building with Microsoft Visual C++
+if (MSVC)
+  # Disable warnings about using secure functions like sprintf_s instead of
+  # regular sprintf etc.
+  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /D _CRT_SECURE_NO_WARNINGS")
+endif()
+
+# The following are configuration variables for the build. 
+# Rather than editing them here, you should either edit them
+# in the graphical cmake tools (cmake-gui on Windows, ccmake
+# on Unix) or pass them in as parameters to cmake.
+
+if (UNIX)
+	enable_language(Fortran)
+endif(UNIX)
+
+# A variable controlling whether or not to include unit tests
+# Unit tests are disabled in old VS 6.0, since CATCH doesn't compile in such 
+# an old compiler.
+if (NOT MSVC_VERSION EQUAL "1200")
+  set(UNIT_TESTS "OFF" CACHE BOOL "Whether to include unit tests")
+endif()
+
+if (UNIX)
+  # Setup the SYSTEM variable, currently only used to choose which 
+  # submit.sh to generate (for submitting to job queue)
+  
+  # Figure out what value it should have initially, based on the
+  # environment variable ARCH if it's set.
+  if (NOT $ENV{ARCH} STREQUAL "")
+	set(DEFAULT_SYSTEM $ENV{ARCH})
+  else()
+	set(DEFAULT_SYSTEM "")
+  endif()
+
+  set(SYSTEM ${DEFAULT_SYSTEM} CACHE STRING "System to build for (empty (=simba), gimle, platon, alarik or multicore)")
+endif (UNIX)
+
+# Where to search for cmake modules
+# (used by cmake when the include() command is used in a cmake file)
+set(CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/cmake ${CMAKE_MODULE_PATH})
+
+# NetCDF - look for libraries and include files, and use them if found
+find_package(NetCDF QUIET)
+
+# NB! Adding the following line to .bashrc helps cmake to find the netcdf files:
+# export CMAKE_PREFIX_PATH=/software/apps/netcdf/4.3.2/i1402-hdf5-1.8.14/
+
+if (NETCDF_FOUND)
+	# Windows version:
+	#  include_directories(${NETCDF_INCLUDE_DIRS})
+        # Unix version:
+        if (UNIX)
+                include_directories(${NETCDF_INCLUDE_DIRS}
+                ${guess_SOURCE_DIR}/../oasis3-mct/ecconf/build/lib/psmile.MPI1)
+        else()
+                include_directories(${NETCDF_INCLUDE_DIRS})
+        endif(UNIX)
+
+        set(LIBS ${LIBS} ${NETCDF_LIBRARIES})
+        add_definitions(-DHAVE_NETCDF)
+
+        # Unix only:
+        if (UNIX)
+                link_directories(${guess_SOURCE_DIR}/../oasis3-mct/ecconf/lib
+                ${NETCDF_INCLUDE_DIRS}/../lib)
+        set(LIBS ${LIBS} psmile.MPI1 mct mpeu scrip netcdf netcdff)
+        endif (UNIX)
+else()
+        include_directories(//$(EBROOTNETCDF)/include $(EBROOTNETCDFMINFORTRAN)/include
+        ${guess_SOURCE_DIR}/../oasis3-mct/ecconf/build/lib/psmile.MPI1)
+
+        link_directories(${guess_SOURCE_DIR}/../oasis3-mct/ecconf/lib
+        //$(EBROOTNETCDF)/lib64 $(EBROOTNETCDFMINFORTRAN)/lib
+        ${guess_SOURCE_DIR}/../oasis3-mct/ecconf/build/lib/psmile.MPI1)
+
+        add_definitions(-DHAVE_NETCDF)
+
+        set(LIBS ${LIBS} psmile.MPI1 mct mpeu scrip
+        netcdff netcdf)
+endif()
+
+
+# MPI - used if found (not needed on Windows)
+if (NOT CMAKE_HOST_WIN32)
+  find_package(MPI QUIET)
+endif()
+
+# These are deprecated according to documentation in the FindMPI module,
+# but for some reason not hidden. Let's not display them for the typical
+# LPJ-GUESS user who hasn't got MPI installed.
+mark_as_advanced(MPI_LIBRARY MPI_EXTRA_LIBRARY)
+
+if (MPI_FOUND)
+  include_directories(${MPI_INCLUDE_PATH})
+  set(LIBS ${LIBS} ${MPI_LIBRARIES})
+  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${MPI_COMPILE_FLAGS}")
+
+  # The following is needed because the 3 lines above assume that, if
+  # MPI_FOUND, ${MPI_INCLUDE_PATH} and ${MPI_LIBRARIES} and
+  # ${MPI_COMPILE_FLAGS} are defined. This is not necessarily the case, for
+  # example on the CRAY at cca.
+  include_directories($(EBROOTIMPI)/mpi/latest/include)
+  link_directories($(EBROOTIMPI)/mpi/latest/lib)
+  set(LIBS ${LIBS} mpi mpifort)
+
+  add_definitions(-DHAVE_MPI)
+else()
+
+  include_directories($(EBROOTIMPI)/mpi/latest/include)
+  link_directories($(EBROOTIMPI)/mpi/latest/lib)
+  add_definitions(-DHAVE_MPI)
+  set(LIBS ${LIBS} mpi mpifort)
+
+endif()
+
+# Where the compiler should search for header files
+include_directories(${guess_SOURCE_DIR}/framework
+${guess_SOURCE_DIR}/libraries/gutil ${guess_SOURCE_DIR}/libraries/plib
+${guess_SOURCE_DIR}/libraries/guessnc ${guess_SOURCE_DIR}/modules ${guess_SOURCE_DIR}/cru/guessio)
+
+# The following directories contain source code and 
+# additional CMakeLists.txt files
+add_subdirectory(framework)
+add_subdirectory(modules)
+add_subdirectory(cru)
+add_subdirectory(libraries)
+
+if (UNIT_TESTS)
+  add_subdirectory(tests)
+endif()
+
+# Add the command line program's target
+if (GRID MATCHES "T255")
+  if (WIN32)
+    # Let the exe be called guesscmd so it doesn't collide with the dll target
+    set(guess_command_name "guesscmd_T255")
+  else()
+    # On Unix we don't have the dll so the command line binary can be called guess
+    set(guess_command_name "guess_T255")
+  endif()
+elseif(GRID MATCHES "T159")
+  if (WIN32)
+    # Let the exe be called guesscmd so it doesn't collide with the dll target
+    set(guess_command_name "guesscmd_T159")
+  else()
+    # On Unix we don't have the dll so the command line binary can be called guess
+    set(guess_command_name "guess_T159")
+  endif()
+endif()	
+# Specify the executable to build, and which sources to build it from
+add_executable(${guess_command_name} ${guess_sources} command_line_version/main.cpp)
+
+# Rule for building the unit test binary
+if (UNIT_TESTS)
+  add_executable(runtests ${guess_sources} ${test_sources})
+endif()
+
+# Specify libraries to link to the executable
+target_link_libraries(${guess_command_name} ${LIBS})
+
+if (WIN32)
+  # Create guess.dll (used with the graphical Windows shell)
+  add_library(guess SHARED ${guess_sources} windows_version/dllmain.cpp)
+
+  # Specify libraries to link to the dll
+  target_link_libraries(guess ${LIBS})
+endif (WIN32)
+
+# The custom build rule for generating the submit script from a template.
+# The submit script is generated each time the command line binary is built.
+# Removed in EC-Earth branch! 
+
+# Rule for running unit tests automatically
+if (UNIT_TESTS)
+  add_custom_command(TARGET runtests 
+    POST_BUILD 
+    COMMAND runtests 
+    COMMENT "Running tests")
+endif()
+
+if (UNIX)
+   # pgCC 6 doesn't seem to recognize -rdynamic, so remove it
+   # (we shouldn't need it anyway)
+   # It seems the CMake developers have fixed this in newer versions
+   # (sometime after 2.8)
+   SET(CMAKE_SHARED_LIBRARY_LINK_C_FLAGS "")
+   SET(CMAKE_SHARED_LIBRARY_LINK_CXX_FLAGS "")
+endif(UNIX)
+
+if (UNIX)
+  # Set default build type to Release on Unix
+  if(NOT CMAKE_BUILD_TYPE)
+    set(CMAKE_BUILD_TYPE Release CACHE STRING
+      "Choose the type of build, options are: None Debug Release RelWithDebInfo MinSizeRel."
+      FORCE)
+  endif(NOT CMAKE_BUILD_TYPE)
+endif(UNIX)

+ 58 - 0
sources/lpjg/offline/Makefile

@@ -0,0 +1,58 @@
+#adapted from runoff-mapper Makefile template
+
+FC      = mpif90
+FFLAGS  = -r8 -O3 -march=core-avx2 -ip -fp-model precise -extend-source 132 
+
+LD      = mpif90
+LDFLAGS = -O3 -fp-model precise -march=core-avx2
+
+FPP     = fpp
+FFLAGS_FPP_PREFIX = -D
+
+OASIS_BASE_DIR = ${HOME}/models/ecearth_3.3.3.2/sources/oasis3-mct/ecconf
+OASIS_MOD_DIR  = $(OASIS_BASE_DIR)/build/lib/psmile.MPI1
+OASIS_INC_DIR  = $(OASIS_BASE_DIR)/lib/psmile/include
+OASIS_LIB_DIR  = $(OASIS_BASE_DIR)/lib
+OASIS_LIBS     = -lpsmile.MPI1 -lmct -lmpeu -lscrip
+
+MPI_BASE_DIR            := $(EBROOTIMPI)/mpi/latest
+ifneq ($(strip $(MPI_BASE_DIR)),)
+MPI_INC_DIR             := $(MPI_BASE_DIR)$(addprefix /,include)
+MPI_LIB_DIR             := $(MPI_BASE_DIR)$(addprefix /,lib)
+endif
+MPI_LIBS                := mpi mpifort
+
+ifneq ($(strip /),)
+NETCDF_LIB_DIR          := //$(EBROOTNETCDF)/lib64 $(EBROOTNETCDFMINFORTRAN)/lib
+NETCDF_INC_DIR          := //$(EBROOTNETCDF)/include $(EBROOTNETCDFMINFORTRAN)/include
+endif
+NETCDF_LIBS             := netcdff netcdf
+
+all: lpjg_forcing ccycle_coupling
+
+# add any new lpjg_forcing_xxx here, currently only IFS forcing implemented
+
+lpjg_forcing: lpjg_forcing_ifs_T159 lpjg_forcing_ifs_T255
+
+lpjg_forcing_ifs_T%: lpjg_forcing_ifs_T%.o
+	$(LD) $(LDFLAGS) -o $@ $^ -L$(OASIS_LIB_DIR) $(OASIS_LIBS) \
+	    $(addprefix -L,$(NETCDF_LIB_DIR))  $(addprefix -l,$(NETCDF_LIBS))  \
+	    $(addprefix -L,$(MPI_LIB_DIR))     $(addprefix -l,$(MPI_LIBS))
+
+lpjg_forcing_ifs_T159.f90:
+	$(FPP) $(FFLAGS_FPP_PREFIX)IFS_RES_T159 lpjg_forcing_ifs.F90 > $@
+
+lpjg_forcing_ifs_T255.f90:
+	$(FPP) $(FFLAGS_FPP_PREFIX)IFS_RES_T255 lpjg_forcing_ifs.F90 > $@
+
+ccycle_coupling: ccycle_coupling.o
+	$(LD) $(LDFLAGS) -o $@ $^ -L$(OASIS_LIB_DIR) $(OASIS_LIBS) \
+	    $(addprefix -L,$(NETCDF_LIB_DIR))  $(addprefix -l,$(NETCDF_LIBS))  \
+	    $(addprefix -L,$(MPI_LIB_DIR))     $(addprefix -l,$(MPI_LIBS))
+
+%.o: %.f90
+	$(FC) $(FFLAGS) -c -I$(OASIS_MOD_DIR) $(addprefix -I,$(NETCDF_INC_DIR)) $(addprefix -I,$(MPI_INC_DIR)) $<
+
+.PHONY: clean
+clean:
+	@rm -f *.o *.mod  lpjg_forcing_ifs_T*.f90 lpjg_forcing_ifs_T159 lpjg_forcing_ifs_T255 ccycle_coupling

+ 68 - 0
sources/nemo-3.6/ARCH/arch-ecconf.fcm

@@ -0,0 +1,68 @@
+# EC-EARTH3 ec-conf-based configuration
+#
+# NCDF_HOME   root directory containing lib and include subdirectories for netcdf4
+# HDF5_HOME   root directory containing lib and include subdirectories for HDF5
+# XIOS_HOME   root directory containing lib for XIOS
+# OASIS_HOME  root directory containing lib for OASIS
+#
+# NCDF_INC    netcdf4 include file
+# NCDF_LIB    netcdf4 library
+# XIOS_INC    xios include file    (taken into account only if key_iomput is activated)
+# XIOS_LIB    xios library         (taken into account only if key_iomput is activated)
+# OASIS_INC   oasis include file   (taken into account only if key_oasis3 is activated)
+# OASIS_LIB   oasis library        (taken into account only if key_oasis3 is activated)
+#
+# FC          Fortran compiler command
+# FCFLAGS     Fortran compiler flags
+# FFLAGS      Fortran 77 compiler flags
+# LD          linker
+# LDFLAGS     linker flags, e.g. -L<lib dir> if you have libraries
+# FPPFLAGS    pre-processing flags
+# AR          archiver
+# ARFLAGS     archiver flags
+# MK          make
+# USER_INC    complete list of include files
+# USER_LIB    complete list of libraries to pass to the linker
+#
+# Note that:
+#  - unix variables "$..." are accpeted and will be evaluated before calling fcm.
+#  - fcm variables are starting with a % (and not a $)
+
+%HDF5_HOME           $(EBROOTHDF5)
+%HDF5_LIB            $(addprefix -L,$(addsuffix /,$(EBROOTHDF5))lib) \
+                     $(addprefix -l,hdf5_hl hdf5)
+
+%NCDF_INC            $(addprefix -I,$(addsuffix /,/)$(EBROOTNETCDF)/include $(EBROOTNETCDFMINFORTRAN)/include)
+%NCDF_LIB            $(addprefix -L,$(addsuffix /,/)$(EBROOTNETCDF)/lib64 $(EBROOTNETCDFMINFORTRAN)/lib) \
+                     $(addprefix -l,netcdff netcdf)
+
+%MPI_INC             $(addprefix -I,/)
+%MPI_LIB             $(addprefix -L,$(addsuffix /,)) \
+                     $(addprefix -l,)
+
+%XIOS_HOME           ${HOME}/models/ecearth_3.3.3.2/sources/xios-2.5
+%XIOS_INC            -I%XIOS_HOME/inc
+%XIOS_LIB            -L%XIOS_HOME/lib -lxios
+
+%OASIS_HOME          ${HOME}/models/ecearth_3.3.3.2/sources/oasis3-mct/ecconf
+%OASIS_INC           -I%OASIS_HOME/build/lib/psmile.MPI1
+%OASIS_LIB           -L%OASIS_HOME/lib -lpsmile.MPI1 -lmct -lmpeu -lscrip
+
+%CPP                 cpp
+
+%FC                  mpif90
+%FCFLAGS             -r8 -O3 -march=core-avx2 -ip -fp-model precise 
+%FFLAGS              %FCFLAGS
+
+%FPPFLAGS            -P -traditional
+
+%LD                  mpif90
+%LDFLAGS              -lstdc++
+
+%AR                  ar
+%ARFLAGS             curv
+
+%MK                  make
+
+%USER_INC            %XIOS_INC %OASIS_INC %NCDF_INC %MPI_INC
+%USER_LIB            %XIOS_LIB %OASIS_LIB %NCDF_LIB %MPI_LIB

+ 63 - 0
sources/oasis3-mct/util/make_dir/Makefile.d/Makefile.config.ecconf

@@ -0,0 +1,63 @@
+
+$(info --> Reading configuration from $(lastword $(MAKEFILE_LIST)))
+
+# ============================================================================
+# *** Configuration for ecconf (lucia-cray-iompi)
+# ============================================================================
+
+COUPLE := ${HOME}/models/ecearth_3.3.3.2/sources/oasis3-mct
+
+# CHAN : MPI1/MPI2
+CHAN   := MPI1
+
+# MPI library
+MPIDIR      := 
+ifneq ($(strip $(MPIDIR)),)
+MPIBIN      := $(MPIDIR)/bin
+MPI_INCLUDE := $(MPIDIR)$(addprefix /,)
+MPILIB      := -L$(MPIDIR)$(addprefix /,)
+endif
+MPILIB      += $(addprefix -l,)
+
+# NETCDF library
+NETCDF_DIR      := /
+NETCDF_INCLUDE  := $(addprefix $(addsuffix /,$(NETCDF_DIR)),$(EBROOTNETCDF)/include $(EBROOTNETCDFMINFORTRAN)/include)
+NETCDF_LIBRARY  := $(addprefix -L,$(addprefix $(addsuffix /,$(NETCDF_DIR)),$(EBROOTNETCDF)/lib64 $(EBROOTNETCDFMINFORTRAN)/lib))
+NETCDF_LIBRARY  += $(addprefix -l,netcdff netcdf)
+
+# Compiling and other commands
+MAKE    := make
+F90     := mpif90 -march=core-avx2 
+F       := $(F90)
+f90     := $(F90)
+f       := $(F)
+CC      := mpicc
+LD      := mpif90
+AR      := ar
+ARFLAGS := curv
+
+# CPP/FPP macros
+MACROS   := use_libMPI use_comm_$(CHAN) use_netCDF TREAT_OVERLAY
+MACROS   += 
+
+# Compiler options
+F90FLAGS := -r8 -O3 -ip -fp-model precise -extend-source 132 
+F90FLAGS += 
+F90FLAGS += $(addprefix -D,$(MACROS))
+F90FLAGS += $(addprefix -I,$(MPI_INCLUDE))
+F90FLAGS += $(addprefix -I,$(NETCDF_INCLUDE))
+
+f90FLAGS := $(F90FLAGS)
+FFLAGS   := $(F90FLAGS)
+fFLAGS   := $(F90FLAGS)
+
+CCFLAGS  := -O2 -fp-model precise -march=core-avx2 
+CCFLAGS  += $(addprefix -D,$(MACROS))
+CCFLAGS  += $(addprefix -I,$(MPI_INCLUDE))
+CCFLAGS  += $(addprefix -I,$(NETCDF_INCLUDE))
+
+LDFLAGS  := 
+LDFLAGS  += 
+
+# MPP_IOOPT needed for compiling mpp_io
+MPP_IOOPT :=

+ 415 - 0
sources/platform/lucia-cray-iompi.xml

@@ -0,0 +1,415 @@
+<?xml version="1.0" encoding="UTF-8"?>
+    <Platform name="lucia-cray-iompi">
+        <Description>
+            HOST:        Lucia at CENAERO
+            ARCH:        linux_x86_64
+            CPU MODEL:   AMD EPYC 7763
+            USER:        pbarriat
+            COMPILER:    intel-compilers-2022.1.0 
+            MPI:         OpenMPI/4.1.4
+	    BLAS/LAPACK: imkl/2022.1.0
+        </Description>
+
+        <Parameter name="ECEARTH_SRC_DIR">
+            <Description>Base directory for EC-Earth sources</Description>
+            <Type>PATH</Type>
+            <Value>${HOME}/models/ecearth_3.3.3.2/sources</Value>
+        </Parameter>
+
+        <Parameter name="MPI_BASE_DIR">
+            <Description>MPI base directory</Description>
+            <Type>PATH</Type>
+	    <Value>$(EBROOTIMPI)/mpi/latest</Value>
+        </Parameter>
+
+        <Parameter name="MPI_INC_SUBDIR">
+            <Description>MPI include directory relative to base dir</Description>
+            <Type>PATH</Type>
+            <Value>include</Value>
+        </Parameter>
+
+        <Parameter name="MPI_LIB_SUBDIR">
+            <Description>MPI lib directory relative to base dir</Description>
+            <Type>PATH</Type>
+            <Value>lib</Value>
+        </Parameter>
+
+        <Parameter name="MPI_LIBS_WITHOUT_L">
+            <Description>MPI libraries (without -l prefix)</Description>
+            <Type>STRING</Type>
+            <Value>mpi mpifort</Value>
+        </Parameter>
+
+        <Parameter name="LAPACK_BASE_DIR">
+            <Description>LAPACK base directory</Description>
+	    <Type>PATH</Type>
+	    <Value>$(EBROOTIMKL)/mkl/latest</Value>
+        </Parameter>
+
+        <Parameter name="LAPACK_LIB_SUBDIR">
+            <Description>LAPACK lib directory relative to base dir</Description>
+            <Type>PATH</Type>
+            <Value>lib/intel64</Value>
+        </Parameter>
+
+        <Parameter name="LAPACK_LIBS_WITHOUT_L">
+            <Description>LAPACK libraries (without -l prefix)</Description>
+            <Type>STRING</Type>
+            <Value>mkl_intel_lp64 mkl_core mkl_sequential</Value>
+        </Parameter>
+
+        <Parameter name="NETCDF_BASE_DIR">
+            <Description>NetCDF base directory</Description>
+            <Type>PATH</Type>
+            <Value>/</Value>
+        </Parameter>
+
+        <Parameter name="NETCDF_INC_SUBDIR">
+            <Description>NetCDF include directory relative to base dir</Description>
+            <Type>PATH</Type>
+            <Value>$(EBROOTNETCDF)/include $(EBROOTNETCDFMINFORTRAN)/include</Value>
+        </Parameter>
+
+        <Parameter name="NETCDF_LIB_SUBDIR">
+            <Description>NetCDF lib directory relative to base dir</Description>
+            <Type>PATH</Type>
+            <Value>$(EBROOTNETCDF)/lib64 $(EBROOTNETCDFMINFORTRAN)/lib</Value>
+        </Parameter>
+
+        <Parameter name="NETCDF_LIBS_WITHOUT_L">
+            <Description>NetCDF libraries (without -l prefix)</Description>
+            <Type>STRING</Type>
+            <Value>netcdff netcdf</Value>
+        </Parameter>
+
+        <Parameter name="GRIBAPI_BASE_DIR">
+            <Description>GRIB API base directory</Description>
+            <Type>PATH</Type>
+            <Value>$(EBROOTGRIB_API)</Value>
+        </Parameter>
+
+        <Parameter name="GRIBAPI_INC_SUBDIR">
+            <Description>GRIB API include directory relative to base dir</Description>
+            <Type>PATH</Type>
+            <Value>include</Value>
+        </Parameter>
+
+        <Parameter name="GRIBAPI_LIB_SUBDIR">
+            <Description>GRIB API lib directory relative to base dir</Description>
+            <Type>PATH</Type>
+            <Value>lib</Value>
+        </Parameter>
+
+        <Parameter name="GRIBAPI_LIBS_WITHOUT_L">
+            <Description>GRIB_API libraries (without -l prefix)</Description>
+            <Type>STRING</Type>
+            <Value>grib_api_f90 grib_api</Value>
+        </Parameter>
+
+        <Parameter name="GRIBEX_BASE_DIR">
+            <Description>GRIBEX base directory</Description>
+            <Type>PATH</Type>
+            <Value>/gpfs/projects/acad/ecearth/softs/gribex/iompi-2022.05</Value>
+        </Parameter>
+
+        <Parameter name="GRIBEX_LIB_SUBDIR">
+            <Description>GRIBEX lib directory relative to base dir</Description>
+            <Type>PATH</Type>
+            <Value>lib</Value>
+        </Parameter>
+
+        <Parameter name="GRIBEX_LIBS_WITHOUT_L">
+            <Description>GRIBEX libraries (without -l prefix)</Description>
+            <Type>STRING</Type>
+            <Value>gribexR64</Value>
+        </Parameter>
+
+        <Parameter name="JPEG_BASE_DIR">
+            <Description>JPEG base directory</Description>
+            <Type>PATH</Type>
+            <Value>$(EBROOTOPENJPEG)</Value>
+        </Parameter>
+
+        <Parameter name="JPEG_INC_SUBDIR">
+            <Description>JPEG include directory relative to base dir</Description>
+            <Type>PATH</Type>
+            <Value>include</Value>
+        </Parameter>
+
+        <Parameter name="JPEG_LIB_SUBDIR">
+            <Description>JPEG lib directory relative to base dir</Description>
+            <Type>PATH</Type>
+            <Value>lib</Value>
+        </Parameter>
+
+        <Parameter name="JPEG_LIBS_WITHOUT_L">
+            <Description>JPEG libraries (without -l prefix)</Description>
+            <Type>STRING</Type>
+            <Value>openjpeg</Value>
+        </Parameter>
+
+        <Parameter name="SZIP_BASE_DIR">
+            <Description>SZIP base directory</Description>
+            <Type>PATH</Type>
+            <Value>$(EBROOTSZIP)</Value>
+        </Parameter>
+
+        <Parameter name="SZIP_INC_SUBDIR">
+            <Description>SZIP include directory relative to base dir</Description>
+            <Type>PATH</Type>
+            <Value>include</Value>
+        </Parameter>
+
+        <Parameter name="SZIP_LIB_SUBDIR">
+            <Description>SZIP lib directory relative to base dir</Description>
+            <Type>PATH</Type>
+            <Value>lib</Value>
+        </Parameter>
+
+        <Parameter name="SZIP_LIBS_WITHOUT_L">
+            <Description>SZIP libraries (without -l prefix)</Description>
+            <Type>STRING</Type>
+            <Value>sz</Value>
+        </Parameter>
+
+        <Parameter name="HDF4_BASE_DIR">
+            <Description>HDF4 base directory</Description>
+            <Type>PATH</Type>
+            <Value/>
+        </Parameter>
+
+        <Parameter name="HDF4_INC_SUBDIR">
+            <Description>HDF4 include directory relative to base dir</Description>
+            <Type>PATH</Type>
+            <Value/>
+        </Parameter>
+
+        <Parameter name="HDF4_LIB_SUBDIR">
+            <Description>HDF4 lib directory relative to base dir</Description>
+            <Type>PATH</Type>
+            <Value/>
+        </Parameter>
+
+        <Parameter name="HDF4_LIBS_WITHOUT_L">
+            <Description>HDF4 libraries (without -l prefix)</Description>
+            <Type>STRING</Type>
+            <Value/>
+        </Parameter>
+
+        <Parameter name="HDF5_BASE_DIR">
+            <Description>HDF5 base directory</Description>
+            <Type>PATH</Type>
+            <Value>$(EBROOTHDF5)</Value>
+        </Parameter>
+
+        <Parameter name="HDF5_INC_SUBDIR">
+            <Description>HDF5 include directory relative to base dir</Description>
+            <Type>PATH</Type>
+            <Value>include</Value>
+        </Parameter>
+
+        <Parameter name="HDF5_LIB_SUBDIR">
+            <Description>HDF5 lib directory relative to base dir</Description>
+            <Type>PATH</Type>
+            <Value>lib</Value>
+        </Parameter>
+
+        <Parameter name="HDF5_LIBS_WITHOUT_L">
+            <Description>HDF5 libraries (without -l prefix)</Description>
+            <Type>STRING</Type>
+            <Value>hdf5_hl hdf5</Value>
+        </Parameter>
+
+        <Parameter name="MAKE">
+            <Description>Make command (GNU make min 3.81 needed!)</Description>
+            <Type>STRING</Type>
+            <Value>make</Value>
+        </Parameter>
+
+        <Parameter name="FC">
+            <Description>F90 Compiler</Description>
+            <Type>STRING</Type>
+            <Value>mpif90</Value>
+        </Parameter>
+
+        <Parameter name="FFLAGS">
+            <Description>General F90 flags for compiling</Description>
+            <Type>STRING</Type>
+            <Value>-r8 -O3 -march=core-avx2 -ip -fp-model precise -extend-source 132 </Value>
+        </Parameter>
+
+        <Parameter name="FFLAGS_FREEFORM">
+            <Description>Allow for free format Fortran</Description>
+            <Type>STRING</Type>
+            <Value>-free</Value>
+        </Parameter>
+
+        <Parameter name="FFLAGS_FIXEDFORM">
+            <Description>Expect fixed Fortran format</Description>
+            <Type>STRING</Type>
+            <Value>-fixed</Value>
+        </Parameter>
+
+        <Parameter name="FFLAGS_FPP_PREFIX">
+            <Description>Fortran preprocessor flag prefix</Description>
+            <Type>STRING</Type>
+            <Value>-D</Value>
+        </Parameter>
+
+        <Parameter name="CC">
+            <Description>C Compiler</Description>
+            <Type>STRING</Type>
+            <Value>mpicc</Value>
+        </Parameter>
+
+        <Parameter name="CXX">
+            <Description>C++ Compiler</Description>
+            <Type>STRING</Type>
+            <Value>mpicc</Value>
+        </Parameter>
+
+        <Parameter name="CFLAGS">
+            <Description>General C flags for compiling</Description>
+            <Type>STRING</Type>
+            <Value>-O1 -fp-model precise -march=core-avx2</Value>
+        </Parameter>
+
+        <Parameter name="CFLAGS_CPP_PREFIX">
+            <Description>C preprocessor flag prefix</Description>
+            <Type>STRING</Type>
+            <Value>-D</Value>
+        </Parameter>
+
+        <Parameter name="LD">
+            <Description>Linker</Description>
+            <Type>STRING</Type>
+            <Value>mpif90</Value>
+        </Parameter>
+
+        <Parameter name="LDFLAGS">
+            <Description>General flags for linking</Description>
+            <Type>STRING</Type>
+            <Value>-O3 -fp-model precise -march=core-avx2</Value>
+        </Parameter>
+
+        <Parameter name="AR">
+            <Description>Command for building libraries from object files (usually ar)</Description>
+            <Type>STRING</Type>
+            <Value>ar</Value>
+        </Parameter>
+
+        <Parameter name="ARFLAGS">
+            <Description>Flags for library building command (When using ar: include u)</Description>
+            <Type>STRING</Type>
+            <Value>curv</Value>
+        </Parameter>
+
+        <Parameter name="ARFLAGS_EXTRACT">
+            <Description>Flags for extracting object files from a library (when using ar)</Description>
+            <Type>STRING</Type>
+            <Value>p</Value>
+        </Parameter>
+
+        <Parameter name="CPP">
+            <Description>C preprocessor command</Description>
+            <Type>STRING</Type>
+            <Value>fpp</Value>
+        </Parameter>
+
+        <Parameter name="FPP">
+            <Description>Fortran preprocessor command</Description>
+            <Type>STRING</Type>
+            <Value>fpp</Value>
+        </Parameter>
+
+        <Parameter name="CPPFLAGS">
+            <Description>C preprocessor flags</Description>
+            <Type>STRING</Type>
+            <Value>-P -C -traditional</Value>
+        </Parameter>
+
+        <Parameter name="XIOS_CFLAGS">
+            <Description>CFLAGS flags for XIOS</Description>
+            <Type>STRING</Type>
+            <Value>-ansi -w</Value>
+        </Parameter>
+
+        <Parameter name="XIOS_ADD_LDFLAGS">
+            <Description>More LD flags for XIOS</Description>
+            <Type>STRING</Type>
+            <Value>-lstdc++</Value>
+        </Parameter>
+
+        <Parameter name="OASIS_ADD_FFLAGS">
+            <Description>More F90 flags for Oasis</Description>
+            <Type>STRING</Type>
+            <Value>-132 -check pointers -check uninit</Value>
+        </Parameter>
+
+        <Parameter name="OASIS_ADD_PPDEFS">
+            <Description>More CPP/FPP macros for Oasis</Description>
+            <Type>STRING</Type>
+            <Value/>
+        </Parameter>
+
+        <Parameter name="OASIS_ADD_LDFLAGS">
+            <Description>More LD flags for Oasis</Description>
+            <Type>STRING</Type>
+            <Value/>
+        </Parameter>
+
+        <Parameter name="NEMO_ADD_FFLAGS">
+            <Description>More F90 flags for Nemo</Description>
+            <Type>STRING</Type>
+            <Value>-check pointers -check uninit -fpe0</Value>
+        </Parameter>
+
+        <Parameter name="NEMO_ADD_LDFLAGS">
+            <Description>More LD flags for Nemo</Description>
+            <Type>STRING</Type>
+            <Value>-lstdc++</Value>
+        </Parameter>
+
+        <Parameter name="IFS_PPDEFS">
+            <Description>Preprocessor defs for IFS sources</Description>
+            <Type>STRING</Type>
+            <Value>linux LINUX LITTLE LITTLE_ENDIAN POINTER_64 BLAS</Value>
+        </Parameter>
+
+        <Parameter name="IFSAUX_ADD_FFLAGS">
+            <Description>More F90 flags for ifs/ifsaux</Description>
+            <Type>STRING</Type>
+            <Value></Value>
+        </Parameter>
+
+        <Parameter name="TM5_DEFAULT_FFLAGS">
+            <Description>Default F90 flags for TM5 (ie without optimization)</Description>
+            <Type>STRING</Type>
+            <Value></Value>
+        </Parameter>
+
+        <Parameter name="TM5_OPTIM_FFLAGS">
+            <Description>F90 optimization flags for TM5</Description>
+            <Type>STRING</Type>
+            <Value>-O3</Value>
+        </Parameter>
+
+        <Parameter name="TM5_MDEFS_FFLAGS">
+            <Description>Model preprocessor defs. Set to with_ecearth_optics if the aerosol feedback will be switched on.</Description>
+            <Type>STRING</Type>
+            <Value>with_ecearth_optics</Value>
+        </Parameter>
+
+        <Parameter name="TM5_CO2_ONLY">
+            <Description>Light version of TM5 (True or False)</Description>
+            <Type>STRING</Type>
+            <Value>False</Value>
+        </Parameter>
+
+        <Parameter name="MAKEDEPF90">
+            <Description>F90 dependency generator</Description>
+            <Type>STRING</Type>
+            <Value>$(ECEARTH_SRC_DIR)/util/makedepf90/bin/makedepf90</Value>
+        </Parameter>
+
+    </Platform>

+ 50 - 0
sources/runoff-mapper/src/Makefile

@@ -0,0 +1,50 @@
+executable = ../bin/runoff-mapper.exe
+
+FC      = mpif90
+FFLAGS  = -r8 -O3 -march=core-avx2 
+
+LD      = mpif90
+LDFLAGS = 
+
+OASIS_BASE_DIR = ${HOME}/models/ecearth_3.3.3.2/sources/oasis3-mct/ecconf
+OASIS_MOD_DIR  = $(OASIS_BASE_DIR)/build/lib/psmile.MPI1
+OASIS_INC_DIR  = $(OASIS_BASE_DIR)/lib/psmile/include
+OASIS_LIB_DIR  = $(OASIS_BASE_DIR)/lib
+OASIS_LIBS     = -lpsmile.MPI1 -lmct -lmpeu -lscrip
+
+MPI_BASE_DIR            := 
+ifneq ($(strip $(MPI_BASE_DIR)),)
+MPI_INC_DIR             := $(MPI_BASE_DIR)$(addprefix /,)
+MPI_LIB_DIR             := $(MPI_BASE_DIR)$(addprefix /,)
+endif
+MPI_LIBS                := 
+
+ifneq ($(strip /),)
+NETCDF_LIB_DIR          := $(EBROOTNETCDF)/lib64 $(EBROOTNETCDFMINFORTRAN)/lib
+NETCDF_INC_DIR          := $(EBROOTNETCDF)/include $(EBROOTNETCDFMINFORTRAN)/include
+endif
+NETCDF_LIBS             := netcdff netcdf
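+# With the values above, the link rule below effectively receives
+#   -L$(EBROOTNETCDF)/lib64 -L$(EBROOTNETCDFMINFORTRAN)/lib -lnetcdff -lnetcdf
+# through its addprefix calls.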
+
+VPATH = cplng
+
+cplng_srcs = \
+    cplng_data_mod.F90 \
+    cplng_init_mod.F90 \
+    cplng_config_mod.F90 \
+    cplng_exchange_mod.F90 \
+    cplng_finalize_mod.F90 \
+    cplng_mod.F90
+
+rnfmap_srcs = runoff_mapper_mod.F90 runoff_mapper.F90
+
+$(executable): config_mod.o $(cplng_srcs:.F90=.o) $(rnfmap_srcs:.F90=.o)
+	$(LD) $(LDFLAGS) -o $@ $^ -L$(OASIS_LIB_DIR) $(OASIS_LIBS) \
+	    $(addprefix -L,$(NETCDF_LIB_DIR))  $(addprefix -l,$(NETCDF_LIBS))  \
+	    $(addprefix -L,$(MPI_LIB_DIR))     $(addprefix -l,$(MPI_LIBS))
+
+%.o: %.F90
+	$(FC) $(FFLAGS) -c -I$(OASIS_MOD_DIR) $(addprefix -I,$(NETCDF_INC_DIR)) $(addprefix -I,$(MPI_INC_DIR)) $<
+
+.PHONY: clean
+clean:
+	@rm -f *.o *.mod

+ 212 - 0
sources/tm5mp/ecconfig-ecearth3.rc

@@ -0,0 +1,212 @@
+!
+! This is a rcfile template for COMPILING TM5MP for ECEARTH 3
+!
+!=====================================================================!
+! Run main specifications
+!=====================================================================!
+! [id]  basename for executable, runtime rcfile
+my.basename     : tm5v4
+
+! timing
+timerange.start : 9999-99-99 99:99:99
+timerange.end   : 9999-99-99 99:99:99
+jobstep.length  : inf
+
+! autorun
+submit.auto     : False
+
+! main directories
+ECEARTH_SRC_DIR : ${HOME}/models/ecearth_3.3.3.2/sources
+my.project.dir  : ${HOME}/models/ecearth_3.3.3.2/sources/tm5mp
+my.run.dir      : 
+my.data.dir     :
+
+co2only  : True
+mkdepf90 : $(ECEARTH_SRC_DIR)/util/makedepf90/bin/makedepf90
+
+!=====================================================================!
+! Compiler, make, MPI/OpenMP switches
+!=====================================================================!
+
+coma:,
+compiler.fc            : mpif90
+mpi.compiler.fc        : mpif90
+mpi.compiler.fc.openmp : mpif90
+compiler.defineflag    : -WF,-D
+! All flags from ecconf = default + tm5_specific
+
+!cprepro: PLT:ACTIVE:FFLAGS_FPP_PREFIX  PLT:ACTIVE:TM5_PPDEFS
+
+compiler.flags.default.fflags  : 
+compiler.flags.default.ldflags : 
+
+compiler.flags.optim.fflags  : -O3
+compiler.flags.optim.ldflags : -O3
+
+! 'build.jobs' set in expert.rc, overwritten with script arg
+maker         :  make -j %{build.jobs}
+
+! parallelization
+par.mpi       : T
+
+! dummy nb of cores (pycasso looks for them to write par.ntasks)
+par.nx        : 1
+par.ny        : 45
+
+!=====================================================================!
+! Libraries
+!=====================================================================!
+
+! Z library (used for compression in HDF)
+compiler.lib.z.fflags     : 
+compiler.lib.z.libs       : -lz
+
+! JPEG library (used for compression in HDF)
+JPEG_LIB_DIR                : $(EBROOTOPENJPEG)
+compiler.lib.jpeg.fflags    : -I${JPEG_LIB_DIR}/include
+compiler.lib.jpeg.lib_dir   : -L${JPEG_LIB_DIR}/lib
+compiler.lib.jpeg.libs_no_l : openjpeg
+
+! SZ library (used for compression in HDF)
+SZIP_HOME                   : $(EBROOTSZIP)
+compiler.lib.sz.fflags      : -I${SZIP_HOME}/include
+compiler.lib.sz.lib_dir     : -L${SZIP_HOME}/lib
+compiler.lib.sz.libs_no_l   : sz
+
+! HDF4 library (without netcdf interface)
+HDF_HOME                    : 
+compiler.lib.hdf4.fflags    : -I${HDF_HOME}/ 
+compiler.lib.hdf4.lib_dir   : -L${HDF_HOME}/
+compiler.lib.hdf4.libs_no_l : 
+
+! HDF5 library with parallel IO enabled
+HDF5_PAR_HOME                   : $(EBROOTHDF5)
+compiler.lib.hdf5_par.fflags    : -I${HDF5_PAR_HOME}/include 
+compiler.lib.hdf5_par.lib_dir   : -L${HDF5_PAR_HOME}/lib
+compiler.lib.hdf5_par.libs_no_l : hdf5_hl hdf5
+ 
+! NetCDF4 library with parallel IO enabled
+NETCDF4_HOME                      : /
+compiler.lib.netcdf4_par.fflags   : -I${NETCDF4_HOME}/$(EBROOTNETCDF)/include -I$(EBROOTNETCDFMINFORTRAN)/include
+compiler.lib.netcdf4_par.lib_dir  : -L${NETCDF4_HOME}/$(EBROOTNETCDF)/lib64 -L$(EBROOTNETCDFMINFORTRAN)/lib
+compiler.lib.netcdf4_par.libs_no_l: netcdff netcdf
+
+! OASIS3 library
+OASIS3_ARCH                     : ecconf
+OASIS3_MPIV                     : MPI1
+OASIS3_HOME                     : ${HOME}/models/ecearth_3.3.3.2/sources/oasis3-mct/${OASIS3_ARCH}
+compiler.lib.oasis3.fflags      : -I${OASIS3_HOME}/build/lib/psmile.${OASIS3_MPIV}
+compiler.lib.oasis3.lib_dir     : -L${OASIS3_HOME}/lib
+compiler.lib.oasis3.libs_no_l   : psmile.MPI1 mct mpeu scrip
+! -lpsmile.${OASIS3_MPIV} -lmpp_io
+
+! MPI library
+compiler.lib.mpi.fflags  : -I$(EBROOTIMPI)/mpi/latest/include
+compiler.lib.mpi.lib_dir : -L$(EBROOTIMPI)/mpi/latest/lib
+compiler.lib.mpi.libs_no_l: mpi mpifort
+
+! Lapack library:
+!   -lessl               -lblacs           # serial
+!   -lesslsmp            -lblacssmp        # parallel with OpenMP
+!   -lessl    -lpessl    -lblacs           # parallel with MPI
+!   -lesslsmp -lpesslsmp -lblacssmp        # parallel with MPI and OpenMP
+!
+#if "${par.openmp}" in ["T","True"] :
+my.essl.ext   :  smp
+#else
+my.essl.ext   :  
+#endif
+#if "${par.mpi}" in ["T","True"] :
+my.pessl      :  -lpessl${my.essl.ext}
+#else
+my.pessl      :  
+#endif
+!
+compiler.lib.lapack.fflags    : 
+compiler.lib.lapack.libs      :  -lessl${my.essl.ext} ${my.pessl} -lblacs${my.essl.ext}
+
+!=====================================================================!
+! Grids & Levels
+!=====================================================================!
+#if "${my.meteo.class}" == 'ifs91'
+my.levs       : tropo34
+#elif "${my.meteo.class}" == 'ifs62'
+my.levs       : tropo31
+#else
+my.levs       : all 
+#endif
+! regions name
+my.region1                  :  glb300x200
+! grid names for meteo input (allows on-the-fly regridding):
+my.region1m                 :  glb100x100
+
+!=====================================================================!
+! Source code (list of proj)
+!=====================================================================!
+#if "${co2only}" in  ["T","True"] :
+my.source.proj : proj/output proj/co2 proj/ecearth
+#else
+my.source.proj : proj/output proj/budget10 proj/cb05 proj/ecearth
+#endif
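+! With co2only set to True above, only proj/output, proj/co2 and proj/ecearth
+! are built; the full-chemistry projects (budget10, cb05) are skipped.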
+
+!=====================================================================!
+! C-preprocessors: DF, TMM, TM5
+!=====================================================================!
+#if "${par.mpi}" in ["T","True"] :
+my.df.define   :  with_hdf5_par with_netcdf4_par
+#else
+my.df.define   :  with_hdf5 with_netcdf4
+#endif
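+! With par.mpi set to T above, my.df.define resolves to
+! 'with_hdf5_par with_netcdf4_par', selecting the parallel HDF5/NetCDF4
+! interfaces declared in the Libraries section.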
+
+! macro's for meteo input:
+my.tmm.define  : with_tmm_tm5 with_tmm_convec_ec
+
+! process to skip
+#if "${co2only}" in  ["T","True"] :
+! if without_wet_dep is not used then CP (at least) should be added to the met fields received from ifs 
+my.without    :  without_wet_deposition without_chemistry without_dry_deposition
+#else
+my.without    :  
+#endif
+
+my.def_advec  : slopes
+
+#if "${co2only}" in  ["T","True"] :
+my.defs_emis  : 
+my.defs_chem  : 
+#else
+my.defs_emis  : with_ch4_emis
+my.defs_chem  : with_m7 with_optics with_ecearth_optics
+#endif
+
+my.defs_misc  : with_budgets
+my.defs_cpl   : with_prism oasis3 parallel_cplng
+
+my.tm5.define : ${my.without} ${my.def_advec} ${my.defs_misc} ${my.defs_chem} ${my.defs_emis} ${my.defs_cpl} with_ecearth_optics
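+! With co2only=True (set above), my.tm5.define resolves to roughly:
+!   without_wet_deposition without_chemistry without_dry_deposition slopes
+!   with_budgets with_prism oasis3 parallel_cplng with_ecearth_optics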
+
+!=====================================================================!
+! METEO SETTINGS
+!=====================================================================!
+my.meteo.class       : ifs10
+my.meteo.resol       : glb100x100
+my.meteo.format      : tm5-nc
+time.fc              : F
+time.fc.day0         : 
+my.tmm.setup.apply   : T
+! dummy IFS coupling frequency
+cpl.ifs.period  : 6
+! dummy datadir
+ini_data_dir: dummy
+
+#include ${my.project.dir}/rc/meteo-tm5-ecearth3.rc
+
+!=====================================================================!
+! Extra resources
+!=====================================================================!
+#if "${my.region1}" == "glb100x100"
+#include ${my.project.dir}/rc/regions-glb100x100-chem.rc
+#else
+#include ${my.project.dir}/rc/regions-${my.region1}.rc
+#endif
+
+#include ${my.project.dir}/rc/expert-ecearth3-build.rc

+ 13 - 0
sources/util/ELPiN/Makefile

@@ -0,0 +1,13 @@
+
+FC=mpif90
+FLAGS= -r8 -O3 -march=core-avx2 -ip -fp-model precise -extend-source 132 
+
+NETCDF_INC_DIR = $(addsuffix /,/)$(EBROOTNETCDF)/include $(EBROOTNETCDFMINFORTRAN)/include
+NETCDF_LIB_DIR = $(addsuffix /,/)$(EBROOTNETCDF)/lib64 $(EBROOTNETCDFMINFORTRAN)/lib
+NETCDF_LIBS    = $(addprefix -l,netcdff netcdf)
+
+all:
+	mkdir -p bin
+	$(FC) src/mpp_domain_decomposition.f90 -o bin/mpp_domain_decomposition.exe $(FLAGS) $(addprefix -I,$(NETCDF_INC_DIR)) $(addprefix -L,$(NETCDF_LIB_DIR)) $(NETCDF_LIBS)
+clean:
+	rm -f bin/mpp_domain_decomposition.exe

+ 1060 - 0
sources/util/ec-conf/ec-conf3

@@ -0,0 +1,1060 @@
+#!/usr/bin/env python3
+
+import sys
+import os
+import errno
+import getopt
+import stat
+import re
+import operator
+
+from   xml.etree import ElementTree, ElementInclude
+import xml.sax
+
+
+def info(message, level=1):
+    """ @brief Print info to stdout
+        @param message string to print
+    """
+    if VERBOSE >= level:
+        sys.stderr.write("*II* %s\n" % message)
+
+
+def warning(message):
+    """ @brief Print warning to stdout
+        @param message string to print
+    """
+    if ERROR_ON_WARNING:
+        error(message)
+
+    if WARNING:
+        sys.stderr.write("*WW* %s\n" % message)
+
+
+def error(message):
+    """ @brief Print error to stdout
+        @param message string to print
+    """
+    print("*EE* %s " % message)
+    sys.exit(1)
+
+
+def usage(myname):
+    print("\nUsage:   %s OPTIONS <XML_FILE>\n" % myname)
+
+    print("Read configuration from an XML file and create config files.")
+
+    print("""
+%s reads a data base of configuration parameters from an XML file.
+Subsequently, a number of template files are processed in order to create
+configuration files (targets). See ec-conf User Guide for more information.""" % myname)
+
+    print("""
+Options: -h|--help            Print this help screen.
+         -p|--platform        Set the active platform.
+         -l|--list-platforms  Lists all platforms, which are defined in the XML file, on stdout.
+         -d|--prefix=<PATH>   Creates the directory PATH (if non-existing) and writes all target
+                                files within that directory. Also sets a configuration parameter
+                                named PLT:ACTIVE:PREFIXDIR for use in the template files.
+         -o|--overwrite-parameter <NAME>=<VALUE> 
+                              Allows to set new values to configuration parameters from the
+                                command line, overwriting the values from the XML file.
+                                <NAME> must be given in the form
+                                'COMPONENT_TYPE:COMPONENT_NAME:PARAMETER_NAME' corresponding to
+                                the place-holder syntax of ec-conf. See ec-conf user guide for details.
+                                Multiple parameters can be overwritten by repeating this option.
+         -x|--write-xml       Writes the content of <XML_FILE> in XML format to stdout.
+                              This can be used to normalise the XML file and for test purposes.
+         -g|--gui             Starts the graphical user interface. Turns off -x and -l.
+         -v|--verbose         Produces verbose output (stderr). To increase verbosity,
+                                use more than once.
+         -w|--no-warning      Turns off warnings (however, errors are displayed).
+         -e|--error-on-warning Turns warnings into errors (i.e. ec-conf will stop).
+          """)
+    sys.exit(2)
+
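+# Illustrative invocation (a sketch; the XML file name and prefix directory
+# below are placeholders, while the platform name exists in this commit):
+#
+#   ./ec-conf3 -p lucia-cray-iompi -d /tmp/ecconf-out \
+#       -o "PLT:ACTIVE:FFLAGS=-r8 -O2" config-build.xml
+#
+# -o takes COMPONENT_TYPE:COMPONENT_NAME:PARAMETER_NAME=VALUE, matching the
+# [[[...]]] place-holder syntax resolved by Configuration.translate() below.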
+
+class TextNode(object):
+    """Represents an XML node that contains only text, i.e. is not named and has no child nodes"""
+
+    def __init__(self):
+        self.text = ""
+
+    def add_text(self, content):
+        self.text += content
+
+
+class NamedNode(object):
+    """Represents an XML node with a certain type according to the document
+       definition. Base class for Configuration, Translation, Platform, Model,
+       and Parameter"""
+
+    def __init__(self, attr=None):
+
+        self.name = None
+        self.description = None
+
+        if attr:
+            if 'name' in list(attr.keys()):
+                self.name = attr['name']
+
+        info('Create named node: ' + self.name +
+             ' (' + self.__class__.__name__ + ')', level=2)
+
+    def set(self, key, value):
+        self.__dict__.__setitem__(key, value)
+
+    def xml(self, level=0):
+        """Provides XML representation of the NamedNode as a string"""
+
+        tabwidth = 4
+
+        xml_string = ' ' * level * tabwidth
+        xml_string += '<' + self.__class__.__name__
+        if 'name' in self.__dict__ and self.name:
+            xml_string += ' name="' + self.name + '"'
+        xml_string += '>\n'
+
+        for element in 'description', 'template', 'target', 'properties', 'type', 'value':
+            if element in self.__dict__:
+                xml_string += ' ' * (level + 1) * tabwidth
+                xml_string += '<' + element.capitalize() + '>'
+                if self.__dict__[element]:
+                    xml_string += self.__dict__[element]
+                xml_string += '</' + element.capitalize() + '>\n'
+
+        if level <= 1:
+            xml_string += '\n'
+
+        for element in 'translation', 'platform', 'model', 'parameter':
+            if element in self.__dict__:
+                for child_element in self.__dict__[element]:
+                    xml_string += child_element.xml(level + 1)
+
+        xml_string += ' ' * level * tabwidth
+        xml_string += '</' + self.__class__.__name__ + '>\n'
+
+        if level > 0 and level <= 2:
+            xml_string += '\n'
+
+        return xml_string
+
+    def plt(self, name=None):
+        """Provides short-hand access to the Platform member with name 'name'."""
+
+        if 'platform' in self.__dict__:
+            if name:
+                for plt in self.platform:
+                    if plt.name == name:
+                        return plt
+            else:
+                return self.platform
+        return None
+
+    def mod(self, name=None):
+        """Provides short-hand access to the Model member with name 'name'."""
+
+        if 'model' in self.__dict__:
+            if name:
+                for mod in self.model:
+                    if mod.name == name:
+                        return mod
+            else:
+                return self.model
+        return None
+
+    def par(self, name=None):
+        """Provides short-hand access to the Parameter member with name 'name'."""
+
+        if 'parameter' in self.__dict__:
+            if name:
+                for par in self.parameter:
+                    if par.name == name:
+                        return par
+            else:
+                return self.parameter
+        return None
+
+
+class Translation(NamedNode):
+    """Represents a Translation element of the XML document type"""
+
+    def __init__(self, attr):
+        super(Translation, self).__init__(attr)
+        self.template = None
+        self.target = None
+        self.properties = ''
+        self.is_active = 1
+
+    def set_target(self, target):
+        self.target = target
+
+
+class Platform(NamedNode):
+    """Represents a Platform element of the XML document type"""
+
+    def __init__(self, attr):
+        super(Platform, self).__init__(attr)
+        self.parameter = []
+        self.translation = []
+
+
+class Model(NamedNode):
+    """Represents a Parameter element of the XML document type"""
+
+    def __init__(self, attr):
+        super(Model, self).__init__(attr)
+        self.parameter = []
+
+
+class Parameter(NamedNode):
+    """Represents a Parameter element of the XML document type"""
+
+    def __init__(self, attr):
+        super(Parameter, self).__init__(attr)
+        self.type = None
+        self.value = None
+
+
+class Configuration(NamedNode, xml.sax.handler.ContentHandler):
+    """Represents a Parameter element (which is the root element) of the XML document type"""
+
+    def __init__(self):
+
+        self.translation = []
+        self.platform = []
+        self.model = []
+
+        self.xml_file = None
+        self.active_platform = None
+
+        self.__stack = []
+        self.__types = {'Translation': Translation,
+                        'Platform': Platform,
+                        'Model': Model,
+                        'Parameter': Parameter}
+
+    def xml(self, level=0):
+
+        return '<?xml version="1.0"?>\n\n' \
+               + super(Configuration, self).xml(level)
+
+    def startElement(self, tag, attributes):
+
+        info("Processing XML element '" + str(tag) + "'", level=3)
+
+        if self.__stack:
+            if tag in self.__types:
+                info("Adding NamedNode for element '" + str(tag) + "'", level=3)
+                self.__stack.append(self.__types[tag](attributes))
+            else:
+                info("Adding TextNode for element '" + str(tag) + "'", level=3)
+                self.__stack.append(TextNode())
+        else:
+            self.__stack.append(self)
+
+    def characters(self, content):
+        if isinstance(self.__stack[-1], TextNode):
+            self.__stack[-1].add_text(content)
+
+    def endElement(self, tag):
+        element = self.__stack.pop()
+        if self.__stack:
+            if isinstance(element, TextNode):
+                self.__stack[-1].__dict__[tag.lower()] = element.text
+            else:
+                self.__stack[-1].__dict__[tag.lower()].append(element)
+
+    def parse(self, file):
+
+        info("Parsing XML file '%s'" % file)
+
+        # A short helper function for xml.etree.ElementInclude.include
+        # that parses include files and catches parse errors
+        def include_loader(href, parse, encoding=None):
+            if parse != "xml":
+                error("Only XML includes allowed in xi:include! (see file '%s')" % href)
+            try:
+                with open(href) as file:
+                    data = ElementTree.parse(file).getroot()
+            except IOError as e:
+                error("Can't open include file '%s' for parsing: %s" % (href, e))
+            except ElementTree.ParseError as e:
+                error("XML parse error in include file '%s': %s" % (href, e))
+            return data
+
+        # First parsing stage with xml.etree for include processing
+        try:
+            tree = ElementTree.parse(file)
+        except IOError:
+            error("Can't open file '%s' for parsing" % file)
+        except ElementTree.ParseError as e:
+            error("XML parse error in file '%s': %s" % (file, e))
+
+        # Process XML include files
+        tree_root = tree.getroot()
+        ElementInclude.include(tree_root, loader=include_loader)
+
+        # Second parsing stage with xml.sax to fill data structures
+        # Since errors are catched in the first parsing stage, we assume
+        # everything is fine here.
+        xml.sax.parseString(ElementTree.tostring(tree_root, encoding="UTF-8"),self)
+
+        info("Finished parsing '%s': Translation: %d Platform: %d Model: %d" % (
+            file, len(self.translation), len(self.platform), len(self.model)))
+        self.xml_file = file
+
+    def translate(self, translation):
+
+        rpm_ops = {
+            'ADD': operator.add,
+            'SUB': operator.sub,
+            'MUL': operator.mul,
+            'DIV': operator.truediv,
+            'POW': operator.pow,
+            'MOD': operator.mod
+        }
+
+        subst_re = re.compile(r"\[\[\[(?P<var>[a-zA-Z0-9_:,]+)\]\]\]")
+        var_re = re.compile(r"^(\w{3}):([a-zA-Z0-9_]+):([a-zA-Z0-9_]+)$")
+
+        def parse_var(string):
+
+            # Shortcut for the 'prefixdir' parameter in template file
+            if string.lower() == 'plt:active:prefixdir':
+                return targetPrefixDir if targetPrefixDir else '[[[' + string + ']]]'
+
+            while True:
+
+                match = var_re.search(string)
+
+                if not match:
+                    break
+
+                (category, component, parameter) = match.groups()
+
+                if category.lower() == 'plt' and component.lower() == 'active':
+                    component = self.active_platform
+
+                try:
+                    string = getattr(self, category.lower())(
+                        component).par(parameter).value
+                except BaseException:
+                    warning(
+                        "Unable to process '%s' (Line %d in '%s')" %
+                        (string, line_number, template))
+                    return '[[[' + string + ']]]'
+            return string
+
+        def parse_rpn(string):
+
+            stack = []
+
+            for token in string.split(','):
+
+                token = parse_var(token)
+                if not token:
+                    info(
+                        "Substitute expression with empty string (Line %d in '%s')" %
+                        (line_number, template))
+
+                try:
+                    result = int(token)
+                except ValueError:
+
+                    try:
+                        result = float(token)
+                    except ValueError:
+
+                        if token in list(rpm_ops.keys()):
+
+                            try:
+                                result = rpm_ops[token](
+                                    stack.pop(-2), stack.pop())
+                            except IndexError:
+                                warning(
+                                    "Too few arguments to execute '%s' (Line %d in '%s')" %
+                                    (token, line_number, template))
+                                return "[[[" + string + "]]]"
+                            except BaseException:
+                                warning(
+                                    "Unable to execute '%s' (Line %d in '%s')" %
+                                    (token, line_number, template))
+                                return "[[[" + string + "]]]"
+                        else:
+                            result = token
+
+                stack.append(result)
+
+            if len(stack) > 1:
+                warning(
+                    "Too many operands in '%s' (Line %d in '%s')" %
+                    (string, line_number, template))
+                return "[[[" + string + "]]]"
+
+            return result
+
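+        # Substitution sketch (illustrative; SOME_COUNT is a made-up name):
+        #   [[[PLT:ACTIVE:NETCDF_LIBS_WITHOUT_L]]] -> 'netcdff netcdf'
+        #   [[[2,PLT:ACTIVE:SOME_COUNT,MUL]]]      -> 2 * SOME_COUNT (RPN)
+        # Tokens are comma-separated, resolved by parse_var() and combined by
+        # parse_rpn(); unresolvable expressions are kept as [[[...]]] and
+        # reported as warnings.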
+        template = translation.template
+        target = translation.target.strip()
+
+        info("Translate: '%s' --> '%s'" % (template, target))
+
+        try:
+            input = open(template, 'r')
+        except IOError:
+            error("Can't open template file '%s' for reading" % template)
+
+        if target:
+
+            if targetPrefixDir:
+                target = os.path.join(targetPrefixDir, target)
+                path = os.path.dirname(target)
+
+                try:
+                    os.makedirs(path)
+                    info("Created target prefix directory '%s'" % path)
+                except OSError as exc:
+                    if exc.errno == errno.EEXIST and os.path.isdir(path):
+                        info(
+                            "Target prefix directory '%s' exists already" %
+                            (path))
+                    else:
+                        error(
+                            "Could not create target prefix directory '%s'" %
+                            (path))
+
+            try:
+                output = open(target, 'w')
+            except IOError:
+                error("Can't open target file '%s' for writing" % target)
+
+            if 'executable' in translation.properties.split(','):
+                os.chmod(target, stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH |
+                         stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH |
+                         stat.S_IWUSR)
+        else:
+            output = sys.stdout
+
+        line_number = 0
+        for line in input:
+            line_number += 1
+            ptr = 0
+            buf = ''
+            for match in subst_re.finditer(line):
+                buf += line[ptr:match.start()]
+                var = match.groupdict()['var']
+                try:
+                    buf += str(parse_rpn(var))
+                except UnicodeEncodeError as e:
+                    error('Invalid character in XML file!\n'
+                          '*EE* Look up the \\uXXXX character code from the following message:\n'
+                          '*EE* %s' % str(e))
+                ptr = match.end()
+            buf += line[ptr:]
+
+            output.write(buf)
+
+        input.close()
+        if target:
+            output.close()
+
+    def translate_all(self):
+        for t in self.translation:
+            self.translate(t)
+
+        if self.active_platform:
+            for t in self.plt(self.active_platform).translation:
+                self.translate(t)
+
+
+def start_gui(cfg):
+
+    import tkinter
+    import tkinter.filedialog
+    import tkinter.messagebox
+
+    class VerticalScrolledFrame(tkinter.Frame):
+        """A pure Tkinter scrollable frame that actually works!
+
+        * Use the 'interior' attribute to place widgets inside the scrollable frame
+        * Construct and pack/place/grid normally
+        * This frame only allows vertical scrolling
+
+        http://tkinter.unpythonic.net/wiki/VerticalScrolledFrame
+
+        """
+
+        def __init__(self, parent, *args, **kw):
+            tkinter.Frame.__init__(self, parent, *args, **kw)
+
+            # create a canvas object and a vertical scrollbar for scrolling it
+            vscrollbar = tkinter.Scrollbar(self, orient=tkinter.VERTICAL)
+            vscrollbar.pack(
+                fill=tkinter.Y,
+                side=tkinter.RIGHT,
+                expand=tkinter.FALSE)
+            canvas = tkinter.Canvas(self, bd=0, highlightthickness=0,
+                                    yscrollcommand=vscrollbar.set)
+            canvas.pack(
+                side=tkinter.LEFT,
+                fill=tkinter.BOTH,
+                expand=tkinter.TRUE)
+            vscrollbar.config(command=canvas.yview)
+
+            # reset the view
+            canvas.xview_moveto(0)
+            canvas.yview_moveto(0)
+
+            # create a frame inside the canvas which will be scrolled with it
+            self.interior = interior = tkinter.Frame(canvas)
+            interior_id = canvas.create_window(0, 0, window=interior,
+                                               anchor=tkinter.NW)
+
+            # track changes to the canvas and frame width and sync them,
+            # also updating the scrollbar
+            def _configure_interior(event):
+                # update the scrollbars to match the size of the inner frame
+                size = (interior.winfo_reqwidth(), interior.winfo_reqheight())
+                canvas.config(scrollregion="0 0 %s %s" % size)
+                if interior.winfo_reqwidth() != canvas.winfo_width():
+                    # update the canvas's width to fit the inner frame
+                    canvas.config(width=interior.winfo_reqwidth())
+            interior.bind('<Configure>', _configure_interior)
+
+            def _configure_canvas(event):
+                if interior.winfo_reqwidth() != canvas.winfo_width():
+                    # update the inner frame's width to fill the canvas
+                    canvas.itemconfigure(
+                        interior_id, width=canvas.winfo_width())
+            canvas.bind('<Configure>', _configure_canvas)
+
+            return
+
+    class GUI(tkinter.Tk):
+
+        def __init__(self, cfg):
+
+            tkinter.Tk.__init__(self)
+
+            self.columnconfigure(1, weight=1)
+            self.rowconfigure(0, pad=10)
+            self.rowconfigure(2, weight=1)
+
+            self.__cfg = cfg
+            self.__var_list = []
+            self.__var_dict = {}
+
+            for t in self.__cfg.translation + \
+                    [t for p in self.__cfg.platform for t in p.translation]:
+                v = tkinter.IntVar()
+                v.set(1)
+                v.trace(
+                    'w', lambda n, i, m, t=t: t.set(
+                        'is_active', int(
+                            self.globalgetvar(n))))
+                self.__add_var(v, t)
+
+            self.__active_component = tkinter.StringVar()
+
+            self.__status = tkinter.StringVar()
+
+            self.__component_frame = None
+            self.__parameter_frame = None
+            self.__translation_frame = None
+
+            self.__init_top_panel()
+            self.__init_status_panel()
+            self.__init_main_panel()
+
+            self.grid_propagate(flag=0)
+
+            self.__set_status(
+                "Welcome to EC-CONF's graphical user interface!",
+                time=3000)
+
+        def __add_var(self, var, obj=None):
+            self.__var_list.append(var)
+            if obj:
+                self.__var_dict[obj] = len(self.__var_list) - 1
+
+        def __set_status(self, message, time=8000):
+
+            try:
+                self.after_cancel(self.__status_after_id)
+            except BaseException:
+                pass
+
+            self.__default_status_message = 'Basic usage: SELECT XML database' \
+                ' and template/target files, CONFIGURE parameters, and CREATE' \
+                ' the configuration files.'
+            self.__status.set(message)
+            self.__status_after_id = self.after(
+                time, self.__status.set, self.__default_status_message)
+
+        def __init_top_panel(self):
+
+            w = tkinter.Label(text='The active platform is:')
+            w.grid(row=0, column=0)
+
+            if not self.__cfg.active_platform:
+                self.__cfg.active_platform = self.__cfg.platform[0].name
+
+            v = tkinter.StringVar()
+            v.set(self.__cfg.active_platform)
+            v.trace(
+                'w',
+                lambda n, i, m: self.__set_status(
+                    "Active platform changed to '"
+                    + self.__cfg.active_platform
+                    + "'"))
+            v.trace(
+                'w',
+                lambda n, i, m: self.__fill_parameter_frame(
+                    self.__parameter_frame))
+            v.trace(
+                'w',
+                lambda n, i, m: self.__fill_component_frame(
+                    self.__component_frame))
+            v.trace(
+                'w',
+                lambda n, i, m: self.__fill_translation_frame(
+                    self.__translation_frame))
+            v.trace('w', lambda n, i, m: self.__cfg.set(
+                'active_platform', self.globalgetvar(n)))
+            self.__add_var(v)
+
+            w = tkinter.OptionMenu(None, v, *
+                                   [p.name for p in self.__cfg.platform])
+            w.grid(row=0, column=1, sticky='W', padx=8)
+
+            w = tkinter.Button(text='Select', width=10, height=2, bg='tan')
+            w['command'] = self.__do_select
+            w.grid(row=0, column=2, padx=8)
+
+            w = tkinter.Button(text='Configure', width=10, height=2, bg='tan')
+            w['command'] = self.__do_configure
+            w.grid(row=0, column=3, padx=8)
+
+            w = tkinter.Button(
+                text='Create!',
+                width=10,
+                height=2,
+                bg='darkgrey',
+                fg='white')
+            w['command'] = self.__do_create
+            w.grid(row=0, column=4, padx=8)
+
+        def __init_status_panel(self):
+            w = tkinter.Label(
+                textvariable=self.__status,
+                height=2,
+                bg='orange')
+            w.grid(row=1, column=0, columnspan=5, sticky='EW', pady=5)
+
+        def __init_main_panel(self):
+
+            self.__init_select_panel()
+            self.__init_configure_panel()
+
+            self.__select_panel.grid_remove()
+            self.__configure_panel.grid_remove()
+
+            self.__active_main_panel = self.__select_panel
+            self.__active_main_panel.grid()
+
+        def __init_select_panel(self):
+
+            self.__select_panel = tkinter.Frame()
+            self.__select_panel.grid(
+                row=2, column=0, columnspan=5, sticky='NEWS')
+            self.__select_panel.columnconfigure(0, weight=1)
+            self.__select_panel.rowconfigure(2, weight=1)
+
+            f = tkinter.LabelFrame(
+                self.__select_panel,
+                text='XML database file')
+            f.grid(sticky='NEWS')
+            f.columnconfigure(1, weight=1)
+
+            w = tkinter.Label(f, text='The current file is: ')
+            w.grid(row=0, column=0, sticky='W')
+
+            v = tkinter.StringVar()
+            v.set(self.__cfg.xml_file)
+            v.trace(
+                'w', lambda n, i, m: self.__cfg.set(
+                    'xml_file', self.globalgetvar(n)))
+            self.__add_var(v, 'xml_file')
+
+            w = tkinter.Label(f, textvariable=v, bg='darkgrey', fg='white')
+            w.grid(row=0, column=1, sticky='W')
+
+            w = tkinter.Button(f, text='Save as', width=8)
+            w['command'] = lambda: self.__save_as_xml_file()
+            w.grid(row=0, column=2, sticky='E', padx=4, pady=5)
+
+            w = tkinter.Button(f, text='Save', width=8)
+            w['command'] = lambda: self.__save_xml_file()
+            w.grid(row=0, column=3, sticky='E', padx=4, pady=5)
+
+            tkinter.Frame(self.__select_panel).grid(pady=5)
+
+            self.__translation_frame = tkinter.LabelFrame(
+                self.__select_panel, text='Templates and Targets')
+            self.__translation_frame.grid(sticky='NEWS')
+            self.__translation_frame.columnconfigure(1, weight=1)
+            self.__translation_frame.columnconfigure(3, weight=9)
+
+            self.__fill_translation_frame(self.__translation_frame)
+
+        def __init_configure_panel(self):
+
+            self.__configure_panel = tkinter.Frame()
+            self.__configure_panel.grid(
+                row=2, column=0, columnspan=5, sticky='NEWS')
+            self.__configure_panel.rowconfigure(0, weight=1)
+            self.__configure_panel.columnconfigure(2, weight=1)
+
+            # Component frame
+            self.__component_frame = tkinter.LabelFrame(
+                self.__configure_panel, text='Configurable components')
+            self.__component_frame.grid(sticky='NEWS')
+
+            self.__fill_component_frame(self.__component_frame)
+
+            # Spacer frame
+            f = tkinter.Frame(self.__configure_panel)
+            f.grid(row=0, column=1, padx=2)
+
+            # Parameter frame
+            f = tkinter.LabelFrame(
+                self.__configure_panel,
+                text='Configuration parameters')
+            f.grid(row=0, column=2, sticky='NEWS')
+
+            f = VerticalScrolledFrame(f)
+            f.pack(fill=tkinter.BOTH, expand=tkinter.TRUE)
+
+            self.__parameter_frame = f.interior
+            self.__parameter_frame.columnconfigure(2, weight=1)
+
+            self.__fill_parameter_frame(self.__parameter_frame)
+
+        def __fill_translation_frame(self, frame):
+
+            for w in list(frame.children.values()):
+                w.destroy()
+
+            v = tkinter.IntVar()
+            v.set(1)
+            v.trace('w', lambda n,
+                                i,
+                                m: [self.__var_list[self.__var_dict[t]].set(self.globalgetvar(n))
+                                    for t in self.__cfg.translation +
+                                    self.__cfg.plt(self.__cfg.active_platform).translation])
+            self.__add_var(v)
+
+            w = tkinter.Checkbutton(
+                frame, text='Activate/deactivate all', variable=v)
+            w.grid(row=0, column=0, sticky='W', pady=5)
+
+            r = 1
+            for t in self.__cfg.translation + \
+                    self.__cfg.plt(self.__cfg.active_platform).translation:
+
+                w = tkinter.Checkbutton(
+                    frame, text=t.description, variable=self.__var_list[self.__var_dict[t]])
+                w.grid(row=r, column=0, sticky='W', pady=5)
+
+                w = tkinter.Label(
+                    frame,
+                    text=t.template,
+                    bg='darkgrey',
+                    fg='white')
+                w.grid(row=r, column=1, sticky='E')
+
+                w = tkinter.Label(frame, text=' --> ')
+                w.grid(row=r, column=2)
+
+                v = tkinter.StringVar()
+                v.set(t.target)
+                v.trace(
+                    'w', lambda n, i, m, t=t: t.set_target(
+                        self.globalgetvar(n)))
+                self.__add_var(v)
+                w = tkinter.Entry(frame, textvariable=v)
+
+                w.grid(row=r, column=3, sticky='EW')
+                r += 1
+
+        def __fill_component_frame(self, frame):
+
+            for w in list(frame.children.values()):
+                w.destroy()
+
+            if self.__cfg.active_platform:
+
+                self.__active_component.set(self.__cfg.active_platform)
+
+                w = tkinter.Label(frame, text='Active platform')
+                w.pack(anchor='w')
+
+                c = self.__cfg.plt(self.__cfg.active_platform)
+                w = tkinter.Radiobutton(
+                    frame,
+                    text=c.name,
+                    variable=self.__active_component,
+                    value=c.name)
+                w['command'] = lambda: self.__fill_parameter_frame(
+                    self.__parameter_frame)
+                w.pack(anchor='w', pady=5)
+
+            if self.__cfg.model:
+
+                if not self.__active_component.get():
+                    self.__active_component.set(self.__cfg.model[0].name)
+
+                w = tkinter.Label(frame, text='Configurable models')
+                w.pack(anchor='w')
+
+                for c in self.__cfg.model:
+                    w = tkinter.Radiobutton(
+                        frame, text=c.name, variable=self.__active_component, value=c.name)
+                    w['command'] = lambda: self.__fill_parameter_frame(
+                        self.__parameter_frame)
+                    w.pack(anchor='w', pady=5)
+
+        def __fill_parameter_frame(self, frame):
+
+            for w in list(frame.children.values()):
+                w.destroy()
+
+            for (name, component) in [(c.name, c)
+                                      for c in self.__cfg.platform + self.__cfg.model]:
+                if self.__active_component.get() == name:
+                    break
+
+            r = 0
+            for p in component.par():
+
+                w = tkinter.Label(
+                    frame, text=p.description, anchor="w", width=35)
+                w.grid(row=r, column=0, sticky='W')
+
+                w = tkinter.Label(frame, text='[' + str(p.name) + ']')
+                w.grid(row=r, column=1, sticky='W', padx=20)
+
+                v = tkinter.StringVar()
+                v.set(p.value)
+                v.trace(
+                    'w', lambda n, i, m, p=p: p.set(
+                        'value', self.globalgetvar(n)))
+                self.__add_var(v)
+                w = tkinter.Entry(frame, textvariable=v)
+                w.grid(row=r, column=2, sticky='EW')
+
+                r += 1
+
+            self.__set_status(
+                'Configure parameters for component \'' +
+                self.__active_component.get() +
+                '\'')
+
+        def __do_select(self):
+            self.__active_main_panel.grid_remove()
+            self.__active_main_panel = self.__select_panel
+            self.__active_main_panel.grid()
+            self.__set_status(
+                'Select the XML data base file and active translations in the panel below.')
+
+        def __do_configure(self):
+            self.__active_main_panel.grid_remove()
+            self.__active_main_panel = self.__configure_panel
+            self.__active_main_panel.grid()
+            self.__set_status('Configure the configuration parameters for the'
+                              ' available components in the panel below.')
+
+        def __do_create(self):
+
+            fw = []
+            for t in self.__cfg.translation + \
+                    self.__cfg.plt(self.__cfg.active_platform).translation:
+                if t.is_active:
+                    self.__cfg.translate(t)
+                    fw.append(t.target)
+
+            if fw:
+                msg = 'Active target files written: ' + fw.pop()
+                while fw:
+                    msg += ', ' + fw.pop()
+            else:
+                msg = 'No targets were written'
+
+            self.__set_status(msg)
+
+        def __save_as_xml_file(self):
+            f = tkinter.filedialog.asksaveasfilename(
+                title='Select a file name for saving:', filetypes=[
+                    ('XML files', '*.xml'), ('All files', '*')])
+            if f:
+                try:
+                    self.__var_list[self.__var_dict['xml_file']].set(
+                        os.path.relpath(f))
+                except AttributeError:
+                    self.__var_list[self.__var_dict['xml_file']].set(
+                        os.path.realpath(f))
+                self.__save_xml_file()
+            else:
+                self.__set_status("Current XML file NOT saved")
+
+        def __save_xml_file(self):
+
+            if os.path.isfile(self.__cfg.xml_file):
+                msg = "The file '" + self.__cfg.xml_file + \
+                    "' exists. Do you want to replace it?"
+                if not tkinter.messagebox.askyesno('Save XML file', msg):
+                    return
+
+            try:
+                f = open(self.__cfg.xml_file, 'w')
+            except IOError:
+                msg = "The file '" + self.__cfg.xml_file + "' could not be opened for writing"
+                tkinter.messagebox.showerror('Save XML file', msg)
+                self.__set_status("XML database NOT saved")
+                return
+
+            f.write(self.__cfg.xml())
+            f.close()
+            self.__set_status(
+                "XML database saved to file '" +
+                self.__cfg.xml_file +
+                "'")
+
+    root = GUI(cfg)
+    root.title('ec-conf GUI')
+
+    min_window_width = min(900, int(0.9 * root.winfo_screenwidth()))
+    min_window_height = min(800, int(0.9 * root.winfo_screenheight()))
+    root.minsize(min_window_width, min_window_height)
+
+    root.resizable()
+    root.mainloop()
+
+
+if __name__ == "__main__":
+
+    # Try to get command line options and arguments
+    try:
+        opts, args = getopt.getopt(sys.argv[1:],
+                                   "hp:gd:o:xlvwe",
+                                   ["help", "platform=", "gui", "prefix=",
+                                    "overwrite-parameter=",
+                                    "write-xml", "list-platforms",
+                                    "verbose", "no-warning", "error-on-warning"])
+    except getopt.GetoptError:
+        usage(os.path.split(sys.argv[0])[-1])
+
+    # Default values, to be overwritten by command line options
+    WARNING = True
+    ERROR_ON_WARNING = False
+    VERBOSE = 0
+    writeXML = False
+    listPlatforms = False
+    wantGUI = False
+    platform = None
+    targetPrefixDir = None
+    overwriteParameters = []
+
+    # Parse command line options
+    for opt, arg in opts:
+        if opt in ('-h', '--help'):
+            usage(os.path.split(sys.argv[0])[-1])
+        elif opt in ('-p', '--platform'):
+            platform = arg
+        elif opt in ('-g', '--gui'):
+            wantGUI = True
+        elif opt in ('-d', '--prefix'):
+            targetPrefixDir = arg
+        elif opt in ('-o', '--overwrite-parameter'):
+            overwriteParameters.append(arg)
+        elif opt in ('-x', '--write-xml'):
+            writeXML = True
+        elif opt in ('-l', '--list-platforms'):
+            listPlatforms = True
+        elif opt in ('-v', '--verbose'):
+            VERBOSE += 1
+        elif opt in ('-w', '--no-warning'):
+            WARNING = False
+        elif opt in ('-e', '--error-on-warning'):
+            ERROR_ON_WARNING = True
+
+    # The XML file is all that should be left on the command line
+    if len(args) != 1:
+        usage(os.path.split(sys.argv[0])[-1])
+
+    # Create the Configuration object and fill the data structures by parsing
+    # the XML file
+    cfg = Configuration()
+    cfg.parse(args[0])
+
+    # If a platform was given on the command line, try to set it
+    if platform:
+        if cfg.plt(platform):
+            cfg.active_platform = platform
+        else:
+            error(
+                "Platform '%s' not defined in the configuration file '%s'" %
+                (platform, args[0]))
+    elif not (wantGUI or listPlatforms):
+        warning("No active platform given")
+
+    # Overwrite parameters given explicitly on the command line
+    for arg in overwriteParameters:
+
+        # split the name=value pair but make sure additional '=' are preserved
+        name = arg.split('=')[0]
+        value = '='.join(arg.split('=')[1:])
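+        # Illustrative (hypothetical) example: '-o plt:active:ecearth_src_dir=/path/to/sources'
+        # gives name='plt:active:ecearth_src_dir' and value='/path/to/sources'; a value that
+        # itself contains '=' is kept intact because only the first '=' splits name from value.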
+
+        # Maybe we want to allow overwriting with empty values?!
+        #   if so, remove the following if block
+        if not value:
+            warning("Parameter '%s' in --overwrite-parameter has no value" % name)
+            continue
+
+        try:
+            (category, component, parameter) = name.split(':')
+        except ValueError:
+            warning(
+                "Malformed parameter name given to --overwrite-parameter: '%s'" % name)
+            continue
+
+        if category.lower() == 'plt' and component.lower() == 'active':
+            component = cfg.active_platform
+
+        try:
+            getattr(cfg, category.lower())(
+                component).par(parameter).value = value
+            info("Overwriting parameter '%s' with value '%s'" % (name, value))
+        except AttributeError:
+            warning(
+                "Non-existing parameter name given to --overwrite-parameter: '%s'" % name)
+            continue
+
+    # Select activity to be done according to the command line options
+    # Default is to translate all Translations in the Configuration
+    if wantGUI:
+        info("Starting GUI")
+        start_gui(cfg)
+    elif listPlatforms:
+        if cfg.plt():
+            print('\n'.join([p.name for p in cfg.plt()]))
+        else:
+            warning("No platforms defined in XML file")
+    elif writeXML:
+        sys.stdout.write(cfg.xml())
+    else:
+        cfg.translate_all()
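
A minimal sketch of how this configuration tool is typically invoked from the shell, based on the command-line options parsed above; the script name, XML file name and platform label below are illustrative, not taken from this commit:

    # list the platforms defined in the XML data base
    ./ec-conf.py -l config-run.xml

    # write all active target files for a given platform
    ./ec-conf.py -p my-platform config-run.xml

    # same, but overwrite one parameter on the command line
    ./ec-conf.py -p my-platform -o plt:active:some_parameter=/some/path config-run.xml

    # start the tkinter GUI instead
    ./ec-conf.py -g config-run.xml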

+ 125 - 0
sources/util/grib_table_126/define_table_126.sh

@@ -0,0 +1,125 @@
+#!/bin/bash
+
+# This script defines grib table 126 for EC-Earth usage.
+# The new grib definitions are put in the source dir of EC-Earth 3.
+
+set -e
+
+# --- Path to "grib_api/definitions"
+
+GRIBAPI_BASE_DIR=${EBROOTGRIB_API:-}   # EasyBuild env var; may be empty if no module is loaded
+
+errmess="*ERROR*: Could not find the path to grib_info or codes_info
+commands. Please set GRIBAPI_BASE_DIR manually, or load the
+GRIB_API or ECCODES module if you rely on module to set your
+env, and run this script again."
+
+if [ -z "${GRIBAPI_BASE_DIR}" ]
+then
+    echo "Your GRIBAPI_BASE_DIR is empty"
+    echo "Trying to retrieve '<your-grib_api-root-dir>/share/[grib_api|eccodes]/definitions'"
+    echo "  from the environment instead..."
+    echo "Note that if both grib_info and codes_info are found in your path,"
+    echo "  GRIB_API takes precedence over ECCODES"
+
+    info=($(which grib_info codes_info 2>/dev/null || true))
+    if [ -x "${info[0]}" ]
+    then
+        grib_def_dir=$(${info[0]} | sed -n "s|Default definition files path is used: ||p")
+    else
+        echo "$errmess"
+        exit 1
+    fi
+    if [ -z "${grib_def_dir}" ]
+    then
+        echo "$errmess"
+        exit 1
+    else
+        echo Found it.
+    fi
+elif [ -e ${GRIBAPI_BASE_DIR}/share/grib_api/definitions ]
+then
+    grib_def_dir=${GRIBAPI_BASE_DIR}/share/grib_api/definitions
+
+elif [ -e ${GRIBAPI_BASE_DIR}/share/eccodes/definitions ]
+then
+    grib_def_dir=${GRIBAPI_BASE_DIR}/share/eccodes/definitions
+else
+    echo "ERROR: Path ${GRIBAPI_BASE_DIR}/share/[grib_api|eccodes]/definitions does not exist"
+    exit 1
+fi
+
+# --- Create target and populate
+
+target_dir=${HOME}/models/ecearth_3.3.3.2/sources/util/grib_table_126/grib1/localConcepts/ecmf
+mkdir -p ${target_dir}
+cp ${grib_def_dir}/grib1/localConcepts/ecmf/* ${target_dir}
+
+# --- additions to grib definitions for EC-Earth table 126
+
+cd $target_dir
+
+for ((i=1;i<=255;i++))
+do
+#cfVarName
+    cat >> cfVarName.def <<EOF
+
+# EC-Earth product $i
+'ece${i}.126' = {
+               table2Version = 126 ;
+               indicatorOfParameter = $i ;
+             }
+EOF
+
+# paramId
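+#   (built as '126' followed by the parameter number zero-padded to three digits,
+#    e.g. 126001 for i=1, 126042 for i=42 and 126255 for i=255)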
+    str=126${i}
+    lim=100
+    if [ $i -lt $lim ]; then
+        str=1260${i}
+    fi
+    lim=10
+    if [ $i -lt $lim ]; then
+        str=12600${i}
+    fi
+
+    cat >> paramId.def <<EOF
+
+# EC-Earth product $i
+'${str}' = {
+            table2Version = 126 ;
+            indicatorOfParameter = $i ;
+          }
+EOF
+
+#name
+    cat >> name.def <<EOF
+
+# EC-Earth product $i
+'EC-Earth product $i' = {
+                         table2Version = 126 ;
+                         indicatorOfParameter = $i ;
+                       }
+EOF
+
+#shortName
+    cat >> shortName.def <<EOF
+
+# EC-Earth product $i
+'~' = {
+       table2Version = 126 ;
+       indicatorOfParameter = $i ;
+     }
+EOF
+
+#units
+    cat >> units.def <<EOF
+
+# EC-Earth product $i
+'~' = {
+       table2Version = 126 ;
+       indicatorOfParameter = $i ;
+     }
+EOF
+
+done
+
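
A minimal sketch of how the generated table could be checked, assuming ecCodes is the GRIB library in use; ECCODES_DEFINITION_PATH, codes_info -d and grib_ls are standard ecCodes features, but the GRIB file name below is hypothetical:

    # prepend the new definitions to the default ecCodes search path
    export ECCODES_DEFINITION_PATH=${HOME}/models/ecearth_3.3.3.2/sources/util/grib_table_126:$(codes_info -d)

    # list the decoded identifiers of table 126 records in an EC-Earth output file
    grib_ls -p paramId,shortName,name some_output.grb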

+ 1 - 0
sources/xios-2.5/arch/arch-ecconf.env

@@ -0,0 +1 @@
+# FILE NOT IN USE

+ 24 - 0
sources/xios-2.5/arch/arch-ecconf.fcm

@@ -0,0 +1,24 @@
+################################################################################
+###################                Projet XIOS               ###################
+################################################################################
+
+%CCOMPILER      mpicc
+%FCOMPILER      mpif90
+%LINKER         mpif90  -nofor-main
+
+%BASE_CFLAGS    -ansi -w
+%PROD_CFLAGS    -O3 -march=core-avx2 -DBOOST_DISABLE_ASSERTS
+%DEV_CFLAGS     -g -O1 -fp-model precise -march=core-avx2
+%DEBUG_CFLAGS   -g 
+
+%BASE_FFLAGS    -D__NONE__
+%PROD_FFLAGS    -r8 -O3 -march=core-avx2
+%DEV_FFLAGS     -g -r8 -O3 -march=core-avx2 -ip -fp-model precise -extend-source 132 
+%DEBUG_FFLAGS   -g 
+
+%BASE_INC       -D__NONE__
+%BASE_LD        -lstdc++
+
+%CPP            mpicc -EP
+%FPP            fpp
+%MAKE           make

+ 15 - 0
sources/xios-2.5/arch/arch-ecconf.path

@@ -0,0 +1,15 @@
+NETCDF_INCDIR="-I${EBROOTNETCDF}/include -I${EBROOTNETCDFMINFORTRAN}/include"
+NETCDF_LIBDIR="-L${EBROOTNETCDF}/lib64 -L${EBROOTNETCDFMINFORTRAN}/lib"
+NETCDF_LIB="-lnetcdf -lnetcdff"
+
+MPI_INCDIR=""
+MPI_LIBDIR=""
+MPI_LIB=""
+
+HDF5_INCDIR="-I${HDF5_DIR}/include"
+HDF5_LIBDIR="-L${HDF5_DIR}/lib"
+HDF5_LIB="-lhdf5_hl -lhdf5 -lz -lcurl"
+
+OASIS_INCDIR='$(addprefix -I,${HOME}/models/ecearth_3.3.3.2/sources/oasis3-mct/ecconf/build/lib/psmile.MPI1)'
+OASIS_LIBDIR='$(addprefix -L,${HOME}/models/ecearth_3.3.3.2/sources/oasis3-mct/ecconf/lib)'
+OASIS_LIB='$(addprefix -l,psmile.MPI1 mct mpeu scrip)'
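
A sketch of how these three arch files are typically consumed by the XIOS build system, assuming the standard make_xios driver; the option values below are illustrative and depend on the local installation:

    cd sources/xios-2.5
    ./make_xios --arch ecconf --use_oasis oasis3_mct --netcdf_lib netcdf4_par --job 8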